branch_name (stringclasses, 15 values) | target (stringlengths, 26 to 10.3M) | directory_id (stringlengths, 40 to 40) | languages (sequencelengths, 1 to 9) | num_files (int64, 1 to 1.47k) | repo_language (stringclasses, 34 values) | repo_name (stringlengths, 6 to 91) | revision_id (stringlengths, 40 to 40) | snapshot_id (stringlengths, 40 to 40) | input (stringclasses, 1 value) |
---|---|---|---|---|---|---|---|---|---|
refs/heads/master | <repo_name>tebeco/electron-starters<file_sep>/electron-ts/src/gui.ts
document.getElementsByTagName('body')[0].innerHTML = `node Version: ${process.versions.node}`;
<file_sep>/electron-ts/webpack.config.ts
require('ts-node/register');
const path = require('path')
const commonConfig = {
node: {
__dirname: false
},
output: {
path: path.resolve(__dirname, 'dist'),
filename: '[name].js'
},
module: {
rules: [
{
test: /\.ts$/,
enforce: 'pre',
loader: 'tslint-loader',
options: {
typeCheck: true,
emitErrors: true
}
},
{
test: /\.tsx?$/,
loader: 'ts-loader'
}
]
},
resolve: {
extensions: ['.ts', '.json']
}
}
const HtmlWebpackPlugin = require('html-webpack-plugin');
const config = [
{
...commonConfig,
...{
target: 'electron-main',
entry: { main: './src/main.ts' }
}
},
{
...commonConfig,
... {
target: 'electron-renderer',
entry: { gui: './src/gui.ts' },
plugins: [new HtmlWebpackPlugin()]
}
}
];
export default config;
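// Illustrative note (not part of the original file): exporting an array lets one webpack run build both
// bundles. A hypothetical script such as `"build": "webpack --config webpack.config.ts"` in package.json
// would emit dist/main.js for the electron-main target and dist/gui.js (plus an HtmlWebpackPlugin-generated
// index.html) for the electron-renderer target; the script name is an assumption, not something this repo defines.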
<file_sep>/electron-ts-webpack/webpack.config.ts
require('ts-node/register');
import { resolve } from 'path';
import * as HtmlWebpackPlugin from 'html-webpack-plugin';
const commonConfig = {
node: {
__dirname: false
},
output: {
path: resolve(__dirname, 'dist'),
filename: '[name].js'
},
module: {
rules: [
{
test: /\.ts$/,
enforce: 'pre',
loader: 'tslint-loader',
options: {
typeCheck: true,
emitErrors: true
}
},
{
test: /\.tsx?$/,
loader: 'ts-loader'
}
]
},
resolve: {
extensions: ['.ts', '.json']
}
}
const config = [
{
...commonConfig,
...{
target: 'electron-main',
entry: { main: './electron/main.ts' }
}
},
{
...commonConfig,
... {
target: 'electron-renderer',
entry: { gui: './src/main.ts' },
plugins: [new HtmlWebpackPlugin()]
}
}
];
export default config;
| 2483b33e62965bf7827efd2622310b7172eaa329 | ["TypeScript"] | 3 | TypeScript | tebeco/electron-starters | 155e55a977d071d9894d2480d7876dd65e8c828d | 2356883484c7d58f6b4e34d2db81d02d8674490d | |
refs/heads/master | <file_sep>extern crate tiff;
use tiff::ColorType;
use tiff::decoder::{Decoder, DecodingResult};
use std::fs::File;
#[test]
fn test_gray_u8()
{
let img_file = File::open("./tests/images/minisblack-1c-8b.tiff").expect("Cannot find test image!");
let mut decoder = Decoder::new(img_file).expect("Cannot create decoder");
assert_eq!(decoder.colortype().unwrap(), ColorType::Gray(8));
let img_res = decoder.read_image();
assert!(img_res.is_ok());
}
#[test]
fn test_rgb_u8()
{
let img_file = File::open("./tests/images/rgb-3c-8b.tiff").expect("Cannot find test image!");
let mut decoder = Decoder::new(img_file).expect("Cannot create decoder");
assert_eq!(decoder.colortype().unwrap(), ColorType::RGB(8));
let img_res = decoder.read_image();
assert!(img_res.is_ok());
}
#[test]
fn test_gray_u16()
{
let img_file = File::open("./tests/images/minisblack-1c-16b.tiff").expect("Cannot find test image!");
let mut decoder = Decoder::new(img_file).expect("Cannot create decoder");
assert_eq!(decoder.colortype().unwrap(), ColorType::Gray(16));
let img_res = decoder.read_image();
assert!(img_res.is_ok());
}
#[test]
fn test_rgb_u16()
{
let img_file = File::open("./tests/images/rgb-3c-16b.tiff").expect("Cannot find test image!");
let mut decoder = Decoder::new(img_file).expect("Cannot create decoder");
assert_eq!(decoder.colortype().unwrap(), ColorType::RGB(16));
let img_res = decoder.read_image();
assert!(img_res.is_ok());
}
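// Hypothetical extra test (added for illustration, not part of the original suite): shows how a caller
// can match on the `DecodingResult` imported above to reach the decoded pixel buffer. The U8/U16 variant
// names are assumed from this version of the crate; the exact buffer length depends on the test image.
#[test]
fn test_gray_u8_buffer_variant()
{
    let img_file = File::open("./tests/images/minisblack-1c-8b.tiff").expect("Cannot find test image!");
    let mut decoder = Decoder::new(img_file).expect("Cannot create decoder");
    match decoder.read_image().expect("Decoding failed") {
        DecodingResult::U8(buffer) => assert!(!buffer.is_empty()),
        _ => panic!("Expected an 8-bit buffer for this image"),
    }
}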
// TODO: GrayA support
//#[test]
//fn test_gray_alpha_u8()
//{
//let img_file = File::open("./tests/images/minisblack-2c-8b-alpha.tiff").expect("Cannot find test image!");
//let mut decoder = Decoder::new(img_file).expect("Cannot create decoder");
//assert_eq!(decoder.colortype().unwrap(), ColorType::GrayA(8));
//let img_res = decoder.read_image();
//assert!(img_res.is_ok());
//}
| 83b8adb57ee9f92d0386faa0c5ab8be94db7a402 | ["Rust"] | 1 | Rust | bvssvni/image-tiff | a8f4aebf64d0a311fb6e8bbc6422d83f76e376fe | c132d206afd9d9cb765719abaf3069471e139ed9 | |
refs/heads/master | <file_sep>//
// MineInfoViewController.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/28.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class MineInfoViewController: UIViewController,UITableViewDelegate,UITableViewDataSource,UIImagePickerControllerDelegate,UINavigationControllerDelegate {
//cell identifier constants
let headerImageCellIdentifier = "headerImageCell"
let nicknameCellIdentifier = "nicknameCell"
let mobileCellIdentifier = "mobileCell"
let passwordCellIdentifier = "passwordCell"
let showEditPasswordSegueIdentifier = "showEditPasswordSegue"
let showEditPhoneSegueIdentifier = "showEditPhoneSegue"
let showEditNicknameIdentifier = "showEditNicknameSegue"
@IBOutlet weak var tableView: UITableView!
override func viewDidLoad() {
super.viewDidLoad()
self.addBackItem()
//Give the table view an empty footer
self.tableView.tableFooterView = UIView()
}
//MARK:- UITableViewDataSource
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return 4
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
switch indexPath.row {
case 0:
let cell = tableView.dequeueReusableCell(withIdentifier: headerImageCellIdentifier , for: indexPath)
return cell
case 1:
let cell = tableView.dequeueReusableCell(withIdentifier: nicknameCellIdentifier , for: indexPath)
return cell
case 2:
let cell = tableView.dequeueReusableCell(withIdentifier: mobileCellIdentifier , for: indexPath)
return cell
default:
let cell = tableView.dequeueReusableCell(withIdentifier: passwordCellIdentifier , for: indexPath)
return cell
}
}
//MARK:- UITableViewDelegate
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
return 50
}
func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
switch indexPath.row {
case 0:
showSheet()
case 1:
self.performSegue(withIdentifier: showEditNicknameIdentifier, sender: self)
case 2:
self.performSegue(withIdentifier: showEditPhoneSegueIdentifier, sender: self)
case 3:
self.performSegue(withIdentifier: showEditPasswordSegueIdentifier, sender: self)
default:
print("default")
}
}
//MARK:- Present action sheet
func showSheet() {
let alert = UIAlertController(title: nil, message: nil, preferredStyle: .actionSheet)
//Cancel action
let actionCancel = UIAlertAction(title: "取消", style: .cancel, handler: nil)
//Photo library action
let actionPhotoLibray = UIAlertAction(title: "相册中选择图片", style: .default) { (UIAlertAction) -> Void in
if UIImagePickerController.isSourceTypeAvailable(.photoLibrary){
let imagePicker = UIImagePickerController()
imagePicker.delegate = self
imagePicker.allowsEditing = false
imagePicker.sourceType = .photoLibrary
self.present(imagePicker, animated: true, completion: nil)
}
}
//Camera action
let actionCamera = UIAlertAction(title: "相机拍摄", style: .default) { (UIAlertAction) -> Void in
if UIImagePickerController.isSourceTypeAvailable(.camera){
let imagePicker = UIImagePickerController()
imagePicker.delegate = self
imagePicker.allowsEditing = false
imagePicker.sourceType = .camera
self.present(imagePicker, animated: true, completion: nil)
}
}
alert.addAction(actionPhotoLibray)
alert.addAction(actionCamera)
alert.addAction(actionCancel)
self.present(alert, animated: true,completion: nil)
}
//MARK:- UIImagePickerControllerDelegate
//Image picker selection callback
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any]) {
(self.tableView.cellForRow(at: IndexPath(row: 0, section: 0)) as! InfoHeaderTableViewCell ).headerImageView.image = info[UIImagePickerControllerOriginalImage] as? UIImage
dismiss(animated: true, completion: nil)
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if let vc = segue.destination as? EditPhoneOneViewController{
vc.phoneString = "15857476747"
}else if let vc = segue.destination as? EditNicknameViewController{
vc.oldNickname = "小<PASSWORD>"
}
}
}
<file_sep>//
// NavigationControllerExtension.swift
// SuperWeChat
//
// Created by 周鹏杰 on 16/9/18.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
extension UINavigationController{
func customPopViewController(animated:Bool){
self.popViewController(animated: true)
}
//Remove the hairline under the navigation bar
open override func viewDidLoad() {
self.navigationBar.setBackgroundImage(UIImage(), for: .default)
self.navigationBar.shadowImage = UIImage()
}
}
<file_sep>//
// ClassDetailTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/13.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class ClassDetailTableViewCell: UITableViewCell,UICollectionViewDataSource {
@IBOutlet weak var subClassTitleLabel: UILabel!
@IBOutlet weak var collectionView: UICollectionView!
override func awakeFromNib() {
super.awakeFromNib()
let itemSize = CGSize(width: ITEM_WIDTH, height: ITEM_WIDTH*(125/100))
let layout = PublicCollectionViewLayout(sectionInset: UIEdgeInsets(top: 20, left: 10, bottom: 20, right: 10), minimumLineSpacing: 20, minimumInteritemSpacing: 10.5, itemSize: itemSize)
self.collectionView.collectionViewLayout = layout
self.collectionView.dataSource = self
self.collectionView.isScrollEnabled = false
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
}
func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int{
return 6
}
func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell{
let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "classDetailItemCell", for: indexPath)
return cell
}
}
<file_sep>//
// TopImageButton.swift
// SuperWeChat
//
// Created by 周鹏杰 on 16/9/19.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
@IBDesignable
class TopImageButton: UIButton {
override init(frame: CGRect) {
super.init(frame: frame)
self.setImage(UIImage(named: "friend"), for: .normal)
self.setTitle("朋友圈", for: .normal)
self.setTitleColor(.red, for: .normal)
self.titleLabel?.font = UIFont.systemFont(ofSize: 18 )
initButton()
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
@IBInspectable var space:CGFloat = 0{
didSet{
self.initButton()
}
}
func initButton(){
self.contentVerticalAlignment = .top
self.contentHorizontalAlignment = .center
let imageViewWidth :CGFloat! = self.imageView?.intrinsicContentSize.width
let imageViewHeight :CGFloat! = self.imageView?.intrinsicContentSize.height
let titleLabelWidth :CGFloat! = self.titleLabel?.intrinsicContentSize.width
// let titleLabelHeight:CGFloat! = self.titleLabel?.intrinsicContentSize.height
//The title's top inset grows by the imageView height plus spacing and its left inset shrinks by the imageView width; bottom and right insets stay unchanged
self.titleEdgeInsets = UIEdgeInsets(top: imageViewHeight + space, left: -imageViewWidth, bottom: 0, right: 0)
//The image's right inset shrinks by the title width; everything else stays unchanged
self.imageEdgeInsets = UIEdgeInsets(top: 0, left: 0, bottom: 0, right: -titleLabelWidth)
self.contentEdgeInsets = UIEdgeInsets.zero
}
}
<file_sep>//
// File.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/11/1.
// Copyright © 2016年 周. All rights reserved.
//
import Foundation
extension UITextField{
/// Sets the text field's placeholder color
///
/// - Parameter color: the color to apply
func setPlaceholderColor(color:UIColor){
let newPlaceholder = NSAttributedString(string: self.placeholder!, attributes: [NSForegroundColorAttributeName:color])
self.attributedPlaceholder = newPlaceholder
}
}
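// Illustrative usage sketch (not in the original code): `setPlaceholderColor` force-unwraps `placeholder`,
// so assign a placeholder before calling it. The function and field below are hypothetical.
private func demoPlaceholderColor(_ field: UITextField) {
    field.placeholder = "Search"
    field.setPlaceholderColor(color: .lightGray)
}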
<file_sep>//
// GoodsBottomTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/12/10.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class GoodsBottomTableViewCell: UITableViewCell ,UITableViewDelegate,UITableViewDataSource{
//cell identifier
let evaluateCellIdentifier = "evaluateCell"
var topBarView = UIView()
var goodsInfoButton = UIButton()
var evaluateButton = UIButton()
var contentTableView = UITableView()
var allButton = UIButton()
override init(style: UITableViewCellStyle, reuseIdentifier: String?) {
super.init(style: style, reuseIdentifier: reuseIdentifier)
self.contentView.addSubview(topBarView)
self.topBarView.addSubview(goodsInfoButton)
self.topBarView.addSubview(evaluateButton)
self.contentView.addSubview(contentTableView)
self.contentView.addSubview(allButton)
self.setAllAttributes()
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func setAllAttributes(){
self.setLayout()
self.setData()
self.setFrame()
}
func setLayout(){
topBarView.snp.makeConstraints { (make) in
make.left.top.right.equalToSuperview()
make.height.equalTo(41)
}
goodsInfoButton.snp.makeConstraints { (make) in
make.left.top.equalToSuperview()
make.width.equalTo(SCREEN_WIDTH/2 - 0.5)
make.height.equalTo(40)
}
evaluateButton.snp.makeConstraints { (make) in
make.right.top.equalToSuperview()
make.width.equalTo(SCREEN_WIDTH/2 - 0.5)
make.height.equalTo(40)
}
contentTableView.snp.makeConstraints { (make) in
make.edges.equalToSuperview().inset(UIEdgeInsets(top: 41, left: 0, bottom: 45, right: 0))
}
allButton.snp.makeConstraints { (make) in
make.centerX.equalToSuperview()
make.bottom.equalToSuperview().offset(-10)
make.height.equalTo(25)
make.width.equalTo(70)
}
}
func setData(){
}
func setFrame(){
//Set a background color so the separator between the two buttons shows through
self.topBarView.backgroundColor = HiTaoBackgroundColor
self.goodsInfoButton.backgroundColor = .white
self.goodsInfoButton.setTitleColor(.black, for: .normal)
self.goodsInfoButton.setTitle("商品信息", for: .normal)
self.goodsInfoButton.titleLabel?.font = BigFont
self.goodsInfoButton.addTarget(self, action: #selector(GoodsBottomTableViewCell.infoBtn(_:)), for: .touchUpInside)
self.evaluateButton.backgroundColor = .white
self.evaluateButton.setTitleColor(.black, for: .normal)
self.evaluateButton.setTitle("宝贝评价(122)", for: .normal)
self.evaluateButton.titleLabel?.font = BigFont
self.evaluateButton.addTarget(self, action: #selector(GoodsBottomTableViewCell.evaluateBtn(_:)), for: .touchUpInside)
self.contentTableView.separatorStyle = .none
self.contentTableView.isScrollEnabled = false
self.contentTableView.register(EvaluateTableViewCell.self, forCellReuseIdentifier: evaluateCellIdentifier)
self.contentTableView.delegate = self
self.contentTableView.dataSource = self
self.allButton.setTitle("查看全部评价", for: .normal)
self.allButton.titleLabel?.font = SmallFont
self.allButton.setBackgroundImage(UIImage(named:"btn_nor_gray_140x50"), for: .normal)
}
/// Action handler for the "goods info" button
///
/// - Parameter sender: the button that sent the event
func infoBtn(_ sender:UIButton){
self.goodsInfoButton.setTitleColor(customRed, for: .normal)
self.evaluateButton.setTitleColor(.black, for: .normal)
print("商品信息.....")
}
/// Action handler for the "reviews" button
///
/// - Parameter sender: the button that sent the event
func evaluateBtn(_ sender:UIButton){
self.evaluateButton.setTitleColor(customRed, for: .normal)
self.goodsInfoButton.setTitleColor(.black, for: .normal)
print("宝贝评价.....")
}
//MARK: - UITableViewDataSource
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return 2
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: evaluateCellIdentifier, for: indexPath)
//Disable the cell selection highlight
cell.selectionStyle = .none
return cell
}
//MARK: - UITableViewDelegate
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
return 90
}
}
<file_sep>//
// AddressManageViewController.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/31.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class AddressManageViewController: UIViewController,UITableViewDataSource ,UITableViewDelegate{
var count:Int = 4
let addressCellIdentifier = "addressCell"
override func viewDidLoad() {
super.viewDidLoad()
self.addBackItem()
self.setRightItem(fontSize:12)
}
@IBOutlet weak var tableView: UITableView!
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
//MARK:- UITableViewDataSource
func numberOfSections(in tableView: UITableView) -> Int {
return count
}
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return 1
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: addressCellIdentifier, for: indexPath)
return cell
}
//MARK:- UITableViewDelegate
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
return 105
}
func tableView(_ tableView: UITableView, heightForHeaderInSection section: Int) -> CGFloat {
switch section {
case 0:
return 1
default:
return 10
}
}
func tableView(_ tableView: UITableView, heightForFooterInSection section: Int) -> CGFloat {
return 0.000001
}
@IBAction func unWindMethod(segue:UIStoryboardSegue){
self.count += 1
self.tableView.reloadData()
}
/*
// MARK: - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
// Get the new view controller using segue.destinationViewController.
// Pass the selected object to the new view controller.
}
*/
}
<file_sep>//
// NewCheapTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/11/16.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class NewCheapTableViewCell: UITableViewCell {
var headerImageView = UIImageView()
var goodsNameLabel = UILabel()
var newPriceLabel = UILabel()
var oldPriceLabel = UILabel()
var lineView = UIView()
var buyButton = UIButton()
override init(style: UITableViewCellStyle, reuseIdentifier: String?) {
super.init(style: style, reuseIdentifier: reuseIdentifier)
self.contentView.addSubview(headerImageView)
self.contentView.addSubview(goodsNameLabel)
self.contentView.addSubview(newPriceLabel)
self.contentView.addSubview(oldPriceLabel)
self.contentView.addSubview(lineView)
self.contentView.addSubview(buyButton)
self.setAllAttributes()
}
func setAllAttributes(){
self.setLayout()
self.setData()
self.setFrame()
}
func setLayout(){
headerImageView.snp.makeConstraints { (make) in
make.width.equalToSuperview()
make.height.equalTo(150)
make.left.top.equalToSuperview()
}
lineView.snp.makeConstraints { (make) in
make.height.equalTo(10)
make.width.equalTo(1)
make.centerX.equalToSuperview()
make.top.equalTo(headerImageView.snp.bottom).offset(10)
}
goodsNameLabel.snp.makeConstraints { (make) in
make.centerY.equalTo(lineView)
make.right.equalTo(lineView.snp.left).offset(-20)
}
newPriceLabel.snp.makeConstraints { (make) in
make.centerY.equalTo(lineView)
make.left.equalTo(lineView.snp.right).offset(20)
}
oldPriceLabel.snp.makeConstraints { (make) in
make.left.equalTo(newPriceLabel.snp.right).offset(5)
make.centerY.equalTo(newPriceLabel)
}
buyButton.snp.makeConstraints { (make) in
make.width.equalTo(80)
make.height.equalTo(40)
make.centerX.equalToSuperview()
make.bottom.equalToSuperview().offset(-10)
}
}
func setData(){
self.headerImageView.image = UIImage(named:imageDefaultName)
self.goodsNameLabel.text = "新上市蓝莓"
self.newPriceLabel.text = "¥100"
self.oldPriceLabel.text = "¥180"
}
func setFrame(){
self.goodsNameLabel.font = BigFont
self.lineView.backgroundColor = customGray
self.newPriceLabel.setNewLabel(headerFontSize: 9, footerFontSize: 11)
self.oldPriceLabel.setOldLabel(fontSize: 9)
self.buyButton.setBackgroundImage(UIImage(named:"btn_sel_red_160x80"), for: .normal)
self.buyButton.titleLabel?.font = BigFont
self.buyButton.setTitle("抢购", for: .normal)
self.buyButton.titleEdgeInsets.top -= 5
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func awakeFromNib() {
super.awakeFromNib()
// Initialization code
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
}
}
<file_sep>//
// CouponTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/11/8.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class CouponTableViewCell: UITableViewCell {
var bgView = UIImageView()
var nameLabel = UILabel()
var amountLabel = UILabel()
var totalLabel = UILabel()
var dayLabel = UILabel()
var useButton = UIButton()
override init(style: UITableViewCellStyle, reuseIdentifier: String?) {
super.init(style: style, reuseIdentifier: reuseIdentifier)
self.contentView.addSubview(bgView)
self.bgView.addSubview(nameLabel)
self.bgView.addSubview(amountLabel)
self.bgView.addSubview(totalLabel)
self.bgView.addSubview(useButton)
self.bgView.addSubview(dayLabel)
self.setAllAttributes()
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func setAllAttributes(){
self.setLayout()
self.setData(amount: 11, total: 11)
self.setFrame()
}
func setLayout(){
bgView.snp.makeConstraints { (make) in
make.edges.equalToSuperview().inset(UIEdgeInsets(top: 10, left: 0, bottom: 0, right: 10))
}
amountLabel.snp.makeConstraints { (make) in
make.bottom.equalTo(bgView.snp.centerY).offset(-5)
make.centerX.equalTo(bgView.snp.left).offset(75)
}
totalLabel.snp.makeConstraints { (make) in
make.centerX.equalTo(amountLabel)
make.top.equalTo(bgView.snp.centerY).offset(5)
}
dayLabel.snp.makeConstraints { (make) in
make.bottom.equalTo(bgView.snp.centerY).offset(-10)
let rightSpace = SCREEN_WIDTH - 135
make.centerX.equalTo(bgView.snp.right).offset(-(rightSpace - 35)/2 - 35)
}
nameLabel.snp.makeConstraints { (make) in
make.bottom.equalTo(dayLabel.snp.top).offset(-10)
make.centerX.equalTo(dayLabel)
}
useButton.snp.makeConstraints { (make) in
make.width.equalTo(70)
make.height.equalTo(30)
make.top.equalTo(bgView.snp.centerY).offset(10)
make.centerX.equalTo(nameLabel)
}
}
func setData(amount:Double,total:Double){
self.amountLabel.text = "¥5.00"
self.totalLabel.text = "满78元可用"
self.dayLabel.text = "2016.10.20-2016.10.23 (剩余3天)"
self.nameLabel.text = "新人有礼十一专享劵"
}
func setFrame(){
self.contentView.backgroundColor = HiTaoBackgroundColor
bgView.image = UIImage(named:"img_bg_white")
self.amountLabel.customColorAndSizeSmall(smallFontSize: 9, bigFontSize: 22, ranges: [NSRange(location: 0, length: 1)], smallColor: customRed, bigColor: customRed)
self.totalLabel.setFontAndTextColor(font: BigFont, textColor:customGray)
self.dayLabel.font = SmallFont
self.dayLabel.setColorsLabel(colors: [customGray,customRed], range: [NSMakeRange(0, 21),NSMakeRange(22, (self.dayLabel.text?.characters.count)! - 22)])
self.nameLabel.setFontAndTextColor(font: BigFont, textColor: .black)
self.useButton.setBackgroundImage(UIImage(named:"btn_nor_red_140x60"), for:.normal)
self.useButton.setTitle("立即使用", for: .normal)
self.useButton.titleLabel?.font = BigFont
}
func setGrayColor(){
self.amountLabel.textColor = customGray
self.dayLabel.textColor = customGray
self.nameLabel.textColor = customGray
self.useButton.setBackgroundImage(UIImage(named:"btn_nor_gray_140x60"), for:.normal)
self.useButton.setTitle("已过期", for: .normal)
}
override func awakeFromNib() {
super.awakeFromNib()
// Initialization code
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
}
}
<file_sep>//
// MenuContentView.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/27.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class MenuContentView: UIView {
//Total height
var allHeight:CGFloat!
//Data source
var menuArray = ["新品发布","热销排行","本周排行","整","新品发布","热销排行","本周排行","整"]
//Button height
let buttonHeight:CGFloat = 30
//Vertical spacing between buttons
let buttonVerticalSpace:CGFloat = 20
//Horizontal spacing between buttons
let buttonHorizontalSpace:CGFloat = 20
//Widths of the buttons already placed on the current row (cleared when wrapping to a new line)
var buttonWidthArray :[CGFloat] = []
//Left margin at the start of a row
var leftStartSpace :CGFloat = 10
//Top margin
var topSpace:CGFloat = 10
//Selection callback
var completion :(()->())!
//Override the designated initializer
override init(frame: CGRect) {
super.init(frame: frame)
self.addButton()
self.height = topSpace + 50
self.frame.size.height = height
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
//Dynamically add the buttons
func addButton(){
//Reset to the initial layout values
leftStartSpace = 10
topSpace = 10
buttonWidthArray.removeAll(keepingCapacity: true)
for index in 0..<menuArray.count {
var leftSpace = leftStartSpace
for buttonWidth in buttonWidthArray {
leftSpace += buttonWidth + buttonHorizontalSpace
}
//Compute the button width from the title's text width
let width = menuArray[index].getStringWidth(font: MiddleFont) + 30
//Check whether the remaining row width can fit this button; otherwise wrap to a new row
if leftSpace + buttonHorizontalSpace + width + 10 > SCREEN_WIDTH{
buttonWidthArray.removeAll(keepingCapacity: true)
topSpace += buttonVerticalSpace + buttonHeight
leftStartSpace = 10
leftSpace = leftStartSpace
}
//Configure the button style
let iconButton = UIButton()
iconButton.titleLabel?.font = MiddleFont
iconButton.setTitle(menuArray[index], for: .normal)
iconButton.setTitleColor(customGray, for: .normal)
iconButton.setBackgroundImage(UIImage(named:"background_icon"), for: .normal)
//Use the tag to distinguish the buttons
iconButton.tag = index
self.addSubview(iconButton)
//Add the button's action handler
iconButton.addTarget(self, action: #selector(MenuContentView.selectMenu(_:)), for: .touchUpInside)
//Append the new button's width to the array
buttonWidthArray.append(width)
//Add layout constraints
iconButton.snp.makeConstraints({ (make) in
make.width.equalTo(width)
make.height.equalTo(buttonHeight)
make.left.equalTo(leftSpace)
make.top.equalTo(topSpace)
})
}
}
/// Button tap handler
///
/// - Parameter sender: the button that initiated the event
func selectMenu(_ sender:UIButton){
switch sender.tag {
case 0:
print("选中条件1")
self.changeButtonColor(sender: sender)
completion()
default:
print("选中其他条件")
self.changeButtonColor(sender: sender)
completion()
}
}
//Swap the buttons' background images to reflect the selection
func changeButtonColor(sender:UIButton){
for view in self.subviews{
if let view = view as? UIButton{
view.setBackgroundImage(UIImage(named:"background_icon"), for: .normal)
view.setTitleColor(customGray, for: .normal)
}
}
sender.setBackgroundImage(UIImage(named:"selectBackground_icon"), for: .normal)
sender.setTitleColor(.white, for: .normal)
}
}
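// Illustrative usage sketch (not part of the original project): a hypothetical owner wires up the selection
// callback and adds the menu to its view hierarchy; the y-offset of 64 is an assumption.
private func demoAttachMenuContentView(to container: UIView) {
    let menu = MenuContentView(frame: CGRect(x: 0, y: 64, width: SCREEN_WIDTH, height: 0))
    menu.completion = {
        print("Menu condition changed; reload the result list here")
    }
    container.addSubview(menu)
}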
<file_sep>//
// NSObject+Extension.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/11/1.
// Copyright © 2016年 周. All rights reserved.
//
import Foundation
extension NSObject{
/**
Gets the value of the given property; returns nil if the object has no such property
- parameter property: the property to read
- returns: the property's value
*/
func getValueOfProperty(property:String)->AnyObject?{
let allPropertys = self.getAllPropertys()
if(allPropertys.contains(property)){
return self.value(forKey: property) as AnyObject?
}else{
return nil
}
}
/**
Sets the value of the given property
- parameter property: the property to write
- parameter value: the value to assign
- returns: whether the assignment succeeded
*/
func setValueOfProperty(property:String,value:AnyObject)->Bool{
let allPropertys = self.getAllPropertys()
if(allPropertys.contains(property)){
self.setValue(value, forKey: property)
return true
}else{
return false
}
}
/**
Gets the names of all of the object's properties
- returns: an array of property names
*/
func getAllPropertys()->[String]{
var result = [String]()
//Fixed: allocate(capacity: 0) returned a zero-sized buffer, so reading count[0] was undefined; pass a plain UInt32 by reference instead
var count: UInt32 = 0
let buff = class_copyPropertyList(object_getClass(self), &count)
for i in 0..<Int(count){
let temp = buff?[i]
let tempPro = property_getName(temp)
let proper = String(cString: tempPro!)
result.append(proper)
}
//class_copyPropertyList hands ownership to the caller; free the buffer to avoid a leak
free(buff)
return result
}
}
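// Illustrative usage sketch (not in the original code): these helpers go through the Objective-C runtime,
// so they only see properties exposed to it. `KVCDemo` and the property name are hypothetical.
private class KVCDemo: NSObject {
    @objc var nickname: String = ""
}
private func demoKVCHelpers() {
    let model = KVCDemo()
    _ = model.setValueOfProperty(property: "nickname", value: "HiShopping" as AnyObject)
    if let value = model.getValueOfProperty(property: "nickname") {
        print(value)
    }
}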
<file_sep>//
// MainTopTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/11/18.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
protocol MainTopTableViewCellDelegate :NSObjectProtocol {
func pushToNewViewController(index:Int)
}
class MainTopTableViewCell: UITableViewCell {
@IBOutlet weak var view0: UIView!
@IBOutlet weak var view1: UIView!
@IBOutlet weak var view2: UIView!
@IBOutlet weak var view3: UIView!
weak var delegate :MainTopTableViewCellDelegate!
override func awakeFromNib() {
super.awakeFromNib()
self.view0.tag = 0
self.view1.tag = 1
self.view2.tag = 2
self.view3.tag = 3
let tap0 = UITapGestureRecognizer(target: self, action: #selector(MainTopTableViewCell.pushAction(_:)))
let tap1 = UITapGestureRecognizer(target: self, action: #selector(MainTopTableViewCell.pushAction(_:)))
let tap2 = UITapGestureRecognizer(target: self, action: #selector(MainTopTableViewCell.pushAction(_:)))
let tap3 = UITapGestureRecognizer(target: self, action: #selector(MainTopTableViewCell.pushAction(_:)))
self.view0.addGestureRecognizer(tap0)
self.view1.addGestureRecognizer(tap1)
self.view2.addGestureRecognizer(tap2)
self.view3.addGestureRecognizer(tap3)
}
func pushAction(_ sender: UITapGestureRecognizer){
self.delegate.pushToNewViewController(index: (sender.view?.tag)!)
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
}
<file_sep>//
// MineSecondTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/19.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class MineSecondTableViewCell: UITableViewCell,UICollectionViewDelegate,UICollectionViewDataSource {
let mineSecondCollectionCellIdentifier = "mineSecondCollectionCell"
let imageArray = ["icon_0_0","icon_0_1","icon_0_2","icon_0_3","icon_0_4"]
let titleArray = ["待付款","待发货","待收货","待评价","退款/售后"]
@IBOutlet weak var collectionView: UICollectionView!
override func awakeFromNib() {
super.awakeFromNib()
let layout = PublicCollectionViewLayout(sectionInset: UIEdgeInsets.zero, minimumLineSpacing: 0, minimumInteritemSpacing: 0, itemSize: CGSize(width: SCREEN_WIDTH/5, height: 80))
collectionView.collectionViewLayout = layout
self.collectionView.dataSource = self
self.collectionView.delegate = self
// Initialization code
}
//MARK: -- UICollectionViewDataSource
func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
return 5
}
func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
let cell = collectionView.dequeueReusableCell(withReuseIdentifier: mineSecondCollectionCellIdentifier, for: indexPath) as! MineSecondCollectionViewCell
cell.imageView.image = UIImage(named: imageArray[indexPath.row])
cell.titleLabel.text = titleArray[indexPath.row]
return cell
}
//MARK: -- UICollectionViewDelegate
func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
print("点击第\(indexPath.row+1)个")
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
}
<file_sep>//
// UI.swift
// SuperWeChat
//
// Created by 周鹏杰 on 16/8/19.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
extension UIViewController{
//Set the navigation title
func setNavigationTitle(_ title:String?){
self.navigationItem.title = title
}
//Add a back button; the title defaults to ""
func addBackItem(title:String = ""){
let backItem = UIBarButtonItem()
backItem.title = title
self.navigationController?.navigationBar.tintColor = .black
let backgroundImage = UIImage(named:"icon_arrow_left")
let backImage = backgroundImage?.resizableImage(withCapInsets: UIEdgeInsets(top: 0, left: 7, bottom: 0, right: 0))
backItem.setBackButtonBackgroundImage(backImage, for: .normal, barMetrics: .default)
backItem.setBackButtonTitlePositionAdjustment(UIOffset(horizontal: 0, vertical: 0), for: .default)
self.navigationItem.backBarButtonItem = backItem
}
//Adjust the font size of the right navigation bar item
func setRightItem(fontSize:CGFloat){
let rightItem = self.navigationItem.rightBarButtonItem
rightItem?.setTitleTextAttributes([NSFontAttributeName: UIFont.systemFont(ofSize: fontSize)], for: .normal)
self.navigationItem.rightBarButtonItem = rightItem
}
func returnAction(_ sender:UIBarButtonItem){
self.navigationController?.customPopViewController(animated: true)
}
}
<file_sep>//
// ClassStoryboardHelper.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/12.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class ClassStoryboardHelper: NSObject {
static let storyboardName = "Class"
static let navigationController = "classNavigationController"
static let classViewController = "classViewController"
class func instantiateViewController( identifier: String) -> UIViewController {
let storyboard:UIStoryboard!
storyboard = UIStoryboard(name:self.storyboardName, bundle: nil)
let viewController = storyboard.instantiateViewController(withIdentifier: identifier)
return viewController
}
}
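// Illustrative usage sketch (not in the original file): the identifier constants above must match the
// storyboard IDs configured in Class.storyboard, otherwise instantiation traps at runtime.
private func demoLoadClassViewController() -> UIViewController {
    return ClassStoryboardHelper.instantiateViewController(identifier: ClassStoryboardHelper.classViewController)
}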
<file_sep>//
// EditPhoneOneViewController.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/11/1.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class EditPhoneOneViewController: UIViewController {
var phoneString:String!
@IBOutlet weak var phoneLabel: UILabel!
override func viewDidLoad() {
super.viewDidLoad()
self.addBackItem()
self.setPhoneLabel()
}
func setPhoneLabel(){
if let phoneNumber = phoneString{
self.phoneLabel.text = phoneNumber
}else{
self.phoneLabel.text = "该账号未绑定手机"
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if let vc = segue.destination as? EditPhoneTwoViewController{
vc.oldPhoneNumber = "15857476747"
}
}
}
<file_sep>//
// AppDelegate.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/9.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
import IQKeyboardManagerSwift
import MMDrawerController
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
var window: UIWindow?
var leftSlideVC: LeftSlideViewController!
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
//Enable keyboard management
IQKeyboardManager.sharedManager().enable = true
//Set up the window size and root view controller
self.window = UIWindow(frame: CGRect(x: 0,y: 0,width: SCREEN_WIDTH,height: SCREEN_HEIGTH))
let guide = UserDefaults.standard.object(forKey: "guide") as? String
if guide == nil{
self.window?.rootViewController = GuideViewController()
}else{
let leftViewController = LeftDrawerViewController()
leftViewController.view.backgroundColor = .white
let mainViewController = MainTabBarViewController()
leftSlideVC = LeftSlideViewController(leftView: leftViewController, andMainView:mainViewController)
leftSlideVC.setPanEnabled(false)
self.window?.rootViewController = leftSlideVC
}
self.window?.makeKeyAndVisible()
return true
}
//Alternative MMDrawerController-based setup, kept for reference
// let drawerViewController = MMDrawerController(center: MainTabBarViewController(), leftDrawerViewController: leftViewController)
// drawerViewController?.maximumLeftDrawerWidth = SCREEN_WIDTH*0.5
// drawerViewController?.openDrawerGestureModeMask = .all
// drawerViewController?.closeDrawerGestureModeMask = .all
func applicationWillResignActive(_ application: UIApplication) {
// Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
// Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
}
func applicationDidEnterBackground(_ application: UIApplication) {
// Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
// If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
}
func applicationWillEnterForeground(_ application: UIApplication) {
// Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
}
func applicationDidBecomeActive(_ application: UIApplication) {
// Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
}
func applicationWillTerminate(_ application: UIApplication) {
// Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
}
}
<file_sep>//
// GoodsTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/21.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class SupportView:UIView{
var supportView1 = UIImageView()
var supportView2 = UIImageView()
var supportView3 = UIImageView()
var supportLabel1 = UILabel()
var supportLabel2 = UILabel()
var supportLabel3 = UILabel()
var moreImageView = UIImageView()
override init(frame: CGRect) {
super.init(frame: frame)
self.addSubview(supportView1)
self.addSubview(supportView2)
self.addSubview(supportView3)
self.addSubview(supportLabel1)
self.addSubview(supportLabel2)
self.addSubview(supportLabel3)
self.addSubview(moreImageView)
self.setAllAttributes()
}
func setAllAttributes(){
self.setLayout()
self.setFormat()
}
func setLayout(){
supportView1.snp.makeConstraints { (make) in
make.width.height.equalTo(10)
make.left.equalToSuperview()
make.centerY.equalToSuperview()
}
supportLabel1.snp.makeConstraints { (make) in
make.left.equalTo(supportView1.snp.right).offset(5)
make.centerY.equalToSuperview()
}
supportView2.snp.makeConstraints { (make) in
make.width.height.equalTo(supportView1)
make.left.equalTo(supportLabel1.snp.right).offset(5)
make.centerY.equalToSuperview()
}
supportLabel2.snp.makeConstraints { (make) in
make.left.equalTo(supportView2.snp.right).offset(5)
make.centerY.equalToSuperview()
}
supportView3.snp.makeConstraints { (make) in
make.width.height.equalTo(supportView1)
make.left.equalTo(supportLabel2.snp.right).offset(5)
make.centerY.equalToSuperview()
}
supportLabel3.snp.makeConstraints { (make) in
make.left.equalTo(supportView3.snp.right).offset(5)
make.centerY.equalToSuperview()
}
moreImageView.snp.makeConstraints { (make) in
make.right.equalToSuperview()
make.centerY.equalToSuperview()
make.width.equalTo(13)
make.height.equalTo(3)
}
}
func setData(supportTitleArray:String ...){
self.supportLabel1.text = supportTitleArray[0]
self.supportLabel2.text = supportTitleArray[1]
self.supportLabel3.text = supportTitleArray[2]
}
func setFormat(){
self.supportLabel1.font = SmallFont
self.supportLabel1.textColor = customGray
self.supportView1.image = UIImage(named:"icon_tick")
self.supportLabel2.font = SmallFont
self.supportLabel2.textColor = customGray
self.supportView2.image = UIImage(named:"icon_tick")
self.supportLabel3.font = SmallFont
self.supportLabel3.textColor = customGray
self.supportView3.image = UIImage(named:"icon_tick")
self.moreImageView.image = UIImage(named:"icon_more")
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
class GoodsTableViewCell: UITableViewCell {
var goodsNameLabel = UILabel()
let lineView = UIView()
var shareButton = UIButton()
var newPriceLabel = UILabel()
var oldPriceLabel = UILabel()
var freightLabel = UILabel()
var saleCountLabel = UILabel()
var addressLabel = UILabel()
var supportView = SupportView()
override init(style: UITableViewCellStyle, reuseIdentifier: String?) {
super.init(style: style, reuseIdentifier: reuseIdentifier)
self.contentView.addSubview(goodsNameLabel)
self.contentView.addSubview(lineView)
self.contentView.addSubview(shareButton)
self.contentView.addSubview(newPriceLabel)
self.contentView.addSubview(oldPriceLabel)
self.contentView.addSubview(freightLabel)
self.contentView.addSubview(saleCountLabel)
self.contentView.addSubview(addressLabel)
self.contentView.addSubview(supportView)
self.selectionStyle = .none
self.setAllAttributes()
}
func setAllAttributes(){
self.setLayout()
self.setData()
self.setFormat()
}
func setLayout(){
goodsNameLabel.snp.makeConstraints { (make) in
make.left.top.equalToSuperview().offset(10)
}
shareButton.snp.makeConstraints { (make) in
make.right.equalToSuperview().offset(-10)
make.top.equalTo(goodsNameLabel)
make.width.height.equalTo(14)
}
lineView.snp.makeConstraints { (make) in
make.width.equalTo(1)
make.height.equalTo(goodsNameLabel)
make.centerY.equalTo(shareButton)
make.right.equalTo(shareButton.snp.left).offset(-10)
}
newPriceLabel.snp.makeConstraints { (make) in
make.top.equalTo(goodsNameLabel.snp.bottom).offset(10)
make.left.equalToSuperview().offset(10)
}
oldPriceLabel.snp.makeConstraints { (make) in
make.bottom.equalTo(newPriceLabel)
make.left.equalTo(newPriceLabel.snp.right).offset(10)
}
freightLabel.snp.makeConstraints { (make) in
make.left.equalToSuperview().offset(10)
make.top.equalTo(newPriceLabel.snp.bottom).offset(5)
}
saleCountLabel.snp.makeConstraints { (make) in
make.centerX.equalToSuperview()
make.top.equalTo(freightLabel)
}
addressLabel.snp.makeConstraints { (make) in
make.right.equalToSuperview().offset(-10)
make.top.equalTo(saleCountLabel)
}
supportView.snp.makeConstraints { (make) in
make.left.equalToSuperview().offset(10)
make.right.equalToSuperview().offset(-10)
make.top.equalTo(freightLabel.snp.bottom).offset(20)
make.width.equalTo(20)
}
}
func setData(){
self.goodsNameLabel.text = "白色针织衫"
self.newPriceLabel.text = "¥100"
self.oldPriceLabel.text = "¥200"
self.freightLabel.text = "快递:免运费"
self.saleCountLabel.text = "月销1200笔"
self.addressLabel.text = "浙江 宁波"
self.supportView.setData(supportTitleArray: "7天无理由退货","信用卡支付","蚂蚁花呗")
}
func setFormat(){
self.goodsNameLabel.font = MiddleFont
self.shareButton.setBackgroundImage(UIImage(named:"icon_share"), for: .normal)
self.lineView.backgroundColor = customGray
self.newPriceLabel.setNewLabel(headerFontSize:10,footerFontSize: 13)
self.oldPriceLabel.setOldLabel(fontSize: 10)
self.freightLabel.font = SmallFont
self.saleCountLabel.font = SmallFont
self.addressLabel.font = SmallFont
self.freightLabel.textColor = customGray
self.saleCountLabel.textColor = customGray
self.addressLabel.textColor = customGray
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func awakeFromNib() {
super.awakeFromNib()
// Initialization code
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
}
<file_sep>//
// MineHeaderTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/19.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class MineHeaderTableViewCell: UITableViewCell {
@IBOutlet weak var headerImageView: UIImageView!
override func awakeFromNib() {
super.awakeFromNib()
self.headerImageView.ViewToRoundView(borderWidth: 2.5, borderColor: .white)
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
}
<file_sep>//
// ClassDetailCollectionViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/13.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class ClassDetailCollectionViewCell: UICollectionViewCell {
}
<file_sep>platform :ios, '8.0'
use_frameworks!
target 'HiShopping' do
pod 'Alamofire'
pod 'SnapKit'
pod 'SDWebImage'
pod 'SDCycleScrollView'
pod 'KNSemiModalViewController'
pod 'XLPagerTabStrip'
pod 'Popover'
pod 'MWPhotoBrowser'
pod 'IQKeyboardManagerSwift'
pod 'SwiftyJSON'
pod 'HandyJSON'
pod 'MMDrawerController', '~> 0.5.7'
end
<file_sep>//
// GuideViewController.swift
// YYT-V3-SalaryEdition
//
// Created by archer.wang on 15/8/12.
// Copyright (c) 2015年 Chinajey. All rights reserved.
//
import UIKit
class GuideViewController: UIViewController ,UIScrollViewDelegate{
let kNumberOfScreen = 2
var scrollView:UIScrollView!
var pageControl:UIPageControl!
var startBtn:UIButton!
var image :UIImage!
var imageStartButton :UIImage!
var startContentOffsetX = CGFloat()
var willEndContentOffsetX = CGFloat()
var endContentOffsetX = CGFloat()
var imagePath:[String]?
var imageVersion:String?
override func viewDidLoad() {
super.viewDidLoad()
self.scrollView = UIScrollView(frame: CGRect(x: 0, y: 0, width: SCREEN_WIDTH, height: SCREEN_HEIGTH))
self.scrollView.isScrollEnabled = false
self.scrollView.isPagingEnabled = true
self.scrollView.showsHorizontalScrollIndicator=false
self.scrollView.showsVerticalScrollIndicator=false
self.scrollView.contentSize=CGSize(width: CGFloat(kNumberOfScreen) * SCREEN_WIDTH, height: SCREEN_HEIGTH)
self.getOldImage()
CountDown.startTime(time: 5){ timeout in
if timeout == 3{
UIView.animate(withDuration: 1, animations: {
self.scrollView.contentOffset = CGPoint(x: SCREEN_WIDTH, y: 0)
})
}else if timeout == 0{
UserDefaults.standard.set("1", forKey: "guide")
UserDefaults.standard.synchronize()
(UIApplication.shared.delegate as! AppDelegate).window?.rootViewController = MainTabBarViewController()
}
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
// MARK: - ScrollView Delegate
func scrollViewWillBeginDragging(_ scrollView: UIScrollView) {
self.startContentOffsetX = scrollView.contentOffset.x
}
func scrollViewWillEndDragging(_ scrollView: UIScrollView, withVelocity velocity: CGPoint, targetContentOffset: UnsafeMutablePointer<CGPoint>) {
self.willEndContentOffsetX = scrollView.contentOffset.x
if (self.willEndContentOffsetX != 0 && self.startContentOffsetX == CGFloat((kNumberOfScreen - 1) * Int(SCREEN_WIDTH))) && startContentOffsetX == self.willEndContentOffsetX {
UserDefaults.standard.set("1", forKey: "guide")
UserDefaults.standard.synchronize()
(UIApplication.shared.delegate as! AppDelegate).window?.rootViewController = MainTabBarViewController()
}
}
func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) {
let offset = scrollView.contentOffset
let bounds = scrollView.frame
self.pageControl.currentPage = Int(offset.x / bounds.size.width)
}
func getOldImage(){
for i in 1...kNumberOfScreen {
if(SCREEN_WIDTH == 640){
image = UIImage(named: "引导页640(\(i))")
} else if(SCREEN_WIDTH == 750){
image = UIImage(named: "引导页750(\(i))")
} else {
image = UIImage(named: "引导页1242(\(i))")
}
let imageView = UIImageView(image: image)
imageView.frame = CGRect(x: CGFloat((i-1))*SCREEN_WIDTH, y: 0, width: SCREEN_WIDTH, height: SCREEN_HEIGTH)
imageView.contentMode = UIViewContentMode.scaleToFill
self.scrollView.addSubview(imageView)
}
self.scrollView.bounces=false
self.scrollView.delegate=self
self.view.addSubview(self.scrollView)
//self.pageControl=UIPageControl(frame: CGRect(x: SCREEN_WIDTH/2-100, y: 0, width: 200, height: 40))
//self.pageControl.numberOfPages = 2
//self.pageControl.pageIndicatorTintColor = UIColor.lightGray
//self.pageControl.currentPageIndicatorTintColor = UIColor.darkGray
//self.pageControl.currentPage = 0
//self.view.addSubview(self.pageControl)
}
override var prefersStatusBarHidden : Bool {
return true
}
override var shouldAutorotate : Bool{
return false
}
}
<file_sep>//
// MainStoryboardHelper.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/11.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class MainStoryboardHelper: NSObject {
static let storyboardName = "Main"
static let navigationController = "mainNavigationController"
static let mainViewController = "mainViewController"
static let searchViewController = "searchViewController"
static let pointBuyViewController = "pointBuyViewController"
static let todayCheapViewController = "todayCheapViewController"
class func instantiateViewController( identifier: String) -> UIViewController {
let storyboard:UIStoryboard!
storyboard = UIStoryboard(name:self.storyboardName, bundle: nil)
let viewController = storyboard.instantiateViewController(withIdentifier: identifier)
return viewController
}
}
<file_sep>//
// LeftDrawerViewController.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/11/21.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class LeftDrawerTableViewCell: UITableViewCell {
var iconImageView = UIImageView()
var titleLabel = UILabel()
override init(style: UITableViewCellStyle, reuseIdentifier: String?) {
super.init(style: style, reuseIdentifier: reuseIdentifier)
self.contentView.addSubview(iconImageView)
self.contentView.addSubview(titleLabel)
self.setAllAttributes()
}
func setAllAttributes(){
self.setLayout()
self.setFrame()
}
func setLayout(){
iconImageView.snp.makeConstraints { (make) in
make.center.equalToSuperview()
make.width.height.equalTo(30)
}
titleLabel.snp.makeConstraints { (make) in
make.top.equalTo(iconImageView.snp.bottom).offset(5)
make.width.equalTo(iconImageView)
make.left.equalTo(iconImageView)
}
}
func setData(image:String,title:String){
self.iconImageView.image = UIImage(named:image)
self.titleLabel.text = title
}
func setFrame(){
self.titleLabel.textAlignment = .center
self.titleLabel.font = BigFont
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
protocol LeftDrawerViewControllerDelegate:NSObjectProtocol {
func pushViewController(index:Int)
}
class LeftDrawerViewController: UIViewController ,UITableViewDelegate,UITableViewDataSource{
let leftCellIdentifier = "leftCell"
let iconArray:[(image:String,title:String)] = [
("icon_home","首页"),
("icon_collection_gray_big","收藏"),
("icon_message_big","消息"),
("icon_position","发现"),
("icon_set","设置")
]
weak var delegate : LeftDrawerViewControllerDelegate!
var tableView = UITableView()
override func viewDidLoad() {
super.viewDidLoad()
self.setTableView()
}
private func setTableView(){
self.view.addSubview(tableView)
self.tableView.frame = self.view.bounds
tableView.separatorStyle = .none
tableView.delegate = self
tableView.dataSource = self
tableView.register(LeftDrawerTableViewCell.self, forCellReuseIdentifier: leftCellIdentifier)
}
//MARK:- UITableViewDataSource
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return 5
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: leftCellIdentifier, for: indexPath) as! LeftDrawerTableViewCell
if indexPath.row%2 != 0{
cell.contentView.backgroundColor = HiTaoBackgroundColor
}
cell.setData(image: iconArray[indexPath.row].image, title: iconArray[indexPath.row].title)
return cell
}
//MARK:-UITableViewDelegate
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
return SCREEN_HEIGTH/5
}
func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
switch indexPath.row {
case 3:
let appDelegate = UIApplication.shared.delegate as! AppDelegate
appDelegate.leftSlideVC.closeLeftView()
let brandViewController = BrandViewController()
brandViewController.hidesBottomBarWhenPushed = true
let mainTabBarViewController = appDelegate.leftSlideVC.mainVC as! MainTabBarViewController
(mainTabBarViewController.viewControllers?[mainTabBarViewController.selectedIndex] as! UINavigationController).pushViewController(brandViewController, animated: true)
default:
print("侧边栏点击事件")
}
}
}
<file_sep>//
// ColorTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/24.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class SpecTableViewCell: UITableViewCell {
var contentLabel = UILabel()
var selectImageView = UIImageView()
override init(style: UITableViewCellStyle, reuseIdentifier: String?) {
super.init(style: style, reuseIdentifier: reuseIdentifier)
self.contentView.addSubview(selectImageView)
self.contentView.addSubview(contentLabel)
self.setAllAttributes()
}
func setAllAttributes(){
self.setLayout()
self.setFormat()
}
func setLayout(){
contentLabel.snp.makeConstraints { (make) in
make.center.equalToSuperview()
}
}
//Size the selection image view to match the label's text
override func layoutIfNeeded() {
super.layoutIfNeeded()
self.selectImageView.frame = CGRect(x: contentLabel.frame.origin.x - 5, y: contentLabel.frame.origin.y - 5 , width: contentLabel.frame.size.width + 10, height: contentLabel.frame.size.height + 10)
}
//Set the label's text and color
func setData(textString:String){
self.contentLabel.text = textString
self.contentLabel.textColor = customGray
}
func setFormat(){
contentLabel.textAlignment = .center
contentLabel.font = MiddleFont
contentLabel.textColor = customGray
selectImageView.image = UIImage(named:"line_51x51")
selectImageView.isHidden = true
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func awakeFromNib() {
super.awakeFromNib()
// Initialization code
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
}
<file_sep>//
// WaterFlowTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/11.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class WaterFlowTableViewCell: UITableViewCell,UICollectionViewDataSource {
let layout = WaterFlowViewLayout()
let ReuseIdentifier = "cell"
@IBOutlet weak var waterFlowView: UICollectionView!
override func awakeFromNib() {
super.awakeFromNib()
waterFlowView.dataSource = self
self.waterFlowView.register(UICollectionViewCell.self, forCellWithReuseIdentifier: ReuseIdentifier)
layout.columnCount = 3
layout.columnMargin = 10.5
layout.rowMargin = 10
layout.sectionInsert = UIEdgeInsets(top: 10, left: 10, bottom: 10, right: 10)
layout.itemHeightBlock = {(a,b) in
return CGFloat(80 + arc4random_uniform(50))
}
waterFlowView.collectionViewLayout = layout
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int{
return 100
}
open func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell{
let cell = collectionView.dequeueReusableCell(withReuseIdentifier: ReuseIdentifier, for: indexPath)
cell.backgroundColor = UIColor.randomColor()
return cell
}
}
<file_sep>//
// DetailsStoryboardHelper.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/11/2.
// Copyright © 2016年 周. All rights reserved.
//
import Foundation
class DetailsStoryboardHelper: NSObject {
static let storyboardName = "Details"
static let navigationController = "classNavigationController"
static let confirmOrderViewController = "confirmOrderViewController"
class func instantiateViewController( identifier: String) -> UIViewController {
let storyboard:UIStoryboard!
storyboard = UIStoryboard(name:self.storyboardName, bundle: nil)
let viewController = storyboard.instantiateViewController(withIdentifier: identifier)
return viewController
}
}
<file_sep>//
// MineOrderChildViewController.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/20.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
import XLPagerTabStrip
class MineOrderChildViewController: UIViewController , IndicatorInfoProvider ,UITableViewDataSource,UITableViewDelegate{
var index:Int!
let mineOrderCellIdentifier = "mineOrderCell"
let mineOrdeBottomCellIdentifier = "mineOrderBottomCell"
var itemInfo : IndicatorInfo!
var tableView = UITableView(frame: CGRect.zero, style: .grouped)
var orderCountArray = [3,4,5,6,2,3,5,6,2,3]
init(itemInfo: IndicatorInfo,index:Int) {
self.itemInfo = itemInfo
self.index = index
super.init(nibName: nil, bundle: nil)
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func viewDidLoad() {
super.viewDidLoad()
self.view.addSubview(tableView)
self.setTableView()
}
private func setTableView(){
tableView.snp.makeConstraints { (make) in
make.edges.equalToSuperview()
}
self.tableView.delegate = self
self.tableView.dataSource = self
self.tableView.register(MineOrderProductTableViewCell.self, forCellReuseIdentifier: mineOrderCellIdentifier)
self.tableView.register(MineOrderBottomTableViewCell.self, forCellReuseIdentifier: mineOrdeBottomCellIdentifier)
self.tableView.separatorStyle = .none
self.tableView.backgroundColor = HiTaoBackgroundColor
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
// MARK: - UITableViewDataSource
func numberOfSections(in tableView: UITableView) -> Int {
return 10
}
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return orderCountArray[section] + 1
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
var cell: UITableViewCell
//The last row uses a different layout
if indexPath.row == orderCountArray[indexPath.section]{
cell = tableView.dequeueReusableCell(withIdentifier: mineOrdeBottomCellIdentifier, for: indexPath)
}else{
cell = tableView.dequeueReusableCell(withIdentifier: mineOrderCellIdentifier, for: indexPath)
}
return cell
}
//MARK: - UITableViewDelegate
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat
{
if indexPath.row == orderCountArray[indexPath.section]{
return 50
}else{
return 80
}
}
func tableView(_ tableView: UITableView, heightForHeaderInSection section: Int) -> CGFloat {
switch section {
case 0:
return 1
default:
return 0.0001
}
}
func tableView(_ tableView: UITableView, heightForFooterInSection section: Int) -> CGFloat {
return 10
}
// MARK: - IndicatorInfoProvider
func indicatorInfo(for pagerTabStripController: PagerTabStripViewController) -> IndicatorInfo {
return itemInfo
}
}
<file_sep>//
// MineCollectionViewController.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/11/23.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class MineCollectionViewController: UIViewController,UICollectionViewDelegate,UICollectionViewDataSource {
let collectionCellIdentifier = "collectionCell"
var dataArray:[Bool] = [true,true,false,true,false,true,false,true,true,true]
var collectionView:UICollectionView!
override func viewDidLoad() {
super.viewDidLoad()
self.setCollectionView()
}
private func setCollectionView(){
let layout = WaterFlowViewLayout()
layout.columnCount = 2
layout.columnMargin = 10
layout.rowMargin = 10
layout.sectionInsert = UIEdgeInsets(top: 0, left: 10, bottom: 10, right: 10)
//Cells that show the action button row are taller than those that do not
layout.itemHeightBlock = {[weak self](itemWitdh,indexPath) in
return (self?.dataArray[indexPath.row])! ? (SCREEN_WIDTH-30)/2 + 80 : (SCREEN_WIDTH-30)/2 + 50
}
self.collectionView = UICollectionView(frame: CGRect.zero, collectionViewLayout:layout)
self.collectionView.backgroundColor = HiTaoBackgroundColor
self.collectionView.delegate = self
self.collectionView.dataSource = self
self.view.addSubview(collectionView)
collectionView.snp.makeConstraints { (make) in
make.left.right.top.bottom.equalToSuperview()
}
self.collectionView.register(MineCollectionCollViewCell.self, forCellWithReuseIdentifier: collectionCellIdentifier)
}
//UICollectionViewDataSource
func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
return 10
}
func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
let cell = collectionView.dequeueReusableCell(withReuseIdentifier: collectionCellIdentifier, for: indexPath) as! MineCollectionCollViewCell
cell.setData(isBtnHaving: dataArray[indexPath.row])
return cell
}
}
<file_sep>//
// SearchViewController.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/25.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class SearchViewController: UIViewController,UITableViewDataSource ,UITableViewDelegate,UITextFieldDelegate,HJTextFieldDelegate,SearchTableViewCellDelegate{
//Whether a search is currently in progress
var isStartSearch :Bool = false{
willSet{
self.topView.isHidden = newValue
if newValue{
self.tableView.frame.origin.y = naviStateBarHeight
}else{
//40 is the height of topView
self.tableView.frame.origin.y = 40 + naviStateBarHeight
}
}
}
//Selected value of the first filter row; changing it adjusts the buttons' heights
var firstCondition:Int!{
//Property observers keep the button heights in sync with the selection
didSet{
//Only restore the old button if something was selected before
if let oldValue = oldValue{
changeOldButtonHeight(tag: oldValue)
}
}
willSet{
changeNewButtonHeight(tag: newValue)
}
}
//Search history entries
var historyArray = ["新品发布","热销排行","本周排行","整","新品发布","热销排行","本周排行","整"]
//Hot tag entries
var iconArray = ["新品发布","热销排行","本周排行","整","新品发布","热销排行","本周排行","整"]
//Search result entries
var searchArray = ["新品发布","热销排行","本周排行","整","新品发布","热销排行","本周排行","整"]
@IBOutlet weak var searchTextField: HJTextField!
@IBOutlet weak var topView: UIView!
@IBOutlet weak var tableView: UITableView!
var iconCellHeight:CGFloat = 0
var historyCellHeight:CGFloat = 0
let searchIconCellIdentifier = "searchIconCell"
let searchHistoryCellIdentifier = "searchHistoryCell"
let searchResultCellIdentifier = "searchResultCell"
override func viewDidLoad() {
super.viewDidLoad()
//Assign the text field delegates
self.searchTextField.delegate = self
self.searchTextField.HJDelegate = self
self.searchTextField.addTarget(self, action: #selector(SearchViewController.textFieldValueChange(_:)), for: .editingChanged)
//Round the corners of the search field
self.searchTextField.ViewToRoundViewBycornerRadius(cornerRadius: 5)
}
@IBAction func selectFirstCondition(_ sender: UIButton) {
self.firstCondition = sender.tag
}
//Restore the height of the previously selected button
func changeOldButtonHeight(tag:Int){
for view in topView.subviews {
if view.tag == tag{
view.frame.size.height += 1
}
}
}
//Reduce the height of the currently selected button to mark it as selected
func changeNewButtonHeight(tag:Int){
for view in topView.subviews {
if view.tag == tag{
view.frame.size.height -= 1
}
}
}
override func viewWillAppear(_ animated: Bool) {
//Make the navigation bar translucent while this screen is visible
self.navigationController?.navigationBar.isTranslucent = true
}
override func viewWillDisappear(_ animated: Bool) {
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
//MARK:- UITableViewDataSource
func numberOfSections(in tableView: UITableView) -> Int {
return isStartSearch ? searchArray.count : 2
}
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return 1
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
//While a search is active, show the result cell
if isStartSearch{
let cell = tableView.dequeueReusableCell(withIdentifier: searchResultCellIdentifier, for: indexPath)
return cell
}
switch indexPath.section {
case 0:
let cell = tableView.dequeueReusableCell(withIdentifier: searchIconCellIdentifier, for: indexPath) as! SearchIconTableViewCell
cell.iconArray = self.iconArray
cell.addButton()
//30 is the button height, 20 is the combined vertical spacing above and below
iconCellHeight = cell.topSpace + 30 + 20
return cell
default:
let cell = tableView.dequeueReusableCell(withIdentifier: searchHistoryCellIdentifier, for: indexPath) as! SearchHistoryTableViewCell
cell.historyArray = self.historyArray
cell.addButton()
historyCellHeight = cell.topSpace + 30 + 20
cell.delegate = self
return cell
}
}
//MARK:- UITableViewDelegate
func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
if isStartSearch{
let vc = SearchResultViewController()
vc.hidesBottomBarWhenPushed = true
self.navigationController?.pushViewController(vc, animated: true)
}
}
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
//While a search is active, every row uses the result-cell height
if isStartSearch{
return 50
}
switch indexPath.section {
case 0:
return iconCellHeight
default:
//64: navigation + status bar, 40: filter row, 1: section 0 header, 10: section 1 header
return SCREEN_HEIGTH - 64 - 40 - 1 - iconCellHeight - 10
}
}
func tableView(_ tableView: UITableView, heightForHeaderInSection section: Int) -> CGFloat {
if isStartSearch{
return 1
}
switch section {
case 0:
return 1
default:
return 10
}
}
func tableView(_ tableView: UITableView, heightForFooterInSection section: Int) -> CGFloat {
return 0.00001
}
func tableView(_ tableView: UITableView, viewForHeaderInSection section: Int) -> UIView? {
//Section 0 (or an active search) gets a 1pt header; other sections get 10pt
let height:CGFloat = section == 0 || isStartSearch ? 1 : 10
let view = UIView(frame: CGRect(x: 0, y: 0, width: SCREEN_WIDTH, height: height))
view.backgroundColor = HiTaoBackgroundColor
return view
}
//MARK:- UITextFieldDelegate
func textField(_ textField: UITextField, shouldChangeCharactersIn range: NSRange, replacementString string: String) -> Bool {
let contentString : String = textField.text!
let newRange = range.toRangeByString(content: contentString)
//The text the field will contain after this change
let string = contentString.replacingCharacters(in: newRange , with: string)
print(string)
return true
}
//MARK:- SearchTableViewCellDelegate
/// Delegate method that clears the search history
func deleteHistoryInSearch() {
//Empty the history array
self.historyArray.removeAll(keepingCapacity: true)
//Reload only the history cell
self.tableView.reloadRows(at: [IndexPath(row: 0, section: 1)], with: .automatic)
}
//Show the scan icon as the text field's right view
func setTextFieldRightScaningView(){
let rightView = UIButton(frame: CGRect(x: 0, y: 0, width:self.searchTextField.frame.height*0.5, height: self.searchTextField.frame.height*0.5))
rightView.setImage(UIImage(named:"scanning_icon"), for: .normal)
rightView.addTarget(self, action: #selector(SearchViewController.scanningTextField(_:)), for:.touchUpInside)
self.searchTextField.rightView = rightView
}
//Show the clear icon as the text field's right view
func setTextFieldRightClearView(){
let rightView = UIButton(frame: CGRect(x: 0, y: 0, width:self.searchTextField.frame.height*0.5, height: self.searchTextField.frame.height*0.5))
rightView.setImage(UIImage(named:"clear_icon"), for: .normal)
rightView.addTarget(self, action: #selector(SearchViewController.clearTextField(_:)), for:.touchUpInside)
self.searchTextField.rightView = rightView
}
func textFieldValueChange(_ sender: UITextField){
if (sender.text?.characters.count)! > 0{
isStartSearch = true
self.setTextFieldRightClearView()
}else{
isStartSearch = false
self.setTextFieldRightScaningView()
}
self.tableView.reloadData()
}
//Clear the input
func clearTextField(_ sender:UIButton){
self.searchTextField.text = ""
isStartSearch = false
self.setTextFieldRightScaningView()
self.tableView.reloadData()
}
func scanningTextField(_ sender:AnyObject?){
print("start scanning")
}
}
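// Sketch (assumption): what a helper like NSRange.toRangeByString(content:) used above is expected to
// do - convert the NSRange from UIKit delegate callbacks into a Swift String range. This is not the
// project's actual implementation, just the usual UTF-16 based conversion.
private func exampleSwiftRange(from nsRange: NSRange, in content: String) -> Range<String.Index>? {
guard let from16 = content.utf16.index(content.utf16.startIndex, offsetBy: nsRange.location, limitedBy: content.utf16.endIndex),
let to16 = content.utf16.index(from16, offsetBy: nsRange.length, limitedBy: content.utf16.endIndex),
let from = from16.samePosition(in: content),
let to = to16.samePosition(in: content)
else { return nil }
return from..<to
}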
<file_sep>//
// ConfirmOrderViewController.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/11/2.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class ConfirmOrderViewController: UIViewController,UITableViewDataSource ,UITableViewDelegate{
let addressCellIdentifier = "addressCell"
let goodsCellIdentifier = "goodsCell"
let moreCellIdentifier = "moreCell"
let discountCellIdentifier = "discountCell"
let priceCellIdentifier = "priceCell"
let payModeHeaderCellIdentifier = "payModeHeaderCell"
let payModeCellIdentifier = "payModeCell"
let payModelHeaderIdentifier = "payModeHeader"
var isOpenPayMode = false
var payModeCode :Int! = 0
@IBOutlet weak var tableView: UITableView!
override func viewDidLoad() {
super.viewDidLoad()
self.tableView.register(PayModeHeaderView.self, forHeaderFooterViewReuseIdentifier: payModelHeaderIdentifier)
}
@IBAction func confirmPay(_ sender:UIButton){
print("确认支付")
}
//MARK:- UITableViewDataSource
func numberOfSections(in tableView: UITableView) -> Int {
return 5
}
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
switch section {
case 0:
return 1
case 1:
return 3
case 2:
return 1
case 3:
return 1
default:
return isOpenPayMode ? 3 : 1
}
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
switch indexPath.section {
case 0:
let cell = tableView.dequeueReusableCell(withIdentifier: addressCellIdentifier, for: indexPath)
return cell
case 1:
switch indexPath.row {
case 0...1:
let cell = tableView.dequeueReusableCell(withIdentifier: goodsCellIdentifier, for: indexPath)
return cell
default:
let cell = tableView.dequeueReusableCell(withIdentifier: moreCellIdentifier, for: indexPath)
return cell
}
case 2:
let cell = tableView.dequeueReusableCell(withIdentifier: discountCellIdentifier, for: indexPath)
return cell
case 3:
let cell = tableView.dequeueReusableCell(withIdentifier: priceCellIdentifier, for: indexPath)
return cell
default:
switch indexPath.row {
case 0:
let cell = tableView.dequeueReusableCell(withIdentifier: payModeHeaderCellIdentifier, for: indexPath) as! PayModeHeaderTableViewCell
return cell
default:
let cell = tableView.dequeueReusableCell(withIdentifier: payModeCellIdentifier, for: indexPath) as! PayModeTableViewCell
if payModeCode == indexPath.row{
cell.selectImageView.image = #imageLiteral(resourceName: "roundSelect")
}else{
cell.selectImageView.image = #imageLiteral(resourceName: "roundUnSelect")
}
return cell
}
}
}
//MARK:- UITableViewDelegate
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
switch indexPath.section {
case 0:
return 80
case 1:
switch indexPath.row {
case 0...1:
return 100
default:
return 50
}
case 2:
return 50
case 3:
return 85
default:
return 50
}
}
func tableView(_ tableView: UITableView, heightForHeaderInSection section: Int) -> CGFloat {
switch section {
case 0:
return 1
case 1...3:
return 10
default:
return 5
}
}
func tableView(_ tableView: UITableView, heightForFooterInSection section: Int) -> CGFloat {
return 0.000001
}
func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
if indexPath.section == 4{
if indexPath.row == 0{
let cell = tableView.cellForRow(at: indexPath) as! PayModeHeaderTableViewCell
self.isOpenPayMode = !self.isOpenPayMode
UIView.animate(withDuration: 0.3, animations: {
if self.isOpenPayMode{
tableView.insertRows(at: [IndexPath(row: 1, section: 4),IndexPath(row: 2, section: 4)], with: .automatic)
tableView.setContentOffset(CGPoint(x: 0, y: 100), animated: true)
}else{
tableView.deleteRows(at: [IndexPath(row: 1, section: 4),IndexPath(row: 2, section: 4)], with: .automatic)
}
let transform = cell.isOpenImageView.transform.rotated(by: CGFloat(M_PI))
cell.isOpenImageView.transform = transform
}, completion: nil)
}else{
self.payModeCode = indexPath.row
self.tableView.reloadRows(at: [IndexPath(row: 1, section: 4),IndexPath(row: 2, section: 4)], with: .automatic)
}
}
}
}
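// Sketch (assumption, not part of the original controller): the expand/collapse pattern used in
// tableView(_:didSelectRowAt:) above, isolated. The flag backing numberOfRowsInSection must be flipped
// before insertRows/deleteRows runs, otherwise UITableView raises a consistency exception.
private func exampleTogglePayModeRows(on tableView: UITableView, isOpen: inout Bool) {
isOpen = !isOpen
let expandedRows = [IndexPath(row: 1, section: 4), IndexPath(row: 2, section: 4)]
if isOpen {
tableView.insertRows(at: expandedRows, with: .automatic)
} else {
tableView.deleteRows(at: expandedRows, with: .automatic)
}
}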
<file_sep>//
// HJTextField.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/25.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
protocol HJTextFieldDelegate:NSObjectProtocol {
func scanningTextField(_ sender:AnyObject?)
}
@IBDesignable
class HJTextField: UITextField {
weak var HJDelegate :HJTextFieldDelegate!
override func awakeFromNib() {
let rightTap = UITapGestureRecognizer(target: self, action: #selector(HJTextField.scanning(_:)))
self.rightView?.addGestureRecognizer(rightTap)
}
func scanning(_ tap :UITapGestureRecognizer){
HJDelegate.scanningTextField(tap.view)
}
@IBInspectable var leftImage: UIImage? {
set {
let leftImgView = UIButton()
leftImgView.setImage(newValue, for: .normal)
leftImgView.frame = CGRect(x: frame.height*0.1,
y: frame.height*0.1,
width: frame.height*0.5,
height: frame.height*0.5)
leftView = leftImgView
leftViewMode = .always
}
get {
if let leftImgView = leftView as? UIButton {
return leftImgView.image(for: .normal)
}
return nil
}
}
@IBInspectable var rightImage: UIImage? {
set {
let rightImgView = UIButton()
rightImgView.setImage(newValue, for: .normal)
rightImgView.frame = CGRect(x: frame.height*0.1,
y: frame.height*0.1,
width: frame.height*0.5,
height: frame.height*0.5)
rightView = rightImgView
rightViewMode = .always
}
get {
if let rightImgView = rightView as? UIButton {
return rightImgView.image(for: .normal)
}
return nil
}
}
override func leftViewRect(forBounds bounds: CGRect) -> CGRect {
var rect = super.leftViewRect(forBounds: bounds)
rect = CGRect(x: rect.origin.x+10, y: rect.origin.y, width: rect.size.width, height: rect.size.height)
return rect
}
override func rightViewRect(forBounds bounds: CGRect) -> CGRect {
var rect = super.rightViewRect(forBounds: bounds)
rect = CGRect(x: rect.origin.x-10, y: rect.origin.y, width: rect.size.width, height: rect.size.height)
return rect
}
}
<file_sep>//
// ClassViewController.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/12.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class AllClassViewController: UIViewController,UICollectionViewDataSource,UICollectionViewDelegate {
@IBOutlet weak var collectionView: UICollectionView!
override func viewDidLoad() {
super.viewDidLoad()
self.addBackItem()
self.setCollectionLayout()
//Register a fallback cell class (the "collectionCell" dequeued below is defined in the storyboard)
self.collectionView.register(UICollectionViewCell.self, forCellWithReuseIdentifier: "cell")
}
override func viewWillAppear(_ animated: Bool) {
let appDelegate = UIApplication.shared.delegate as! AppDelegate
appDelegate.leftSlideVC.setPanEnabled(true)
}
override func viewWillDisappear(_ animated: Bool) {
let appDelegate = UIApplication.shared.delegate as! AppDelegate
appDelegate.leftSlideVC.setPanEnabled(false)
}
//Configure the collection view layout
private func setCollectionLayout(){
let sectionInset = UIEdgeInsets(top: 0, left: 10, bottom: 10, right: 10)
let minimumLineSpacing :CGFloat = 0.0
let minimumInteritemSpacing:CGFloat = 0.0
let itemSize = CGSize(width: (SCREEN_WIDTH-20)/2, height: (SCREEN_WIDTH-20)/2*(5.0/6.0))
let layout = PublicCollectionViewLayout(sectionInset: sectionInset, minimumLineSpacing: minimumLineSpacing, minimumInteritemSpacing: minimumInteritemSpacing, itemSize: itemSize)
self.collectionView.collectionViewLayout = layout
}
//MARK:- UICollectionViewDataSource
func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int{
return 10
}
func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell{
let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "collectionCell", for: indexPath) as! ClassCollectionViewCell
if [0,3].contains(indexPath.row%4) {
cell.backgroundColor = UIColor.hexStringToColor("f5f2f1")
}else{
cell.backgroundColor = UIColor.hexStringToColor("ffffff")
}
cell.imageView.image = UIImage(named:classImageArray[indexPath.row])
cell.titleLabel.text = classTitleArray[indexPath.row]
return cell
}
//MARK:- UICollectionViewDelegate
func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath){
let vc = ClassStoryboardHelper.instantiateViewController(identifier: ClassStoryboardHelper.classViewController)
vc.hidesBottomBarWhenPushed = true
self.navigationController?.pushViewController(vc, animated: true)
}
}
<file_sep>//
// ResultCollectionViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/26.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class ResultCollectionViewCell: UICollectionViewCell {
var isCollection = false
var starCount = 888
var goodsImageView = UIImageView()
var goodsNameLabel = UILabel()
var priceLabel = UILabel()
var starButton = UIButton()
override init(frame: CGRect) {
super.init(frame: frame)
self.contentView.backgroundColor = .white
self.contentView.addSubview(goodsImageView)
self.contentView.addSubview(goodsNameLabel)
self.contentView.addSubview(priceLabel)
self.contentView.addSubview(starButton)
self.setAllAttributes()
}
func setAllAttributes(){
self.setLayout()
self.setData()
self.setFormat()
}
func setLayout(){
goodsImageView.snp.makeConstraints { (make) in
make.width.equalToSuperview()
make.height.equalTo(goodsImageView.snp.width)
make.left.top.equalToSuperview()
}
goodsNameLabel.snp.makeConstraints { (make) in
make.left.equalTo(goodsImageView).offset(10)
make.top.equalTo(goodsImageView.snp.bottom).offset(10)
make.height.equalTo(11)
}
priceLabel.snp.makeConstraints { (make) in
make.left.equalTo(goodsNameLabel)
make.top.equalTo(goodsNameLabel.snp.bottom).offset(10)
}
starButton.snp.makeConstraints { (make) in
make.right.equalToSuperview().offset(-10)
make.centerY.equalTo(priceLabel)
}
}
func setData(){
self.goodsImageView.image = UIImage(named:imageDefaultName)
self.goodsNameLabel.text = "ML时尚鸭舌帽"
self.priceLabel.text = "¥120"
self.starButton.setTitle("\(starCount)", for: .normal)
}
func setFormat(){
self.goodsNameLabel.setFontAndTextColor(font: MiddleFont, textColor: UIColor.black)
self.priceLabel.setFontAndTextColor(font: MiddleFont, textColor: customRed)
self.starButton.titleLabel?.font = MiddleFont
self.starButton.setImage(UIImage(named:"unCollect_icon"), for: .normal)
self.starButton.setTitleColor(customGray, for: .normal)
self.starButton.addTarget(self, action: #selector(ResultCollectionViewCell.collectGoods(_:)),for: .touchUpInside)
}
func collectGoods(_ sender: UIButton) {
isCollection = !isCollection
if isCollection{
starCount += 1
self.starButton.setImage(UIImage(named:"collect_icon"), for: .normal)
self.starButton.setTitleColor(customRed, for: .normal)
self.starButton.setTitle("\(starCount)", for: .normal)
}else{
starCount -= 1
self.starButton.setImage(UIImage(named:"unCollect_icon"), for: .normal)
self.starButton.setTitleColor(customGray, for: .normal)
self.starButton.setTitle("\(starCount)", for: .normal)
}
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
<file_sep>//
// CountDownCollectionHeaderView.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/27.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class CountDownCollectionHeaderView: UICollectionReusableView {
var timeSeconds = 10000
var messageLabel = UILabel()
var timeLabel = UILabel()
var leftLineView = UIView()
var rightLineView = UIView()
override init(frame: CGRect) {
super.init(frame: frame)
self.addSubview(messageLabel)
self.addSubview(timeLabel)
self.addSubview(leftLineView)
self.addSubview(rightLineView)
self.setAllAttributes()
}
func setAllAttributes(){
self.setLayout()
self.setData()
self.setFormat()
}
func setLayout(){
messageLabel.snp.makeConstraints { (make) in
make.centerX.equalToSuperview()
make.top.equalToSuperview().offset(10)
}
timeLabel.snp.makeConstraints { (make) in
make.centerX.equalToSuperview()
make.top.equalTo(messageLabel.snp.bottom).offset(7.5)
}
}
func setData(){
//Capture self weakly so the reusable header is not retained by the countdown closure
CountDown.startTime(time: timeSeconds) { [weak self] (timeout) in
self?.setTime(timeout: timeout)
}
}
func setFormat(){
self.messageLabel.text = "距离结束还剩"
self.messageLabel.setFontAndTextColor(font: SmallFont, textColor: customGray)
self.timeLabel.setFontAndTextColor(font: SmallFont, textColor: .black)
}
/// Update the countdown label
///
/// - parameter timeout: remaining seconds of the countdown
func setTime(timeout:Int) {
let timeClock = TimeHelper.getTimeOnClock(second: timeout)
self.timeLabel.text = "\(timeClock.hour):\(timeClock.minute):\(timeClock.second)"
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
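// Sketch (assumption): the kind of split a helper like TimeHelper.getTimeOnClock(second:) used above
// is expected to perform; not the project's actual implementation.
private func exampleClockComponents(from seconds: Int) -> (hour: Int, minute: Int, second: Int) {
return (seconds / 3600, (seconds % 3600) / 60, seconds % 60)
}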
<file_sep>//
// NewReleaseTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/11/18.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class NewReleaseTableViewCell: UITableViewCell {
//Width for the first stretch pass: final width / 2 + original image width / 2
let tempWidth = Int((SCREEN_WIDTH*5/8)/2 + 200/2)
var bgImageView = UIImageView()
var centerImageView = UIImageView()
var typeLabel = UILabel()
var nameLabel = UILabel()
override init(style: UITableViewCellStyle, reuseIdentifier: String?) {
super.init(style: style, reuseIdentifier: reuseIdentifier)
self.contentView.addSubview(bgImageView)
self.contentView.addSubview(centerImageView)
self.centerImageView.addSubview(typeLabel)
self.centerImageView.addSubview(nameLabel)
self.setAllAttributes()
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func setAllAttributes(){
self.setLayout()
self.setData()
self.setFrame()
}
func setLayout(){
bgImageView.snp.makeConstraints { (make) in
make.edges.equalToSuperview()
}
centerImageView.snp.makeConstraints { (make) in
make.center.equalToSuperview()
make.width.equalTo(tempWidth)
make.height.equalTo(40)
}
typeLabel.snp.makeConstraints { (make) in
make.centerX.equalToSuperview()
make.centerY.equalTo(self.centerImageView.snp.bottom).offset(-10)
}
nameLabel.snp.makeConstraints { (make) in
make.centerX.equalToSuperview()
make.centerY.equalTo(self.centerImageView.snp.top).offset(10)
}
}
func setData(){
self.bgImageView.image = UIImage(named:imageDefaultName)
self.typeLabel.text = "美食特产"
self.nameLabel.text = "鲜花饼"
}
func setFrame(){
let bgImage = UIImage(named:"content_img_bg")
self.centerImageView.image = self.scaleImageView(firstImage: bgImage!)
self.typeLabel.font = SmallFont
self.nameLabel.font = BigFont
}
func scaleImageView(firstImage:UIImage)-> UIImage{
//First stretch pass
let firstScaleImage = firstImage.stretchableImage(withLeftCapWidth:Int(firstImage.size.width * 0.8) , topCapHeight: Int(firstImage.size.height * 0.5))
UIGraphicsBeginImageContextWithOptions(CGSize(width: self.tempWidth, height: 40), false, UIScreen.main.scale)
//Without rounding, tempWidth would be a value like 217.1875, which leaves a faint vertical seam; the closer the fraction is to 0.5, the thinner the seam
//The seam appears to come from sub-pixel rendering (anything under half a pixel is not fully drawn), so the width is always rounded to a whole number to avoid the problem
firstScaleImage.draw(in:CGRect(x: 0, y:0 , width: self.tempWidth , height: 40) , blendMode: .normal, alpha: 1)
let secondImage = UIGraphicsGetImageFromCurrentImageContext()!
UIGraphicsEndImageContext()
//Re-apply the constraints with the final width
centerImageView.snp.remakeConstraints { (make) in
make.center.equalToSuperview()
make.width.equalTo(Int(SCREEN_WIDTH*5/8))
make.height.equalTo(40)
}
//Second stretch pass
let secondScaleImage = secondImage.stretchableImage(withLeftCapWidth:Int(secondImage.size.width * 0.2) , topCapHeight: Int(secondImage.size.height * 0.5))
return secondScaleImage
}
//Alternative way to stretch, using resizableImage(withCapInsets:):
// let firstScaleImage = firstImage.resizableImage(withCapInsets: UIEdgeInsets(top: firstImage.size.height * 0.5, left: firstImage.size.width * 0.1, bottom: firstImage.size.height * 0.5-1, right: firstImage.size.width * 0.9-1) )
// let secondScaleImage = secondImage.resizableImage(withCapInsets: UIEdgeInsets(top: secondImage.size.height * 0.5, left: secondImage.size.width * 0.9-1, bottom: secondImage.size.height * 0.5-1, right: secondImage.size.width * 0.1 ))
override func awakeFromNib() {
super.awakeFromNib()
// Initialization code
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
}
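// Minimal sketch of the stretching idea used in scaleImageView above (assumption: any capsule-like
// background image). A single resizableImage pass plus one redraw is often enough when the artwork only
// has to grow along one axis; the two-pass version above exists to keep both rounded ends undistorted.
private func exampleStretchedBackground(from image: UIImage, to size: CGSize) -> UIImage? {
let insets = UIEdgeInsets(top: image.size.height * 0.5,
left: image.size.width * 0.5,
bottom: image.size.height * 0.5 - 1,
right: image.size.width * 0.5 - 1)
let stretchable = image.resizableImage(withCapInsets: insets)
UIGraphicsBeginImageContextWithOptions(size, false, UIScreen.main.scale)
defer { UIGraphicsEndImageContext() }
stretchable.draw(in: CGRect(origin: .zero, size: size))
return UIGraphicsGetImageFromCurrentImageContext()
}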
<file_sep>//
// EvaluateTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/12/10.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class EvaluateTableViewCell: UITableViewCell {
var headerImageView = UIImageView()
var userNameLabel = UILabel()
var dateLabel = UILabel()
var contentLabel = UILabel()
var sizeLabel = UILabel()
var colorLabel = UILabel()
var starButton = UIButton()
override init(style: UITableViewCellStyle, reuseIdentifier: String?) {
super.init(style: style, reuseIdentifier: reuseIdentifier)
self.contentView.addSubview(headerImageView)
self.contentView.addSubview(userNameLabel)
self.contentView.addSubview(dateLabel)
self.contentView.addSubview(contentLabel)
self.contentView.addSubview(sizeLabel)
self.contentView.addSubview(colorLabel)
self.contentView.addSubview(starButton)
self.setAllAttributes()
}
func setAllAttributes(){
self.setLayout()
self.setData()
self.setFrame()
}
func setLayout(){
headerImageView.snp.makeConstraints { (make) in
make.width.height.equalTo(20)
make.left.top.equalTo(15)
}
userNameLabel.snp.makeConstraints { (make) in
make.left.equalTo(headerImageView.snp.right).offset(7.5)
make.centerY.equalTo(headerImageView)
}
dateLabel.snp.makeConstraints { (make) in
make.centerY.equalTo(headerImageView)
make.right.equalToSuperview().offset(-10)
}
contentLabel.snp.makeConstraints { (make) in
make.left.equalTo(headerImageView)
make.top.equalTo(headerImageView.snp.bottom).offset(10)
}
sizeLabel.snp.makeConstraints { (make) in
make.left.equalTo(headerImageView)
make.top.equalTo(contentLabel.snp.bottom).offset(10)
}
colorLabel.snp.makeConstraints { (make) in
make.left.equalTo(sizeLabel.snp.right).offset(10)
make.centerY.equalTo(sizeLabel)
}
starButton.snp.makeConstraints { (make) in
make.right.equalToSuperview().offset(-10)
make.centerY.equalTo(sizeLabel)
}
}
func setData(){
self.headerImageView.image = UIImage(named:imageDefaultName)
self.userNameLabel.text = "设计师是逗逼"
self.dateLabel.text = "2011-11-11"
self.contentLabel.text = "对面的女孩看过来看过来"
self.sizeLabel.text = "尺寸:M"
self.colorLabel.text = "颜色:白色"
self.starButton.setTitle("22".toMinLengthString(minLength: 3), for: .normal)
}
func setFrame(){
//Run a layout pass so the frame is available for rounding the avatar
self.layoutIfNeeded()
self.headerImageView.ViewToRoundView()
self.userNameLabel.setFontAndTextColor(font: SmallFont, textColor: customGray)
self.dateLabel.setFontAndTextColor(font: SmallFont, textColor: customGray)
self.contentLabel.font = MiddleFont
self.sizeLabel.setFontAndTextColor(font: SmallFont, textColor: customGray)
self.colorLabel.setFontAndTextColor(font: SmallFont, textColor: customGray)
self.starButton.setTitleColor(customGray, for: .normal)
self.starButton.titleLabel?.font = SmallFont
self.starButton.setImage(UIImage(named:"icon_like"), for: .normal)
self.starButton.contentHorizontalAlignment = .right
self.starButton.imageEdgeInsets.right = 5
starButton.snp.makeConstraints { (make) in
make.width.equalTo(self.starButton.frame.width + 5)
}
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
<file_sep>//
// SearchHistoryTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/25.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
protocol SearchTableViewCellDelegate:NSObjectProtocol {
func deleteHistoryInSearch()
}
class SearchHistoryTableViewCell: UITableViewCell {
var historyArray :[String] = []
let buttonHeight:CGFloat = 30
let buttonVerticalSpace:CGFloat = 20
let buttonHorizontalSpace:CGFloat = 20
var buttonWidthArray :[CGFloat] = []
var leftStartSpace :CGFloat = 10
var topSpace:CGFloat = 50
weak var delegate :SearchTableViewCellDelegate!
@IBAction func deleteAction(_ sender: UIButton) {
self.delegate.deleteHistoryInSearch()
}
override func awakeFromNib() {
super.awakeFromNib()
self.addButton()
}
func addButton(){
//Reset the layout cursors
leftStartSpace = 10
topSpace = 50
//Remove the old buttons before every refresh to avoid stale reuse (no better approach found yet)
for view in self.contentView.subviews{
if let view = view as? UIButton{
//The delete button placed in the storyboard has tag 100; keep it and remove everything else
guard view.tag == 100 else {
view.removeFromSuperview()
continue
}
}
}
//Clear the cached widths first
buttonWidthArray.removeAll(keepingCapacity: true)
for index in 0..<historyArray.count {
//Compute this button's left offset from the widths already placed on the line
var leftSpace = leftStartSpace
for buttonWidth in buttonWidthArray {
leftSpace += buttonWidth + buttonHorizontalSpace
}
//Button width derived from the title text and font
let width = historyArray[index].getStringWidth(font: MiddleFont) + 30
if leftSpace + buttonHorizontalSpace + width + 10 > SCREEN_WIDTH{
//Wrap to a new line and clear the cached widths
buttonWidthArray.removeAll(keepingCapacity: true)
topSpace += buttonVerticalSpace + buttonHeight
leftStartSpace = 10
leftSpace = leftStartSpace
}
let historyButton = UIButton()
historyButton.titleLabel?.font = MiddleFont
historyButton.setTitle(historyArray[index], for: .normal)
historyButton.setTitleColor(customGray, for: .normal)
historyButton.setBackgroundImage(UIImage(named:"background_icon"), for: .normal)
historyButton.tag = index
self.contentView.addSubview(historyButton)
historyButton.addTarget(self, action: #selector(SearchHistoryTableViewCell.selectFirstCondition(_:)), for: .touchUpInside)
buttonWidthArray.append(width)
historyButton.snp.makeConstraints({ (make) in
make.width.equalTo(width)
make.height.equalTo(buttonHeight)
make.left.equalTo(leftSpace)
make.top.equalTo(topSpace)
})
}
}
func selectFirstCondition(_ sender:UIButton){
switch sender.tag {
case 0:
print("选中条件1")
self.changeButtonColor(sender: sender)
default:
print("选中其他条件")
self.changeButtonColor(sender: sender)
}
}
func changeButtonColor(sender:UIButton){
for view in self.contentView.subviews{
if let view = view as? UIButton{
view.setBackgroundImage(UIImage(named:"background_icon"), for: .normal)
view.setTitleColor(customGray, for: .normal)
}
}
sender.setBackgroundImage(UIImage(named:"selectBackground_icon"), for: .normal)
sender.setTitleColor(.white, for: .normal)
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
}
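// Sketch (assumption): the kind of measurement a helper like String.getStringWidth(font:) used above
// is expected to perform; not the project's actual implementation.
private func exampleTitleWidth(_ title: String, font: UIFont) -> CGFloat {
return (title as NSString).size(attributes: [NSFontAttributeName: font]).width
}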
<file_sep>//
// PointBuyChildCollectionViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/26.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
import SnapKit
//Unused placeholder extension left over from experimentation; it has no effect
extension Dictionary where Key:ExpressibleByStringLiteral,Value:Any {
func name(){
}
}
class ProgressBar: UIView {
var widthConstraint:Constraint?
var overView = UIView()
var overLabel = UILabel()
override func draw(_ rect: CGRect) {
super.draw(rect)
}
override init(frame: CGRect) {
super.init(frame: frame)
self.addSubview(overView)
self.addSubview(overLabel)
self.setAllAttributes()
}
convenience init(allCount:Int,overCount:Int){
self.init(frame: CGRect.zero)
}
func setAllAttributes(){
self.backgroundColor = UIColor.hexStringToColor("ffd4dc")
overView.snp.makeConstraints { (make) in
make.left.top.bottom.equalToSuperview()
make.width.equalToSuperview()
}
overView.backgroundColor = UIColor.hexStringToColor("ff8fa4")
overLabel.snp.makeConstraints { (make) in
make.center.equalToSuperview()
make.height.equalToSuperview().multipliedBy(0.8)
}
overLabel.setFontAndTextColor(font: MiddleFont, textColor: .white)
}
func setOverCount(overCount:Int,allCount:Int){
let radio = CGFloat(overCount)/CGFloat(allCount)
//snp.updateConstraints cannot change this constraint: the width uses a multiplier, and the multiplier
//of an NSLayoutConstraint is immutable, so only constant-based constraints can be updated in place.
//The constraint is therefore remade below.
// overView.snp.updateConstraints { (make) in
// make.width.equalToSuperview().offset(-100)
// }
overView.snp.remakeConstraints { (make) in
make.left.top.bottom.equalToSuperview()
make.width.equalToSuperview().multipliedBy(radio)
}
overLabel.text = "已抢\(overCount)"
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
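// Sketch (assumption, not part of the original view): a constant-based width constraint *can* be
// updated in place with snp.updateConstraints, while the proportional (multiplier) constraint used by
// setOverCount above cannot - which is why that method falls back to remakeConstraints.
private func exampleUpdatableWidth(for view: UIView, initialWidth: CGFloat) {
view.snp.makeConstraints { (make) in
make.width.equalTo(initialWidth)
}
//Later, only the constant changes, so updating is enough:
view.snp.updateConstraints { (make) in
make.width.equalTo(initialWidth * 2)
}
}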
class PointBuyChildCollectionViewCell: UICollectionViewCell {
var goodsImageView = UIImageView()
var goodsNameLabel = UILabel()
var newPriceLabel = UILabel()
var oldPriceLabel = UILabel()
var buyButton = UIButton()
var progressBar = ProgressBar()
override init(frame: CGRect) {
super.init(frame: frame)
self.contentView.backgroundColor = .white
self.contentView.addSubview(goodsImageView)
self.contentView.addSubview(goodsNameLabel)
self.contentView.addSubview(newPriceLabel)
self.contentView.addSubview(oldPriceLabel)
self.contentView.addSubview(buyButton)
self.contentView.addSubview(progressBar)
self.setAllAttributes()
}
func setAllAttributes(){
self.setLayout()
self.setData()
self.setFormat()
}
func setLayout(){
goodsImageView.snp.makeConstraints { (make) in
make.width.equalToSuperview()
make.height.equalTo(goodsImageView.snp.width)
make.left.top.equalToSuperview()
}
goodsNameLabel.snp.makeConstraints { (make) in
make.left.equalTo(goodsImageView).offset(10)
make.top.equalTo(goodsImageView.snp.bottom).offset(10)
make.height.equalTo(11)
}
newPriceLabel.snp.makeConstraints { (make) in
make.left.equalTo(goodsNameLabel)
make.top.equalTo(goodsNameLabel.snp.bottom).offset(10)
}
oldPriceLabel.snp.makeConstraints { (make) in
make.centerY.equalTo(newPriceLabel)
make.left.equalTo(newPriceLabel.snp.right).offset(5)
}
buyButton.snp.makeConstraints { (make) in
make.bottom.equalTo(progressBar.snp.top)
make.right.equalToSuperview()
make.width.equalTo(60)
make.height.equalTo(35)
}
progressBar.snp.makeConstraints { (make) in
make.height.equalTo(20)
make.left.right.bottom.equalToSuperview()
}
}
func setData(){
self.goodsImageView.image = #imageLiteral(resourceName: "bourkestreetbakery")
self.goodsNameLabel.text = "ML时尚鸭舌帽"
self.newPriceLabel.text = "¥120"
self.oldPriceLabel.text = "¥200"
self.buyButton.setTitle("马上抢", for: .normal)
self.progressBar.setOverCount(overCount: 100, allCount: 200)
}
func setFormat(){
self.goodsNameLabel.setFontAndTextColor(font: MiddleFont, textColor: UIColor.black)
self.newPriceLabel.setNewLabel(headerFontSize: 8, footerFontSize: 10)
self.oldPriceLabel.setOldLabel(fontSize: 8)
self.buyButton.setBackgroundImage(UIImage(named:"btn_sel_red_120x70"), for: .normal)
self.buyButton.titleEdgeInsets = UIEdgeInsets(top: -5, left: 0, bottom: 0, right: 0)
self.buyButton.titleLabel?.font = MiddleFont
self.buyButton.titleLabel?.textColor = .white
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
<file_sep>//
// GoodsDetailsViewController.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/20.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
import KNSemiModalViewController
import CoreMotion
import MWPhotoBrowser
class GoodsDetailsViewControlleabr: UIViewController,UITableViewDelegate,UITableViewDataSource,SpecViewDelegate,SDCycleScrollViewDelegate,MWPhotoBrowserDelegate{
var photosArray :[String] = ["headerImage","1","headerImage","headerImage"]
var photos :[MWPhoto] = []
var thumbs :[MWPhoto] = []
let detailCellIdentifier = "detailCell"
let selectCellIdentifier = "selectCell"
let bottomCellIdentifier = "bottomCell"
var tableView = UITableView(frame: CGRect.zero, style: .grouped)
var bannerView = SDCycleScrollView()
var topView = DetailsTopView()
lazy var motionManager = CMMotionManager()
var timeInterval :Double = 0
var firstAngle: Double!
var angle :Double = 0
var index = 0
var newBannerView:SDCycleScrollView!
override func viewDidLoad() {
super.viewDidLoad()
self.navigationController?.navigationBar.isTranslucent = true
self.automaticallyAdjustsScrollViewInsets = false
self.view.backgroundColor = .white
self.view.addSubview(tableView)
self.view.addSubview(topView)
topView.snp.makeConstraints { (make) in
make.left.right.equalToSuperview()
make.top.equalToSuperview().offset(64)
make.height.equalTo(35)
}
self.addBackItem()
self.setTableView()
self.setBannerView()
self.setNavigationItem()
self.addBottomButton()
}
//Convert gyroscope rotation into an index for the 360° product images
func calculateRotationByGyro(motion:CMDeviceMotion){
print("转动中")
let y = motion.rotationRate.y
let timeInterval = Date().timeIntervalSince1970
angle += y*(timeInterval - self.timeInterval)*(180.0 / M_PI)
if self.firstAngle == nil{
self.firstAngle = angle
}
self.timeInterval = timeInterval
var index = Int(firstAngle - angle)/3
if index > 0 {
index = index % 16
}else{
index = (index%16 + 16) % 16
}
if index != self.index{
self.becomeImage(index: index)
self.index = index
}
}
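// Sketch (assumption): the index mapping performed above, isolated - 3 degrees of accumulated rotation
// per frame, wrapped into the 0..<16 range so negative deltas also land on a valid frame.
private func exampleFrameIndex(forAngleDelta delta: Double) -> Int {
let raw = Int(delta) / 3
return ((raw % 16) + 16) % 16
}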
//Switch the displayed frame
func becomeImage(index:Int){
self.photosArray[1] = "\(index)"
self.bannerView.localizationImageNamesGroup = self.photosArray
for view in self.bannerView.subviews{
if let view = view as? UICollectionView{
//Reload so the new frame is shown
view.reloadData()
}
}
//newBannerView is only created elsewhere; skip it while it is still nil to avoid a crash
if let newBannerView = self.newBannerView {
newBannerView.localizationImageNamesGroup = self.photosArray
for view in newBannerView.subviews {
if let view = view as? UICollectionView {
//Reload so the new frame is shown
view.reloadData()
}
}
}
}
/// Add the bottom action buttons (add to cart / buy now)
func addBottomButton(){
let addCartButton = UIButton()
addCartButton.backgroundColor = customGray
addCartButton.setTitle("加入购物车", for: .normal)
addCartButton.titleLabel?.font = BigFont
self.view.addSubview(addCartButton)
addCartButton.snp.makeConstraints { (make) in
make.width.equalToSuperview().multipliedBy(0.5)
make.height.equalTo(40)
make.left.bottom.equalToSuperview()
}
addCartButton.addTarget(self, action: #selector(GoodsDetailsViewControlleabr.addCartAction(_:)), for: .touchUpInside)
let sureBuyButton = UIButton()
sureBuyButton.backgroundColor = customRed
sureBuyButton.setTitle("立即购买", for: .normal)
sureBuyButton.titleLabel?.font = BigFont
self.view.addSubview(sureBuyButton)
sureBuyButton.snp.makeConstraints { (make) in
make.width.equalToSuperview().multipliedBy(0.5)
make.height.equalTo(40)
make.right.bottom.equalToSuperview()
}
sureBuyButton.addTarget(self, action: #selector(GoodsDetailsViewControlleabr.sureBuyAction(_:)), for: .touchUpInside)
}
func addCartAction(_ sender:UIButton){
print("添加成功")
}
//Confirm purchase
func sureBuyAction(_ sender:UIButton){
let confirmOrderViewController = DetailsStoryboardHelper.instantiateViewController(identifier: DetailsStoryboardHelper.confirmOrderViewController)
self.navigationController?.pushViewController(confirmOrderViewController, animated: true)
}
//Configure the navigation bar
private func setNavigationItem(){
let rightButtonItem = UIBarButtonItem(image: UIImage(named:"messageItem"),style: .plain, target: self, action: nil)
self.navigationItem.rightBarButtonItem = rightButtonItem
}
//Configure the table view
private func setTableView(){
self.tableView.delegate = self
self.tableView.dataSource = self
self.tableView.backgroundColor = HiTaoBackgroundColor
tableView.snp.makeConstraints { (make) in
make.left.right.equalToSuperview()
make.bottom.equalToSuperview().offset(-40)
make.top.equalToSuperview().offset(64+35)
}
self.tableView.separatorStyle = .none
self.tableView.register(GoodsTableViewCell.self, forCellReuseIdentifier: detailCellIdentifier)
self.tableView.register(GoodsSelectTableViewCell.self, forCellReuseIdentifier: selectCellIdentifier)
self.tableView.register(GoodsBottomTableViewCell.self, forCellReuseIdentifier: bottomCellIdentifier)
}
//Configure the banner carousel
private func setBannerView(){
self.bannerView.frame = CGRect(x: 0, y: 0, width: SCREEN_WIDTH, height: 300)
self.bannerView.pageControlAliment = SDCycleScrollViewPageContolAlimentRight
self.bannerView.currentPageDotImage = UIImage(named:"ellipse_red")
self.bannerView.pageDotImage = UIImage(named: "ellipse_gray")
self.bannerView.autoScroll = false
self.bannerView.infiniteLoop = false
self.bannerView.delegate = self
self.bannerView.localizationImageNamesGroup = ["headerImage","1","headerImage","headerImage"]
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
//MARK: SDCycleScrollViewDelegate
func cycleScrollView(_ cycleScrollView: SDCycleScrollView!, didScrollTo index: Int){
if index == 1{
self.motionManager.startDeviceMotionUpdates(to: OperationQueue.main, withHandler:
{motion,error in
self.calculateRotationByGyro(motion: motion!)
})
}else{
motionManager.stopDeviceMotionUpdates()
}
}
func cycleScrollView(_ cycleScrollView: SDCycleScrollView!, didSelectItemAt index: Int) {
let browser = MWPhotoBrowser(delegate: self)
//Hide the share/action button (shown by default)
browser?.displayActionButton = false
//No left/right paging arrows
browser?.displayNavArrows = false
//No selection buttons on top of the photos
browser?.displaySelectionButtons = false
//Do not keep the controls permanently visible
browser?.alwaysShowControls = false
//Zoom photos to fill the screen
browser?.zoomPhotosToFill = true
//Allow browsing all photos in a grid
browser?.enableGrid = true
//Start on the thumbnail grid
browser?.startOnGrid = true
//Swipe to dismiss
browser?.enableSwipeToDismiss = true
//Open on the tapped photo
browser?.setCurrentPhotoIndex(UInt(index))
photos = [MWPhoto(image: UIImage(named: photosArray[0])),MWPhoto(image: UIImage(named: photosArray[1])),MWPhoto(image: UIImage(named: photosArray[2])),MWPhoto(image: UIImage(named: photosArray[3]))]
thumbs = [MWPhoto(image: UIImage(named: photosArray[0])),MWPhoto(image: UIImage(named: photosArray[1])),MWPhoto(image: UIImage(named: photosArray[2])),MWPhoto(image: UIImage(named: photosArray[3]))]
let nc = UINavigationController(rootViewController: browser!)
nc.modalTransitionStyle = .crossDissolve
self.present(nc, animated: true, completion:nil)
//Custom addition: tap to dismiss and hide the navigation bar
browser?.addTouchTap()
}
//MARK:- MWPhotoBrowserDelegate
func numberOfPhotos(in photoBrowser: MWPhotoBrowser!) -> UInt{
return 4
}
func photoBrowser(_ photoBrowser: MWPhotoBrowser!, photoAt index: UInt) -> MWPhotoProtocol! {
return photos[Int(index)]
}
//MARK:- UITableViewDataSource
func numberOfSections(in tableView: UITableView) -> Int {
return 3
}
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int{
return 1
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell{
switch indexPath.section {
case 0:
let cell = tableView.dequeueReusableCell(withIdentifier: detailCellIdentifier, for: indexPath)
return cell
case 1:
let cell = tableView.dequeueReusableCell(withIdentifier: selectCellIdentifier, for: indexPath)
return cell
default :
let cell = tableView.dequeueReusableCell(withIdentifier: bottomCellIdentifier, for: indexPath)
cell.selectionStyle = .none
return cell
}
}
//MARK: - UITableViewDelegate
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
switch indexPath.section {
case 0:
return 110
case 1:
return 30
default :
return 265
}
}
func tableView(_ tableView: UITableView, heightForHeaderInSection section: Int) -> CGFloat {
switch section {
case 0:
return 300
default:
return 0.0001
}
}
func tableView(_ tableView: UITableView, viewForHeaderInSection section: Int) -> UIView? {
if section == 0{
return bannerView
}else{
return nil
}
}
func tableView(_ tableView: UITableView, heightForFooterInSection section: Int) -> CGFloat {
return 10
}
func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
let view = SpecView(frame: CGRect(x: 0, y: 0, width: 0, height: 300))
view.delegate = self
if indexPath.section == 1{
let dict :NSDictionary = [KNSemiModalOptionKeys.pushParentBack : true,
KNSemiModalOptionKeys.animationDuration : 2,
KNSemiModalOptionKeys.shadowOpacity : 0.3,]
self.presentSemiView(view, withOptions: dict as! [AnyHashable:Any] )
}
tableView.deselectRow(at: indexPath, animated: true)
}
//MARK:- SpecViewDelegate
func disMissModleView() {
self.dismissSemiModalView()
}
}
<file_sep>//
// ProgressCircleView.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/11/22.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class ProgressCircleView: UIView {
var bgView = UIImageView()
var topLabel = UILabel()
var bottomLabel = UILabel()
override init(frame: CGRect) {
super.init(frame: frame)
self.addSubview(bgView)
self.addSubview(topLabel)
self.addSubview(bottomLabel)
self.setAllAttributes()
}
func setAllAttributes(){
self.setLayout()
self.setFrame()
}
func setLayout(){
bgView.snp.makeConstraints { (make) in
make.edges.equalToSuperview()
}
topLabel.snp.makeConstraints { (make) in
make.centerX.equalToSuperview()
make.bottom.equalTo(self.bgView.snp.centerY).offset(-3.5-3)
}
bottomLabel.snp.makeConstraints { (make) in
make.centerX.equalToSuperview()
make.top.equalTo(self.bgView.snp.centerY).offset(-3.5+3)
}
}
func setData(visualCount:Int,allCount:Int){
self.topLabel.text = String(visualCount)
self.bottomLabel.text = String(allCount)
}
func setFrame(){
self.bgView.image = #imageLiteral(resourceName: "icon_progress")
self.topLabel.font = SmallFont
self.bottomLabel.font = SmallFont
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
<file_sep>// TableChildExampleViewController.swift
// XLPagerTabStrip ( https://github.com/xmartlabs/XLPagerTabStrip )
//
// Copyright (c) 2016 Xmartlabs ( http://xmartlabs.com )
//
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
import Foundation
import UIKit
import XLPagerTabStrip
class TableChildExampleViewController: UIViewController, IndicatorInfoProvider ,UITableViewDataSource,UITableViewDelegate{
var index:Int!
let cheapCellIdentifier = "cheapCell"
let hotCellIdentifier = "hotCell"
let newCellIdentifier = "newCell"
var blackTheme = false
var itemInfo : IndicatorInfo!
var tableView = UITableView()
init(itemInfo: IndicatorInfo,index:Int) {
self.itemInfo = itemInfo
self.index = index
super.init(nibName: nil, bundle: nil)
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func viewDidLoad() {
super.viewDidLoad()
//Only the last page (index 2) needs the grey background
if index == 2{
self.view.backgroundColor = HiTaoBackgroundColor
}
self.view.addSubview(tableView)
self.tableView.separatorStyle = .none
self.tableView.snp.makeConstraints { (make) in
make.left.right.equalToSuperview()
if index == 2{
make.top.equalToSuperview().offset(2)
}else{
make.top.equalToSuperview()
}
make.bottom.equalToSuperview()
}
self.tableView.delegate = self
self.tableView.dataSource = self
self.tableView.register(NewCheapTableViewCell.self, forCellReuseIdentifier: cheapCellIdentifier)
self.tableView.register(NewHotTableViewCell.self, forCellReuseIdentifier: hotCellIdentifier)
self.tableView.register(NewTableViewCell.self, forCellReuseIdentifier: newCellIdentifier)
self.tableView.estimatedRowHeight = 60.0
self.tableView.rowHeight = UITableViewAutomaticDimension
self.tableView.allowsSelection = false
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
self.tableView.reloadData()
self.tableView.scrollToRow(at: IndexPath(row: 0, section: 0), at: .top, animated: true)
}
override func viewWillDisappear(_ animated: Bool) {
}
// MARK: - UITableViewDataSource
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return 5
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
var cell = UITableViewCell()
switch index {
case 0:
cell = tableView.dequeueReusableCell(withIdentifier: cheapCellIdentifier, for: indexPath) as! NewCheapTableViewCell
case 1:
cell = tableView.dequeueReusableCell(withIdentifier: hotCellIdentifier, for: indexPath) as! NewHotTableViewCell
case 2:
cell = tableView.dequeueReusableCell(withIdentifier: newCellIdentifier, for: indexPath) as! NewTableViewCell
default:
break
}
return cell
}
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
return index == 2 ? 110 + ITEM_WIDTH*(80/93)*2 : 240
}
// MARK: - IndicatorInfoProvider
func indicatorInfo(for pagerTabStripController: PagerTabStripViewController) -> IndicatorInfo {
return itemInfo
}
}
<file_sep>//
// BrandViewController.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/11/22.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class BrandViewController: UIViewController ,UICollectionViewDataSource,UICollectionViewDelegate{
let brandColCellIdentifier = "brandColCell"
var collectionView :UICollectionView!
override func viewDidLoad() {
super.viewDidLoad()
self.view.backgroundColor = .white
self.navigationItem.title = "品牌"
self.setCollectionView()
self.setNavigationItem()
}
private func setNavigationItem(){
let rightItem = UIBarButtonItem(image: UIImage(named:navIconMessage), style: .plain, target: self, action: nil)
self.navigationItem.rightBarButtonItem = rightItem
}
private func setCollectionView(){
let layout = PublicCollectionViewLayout(sectionInset: UIEdgeInsets(top: 10, left: 10, bottom: 10, right: 10), minimumLineSpacing: 10, minimumInteritemSpacing: 10, itemSize:CGSize(width: (SCREEN_WIDTH-41)/3, height:(SCREEN_WIDTH-41)/3 + 30))
self.collectionView = UICollectionView(frame: CGRect.zero, collectionViewLayout:layout)
self.collectionView.delegate = self
self.collectionView.dataSource = self
self.view.addSubview(collectionView)
self.collectionView.backgroundColor = HiTaoBackgroundColor
collectionView.snp.makeConstraints { (make) in
make.left.right.bottom.equalToSuperview()
make.top.equalToSuperview()
}
self.collectionView.register(BrandCollectionViewCell.self, forCellWithReuseIdentifier: brandColCellIdentifier)
}
//MARK:- UICollectionDataSource
func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
return 30
}
func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
let cell = collectionView.dequeueReusableCell(withReuseIdentifier: brandColCellIdentifier, for: indexPath) as! BrandCollectionViewCell
return cell
}
//MARK:- UICollectionDelegate
}
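// Sketch (assumption): where the (SCREEN_WIDTH-41)/3 item width above comes from - screen width minus
// the left/right insets (10 + 10) and two inter-item gaps (10 + 10) is SCREEN_WIDTH - 40, with one extra
// point removed so floating-point rounding can never push the third item onto the next row.
private func exampleThreeColumnItemWidth(screenWidth: CGFloat) -> CGFloat {
return (screenWidth - 41) / 3
}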
<file_sep>//
// WaterFlowViewLayout.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/10.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class WaterFlowViewLayout: UICollectionViewLayout {
///Layout attributes for every cell
var layoutAttributes = [UICollectionViewLayoutAttributes]()
///Tracks the current maximum Y of each column
var maxYDict = [Int:CGFloat]()
///Insets around the whole waterfall
var sectionInsert :UIEdgeInsets!
//Spacing between columns
var columnMargin:CGFloat!
//Spacing between rows
var rowMargin:CGFloat!
///Number of columns
var columnCount : Int!
//Callback that supplies each item's height for a given item width
var itemHeightBlock : ((CGFloat, IndexPath) -> CGFloat)!
override func prepare() {
super.prepare()
//Reset the cached attributes so a re-layout does not accumulate duplicates
self.layoutAttributes.removeAll()
for i in 0 ..< self.columnCount {
self.maxYDict[i] = self.sectionInsert.top
}
let itemCount = self.collectionView?.numberOfItems(inSection: 0)
for i in 0 ..< itemCount!{
let attributes = self.layoutAttributesForItem(at: IndexPath(item: i, section: 0))
self.layoutAttributes.append(attributes!)
}
}
override var collectionViewContentSize: CGSize{
get{
var maxIndex:Int = 0
for(key,value) in maxYDict{
if maxYDict[maxIndex]! < value{
maxIndex = key
}
}
return CGSize(width:0.0,height:self.maxYDict[maxIndex]! + self.sectionInsert.bottom)
}
}
override func layoutAttributesForItem(at indexPath: IndexPath) -> UICollectionViewLayoutAttributes? {
let collectionViewWidth = self.collectionView?.frame.width
let itemWidth :CGFloat = (collectionViewWidth! - self.sectionInsert.left - self.sectionInsert.right - CGFloat((self.columnCount - 1)) * self.columnMargin) / CGFloat(columnCount)
var minIndex = 0
for(key,value) in maxYDict{
if maxYDict[minIndex]! > value{
minIndex = key
}
}
let itemX = self.sectionInsert.left + (self.columnMargin + itemWidth) * CGFloat(minIndex)
let itemY = self.maxYDict[minIndex]! + self.rowMargin
let itemHeight = self.itemHeightBlock(itemWidth,indexPath)
//Frame for this cell
let frame = CGRect(x: itemX, y: itemY, width: itemWidth, height: itemHeight)
//Update the running max Y of the shortest column
maxYDict[minIndex] = frame.maxY
let attributes = UICollectionViewLayoutAttributes(forCellWith: indexPath)
attributes.frame = frame
return attributes
}
override func layoutAttributesForElements(in rect: CGRect) -> [UICollectionViewLayoutAttributes]? {
return self.layoutAttributes
}
}
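// Usage sketch (assumption: a two-column grid of square items; mirrors how MineCollectionViewController
// configures this layout elsewhere in the project).
private func makeExampleWaterFlowLayout() -> WaterFlowViewLayout {
let layout = WaterFlowViewLayout()
layout.columnCount = 2
layout.columnMargin = 10
layout.rowMargin = 10
layout.sectionInsert = UIEdgeInsets(top: 10, left: 10, bottom: 10, right: 10)
//Every item is laid out as a square in this sketch
layout.itemHeightBlock = { (itemWidth, _) in itemWidth }
return layout
}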
<file_sep>//
// SpecSizeModel.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/24.
// Copyright © 2016年 周. All rights reserved.
//
import Foundation
struct SpecSizeModel {
var sizeName:String
var isSelect:Bool
}
<file_sep>//
// PointBuyChildViewController.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/26.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
import XLPagerTabStrip
class PointBuyChildViewController: UIViewController,IndicatorInfoProvider,UICollectionViewDelegate,UICollectionViewDataSource {
//Reuse identifier for the flash-sale cell
let pointBuyChildCellIdentifier = "pointBuyChildCell"
//Reuse identifier for the collection header
let collectionHeaderIdentifier = "collectionHeader"
var index:Int!
var itemInfo : IndicatorInfo!
var collectionView:UICollectionView!
init(itemInfo: IndicatorInfo,index:Int) {
self.itemInfo = itemInfo
self.index = index
super.init(nibName: nil, bundle: nil)
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func viewDidLoad() {
super.viewDidLoad()
self.view.backgroundColor = HiTaoBackgroundColor
self.setCollectionView()
}
//Configure the collection view
private func setCollectionView(){
//Create the layout
let layout = PublicCollectionViewLayout(sectionInset: UIEdgeInsets(top: 10, left: 10, bottom: 10, right: 10), minimumLineSpacing: 10, minimumInteritemSpacing: 10, itemSize:CGSize(width: (SCREEN_WIDTH-30)/2, height: (SCREEN_WIDTH-30)/2 + 80))
//Header size
layout.headerReferenceSize = CGSize(width: SCREEN_WIDTH , height: 40)
//Attach the layout
self.collectionView = UICollectionView(frame: CGRect.zero, collectionViewLayout:layout)
//Assign the delegates
self.collectionView.delegate = self
self.collectionView.dataSource = self
self.view.addSubview(collectionView)
//Background color
self.collectionView.backgroundColor = HiTaoBackgroundColor
//Constraints
collectionView.snp.makeConstraints { (make) in
make.left.right.bottom.equalToSuperview()
make.top.equalToSuperview()
}
//Register the cell and the header view
self.collectionView.register(PointBuyChildCollectionViewCell.self, forCellWithReuseIdentifier: pointBuyChildCellIdentifier)
self.collectionView.register(CountDownCollectionHeaderView.self, forSupplementaryViewOfKind: UICollectionElementKindSectionHeader, withReuseIdentifier:collectionHeaderIdentifier )
}
//MARK: - UICollectionViewDataSource
func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
return 30
}
func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
let cell = collectionView.dequeueReusableCell(withReuseIdentifier: pointBuyChildCellIdentifier, for: indexPath)
return cell
}
//MARK: - UICollectionViewDelegate
func collectionView(_ collectionView: UICollectionView, viewForSupplementaryElementOfKind kind: String, at indexPath: IndexPath) -> UICollectionReusableView {
let cell = collectionView.dequeueReusableSupplementaryView(ofKind: kind, withReuseIdentifier: collectionHeaderIdentifier, for: indexPath) as! CountDownCollectionHeaderView
return cell
}
// MARK: - IndicatorInfoProvider
func indicatorInfo(for pagerTabStripController: PagerTabStripViewController) -> IndicatorInfo {
return itemInfo
}
}
<file_sep>//
// GoodsSelectTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/24.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class GoodsSelectTableViewCell: UITableViewCell {
var selectLabel = UILabel()
override init(style: UITableViewCellStyle, reuseIdentifier: String?) {
super.init(style: style, reuseIdentifier: reuseIdentifier)
self.contentView.addSubview(selectLabel)
selectLabel.snp.makeConstraints { (make) in
make.centerY.equalToSuperview()
make.left.equalToSuperview().offset(20)
make.width.equalTo(150)
make.height.equalToSuperview()
}
selectLabel.font = MiddleFont
selectLabel.text = "请选择"
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func awakeFromNib() {
super.awakeFromNib()
// Initialization code
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
}
<file_sep>//
// WeChatSearchController.swift
// SuperWeChat
//
// Created by 周鹏杰 on 16/9/19.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class WeChatSearchViewController: UISearchController{
override init(searchResultsController: UIViewController!) {
super.init(searchResultsController: searchResultsController)
self.setSearchBar()
}
override init(nibName nibNameOrNil: String?, bundle nibBundleOrNil: Bundle?) {
super.init(nibName: nibNameOrNil, bundle: nibBundleOrNil)
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
    // Configure the text field
func setSearchBar(){
if let index = indexOfSearchFieldInSubviews() {
            // Get the search input field
let searchField: UITextField = (searchBar.subviews[0]).subviews[index] as! UITextField
searchField.placeholder = "请输入搜索内容"
}
        // Set the bookmark button icon
self.searchBar.showsBookmarkButton = true
self.searchBar.setImage(UIImage(named:"scanning_icon"), for: .bookmark, state: .normal)
self.searchBar.barTintColor = RGB(r: 201, g: 201, b: 206)
self.searchBar.searchBarStyle = .default
self.searchBar.tintColor = RGB(r: 38, g: 202, b: 114)
}
    // Find the index of the text field among the search bar's subviews
func indexOfSearchFieldInSubviews() -> Int! {
var index: Int!
let searchBarView = searchBar.subviews[0]
for i in 0 ..< searchBarView.subviews.count {
if searchBarView.subviews[i] is UITextField {
index = i
break
}
}
return index
}
override func viewDidLoad() {
super.viewDidLoad()
        // Configure the search controller
self.setSearchController()
        // Set the search bar colors
self.setSearchBar()
        // The current controller acts as the search results updater
self.setDelegate()
}
func setSearchController(){
        // Don't dim the background while searching
self.dimsBackgroundDuringPresentation = false
        // Don't blur the background while searching
if #available(iOS 9.1, *) {
self.obscuresBackgroundDuringPresentation = false
} else {
// Fallback on earlier versions
}
        // Set the background color
self.view.backgroundColor = UIColor.clear
}
func setDelegate(){
// self.searchResultsUpdater = VM
// self.delegate = VM
// self.searchBar.delegate = VM
// self.customSearchBar.delegate = VM
// VM.viewDelegate = self
// VM.customDelegate = self
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func becomeActive(){
self.isActive = true
}
//MARK: -- CustomSearchControllerDelegate
func didStartSearching(){
print("开始")
}
func didTapOnSearchButton(){
print("搜索")
}
func didTapOnCancelButton(){
print("取消")
}
func didChangeSearchText(searchText: String){
print("正在输入\(searchText)")
}
}
<file_sep>//
// BrandCollectionViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/11/22.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
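// Collection view cell showing a square brand image with the brand name underneath.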
class BrandCollectionViewCell: UICollectionViewCell {
var brandImageView = UIImageView()
var brandLabel = UILabel()
override init(frame: CGRect) {
super.init(frame: frame)
self.contentView.addSubview(brandImageView)
self.contentView.addSubview(brandLabel)
self.setAllAttributes()
}
func setAllAttributes(){
self.setLayout()
self.setData()
self.setFrame()
}
func setLayout(){
brandImageView.snp.makeConstraints { (make) in
make.width.equalToSuperview()
make.height.equalTo(brandImageView.snp.width)
make.left.top.equalToSuperview()
}
brandLabel.snp.makeConstraints { (make) in
make.left.equalToSuperview()
make.top.equalTo(brandImageView.snp.bottom).offset(5)
}
}
func setData(){
brandImageView.image = UIImage(named:imageDefaultName)
self.brandLabel.text = "阿迪王"
}
func setFrame(){
self.brandLabel.font = BigFont
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
<file_sep>//
// MineHeaderView.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/19.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
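// Section header with a title label and a "more" button; button taps are forwarded through MineHeaderViewDelegate.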
protocol MineHeaderViewDelegate :NSObjectProtocol{
func pushToMineOrderViewController()
}
class MineHeaderView: UITableViewHeaderFooterView {
var headerTitleLabel = UILabel()
var moreButton = UIButton()
var delegate : MineHeaderViewDelegate!
override init(reuseIdentifier: String?) {
super.init(reuseIdentifier: reuseIdentifier)
self.contentView.backgroundColor = HiTaoBackgroundColor
self.contentView.addSubview(headerTitleLabel)
self.contentView.addSubview(moreButton)
self.setAllAttributes()
}
func setAllAttributes(){
self.setLayout()
self.setFormat()
}
func setLayout(){
headerTitleLabel.snp.makeConstraints { (make) in
make.left.equalToSuperview().offset(10)
make.centerY.equalToSuperview()
}
moreButton.snp.makeConstraints { (make) in
make.right.equalToSuperview().offset(-10)
make.centerY.equalToSuperview()
}
}
func setData(title:String){
self.headerTitleLabel.text = title
self.moreButton.setImage(UIImage(named:"icon_arrow_right_more"), for: .normal)
self.moreButton.addTarget(self, action: #selector(MineHeaderView.moreAction(_:)), for: .touchUpInside)
}
func moreAction(_ sender:UIButton){
self.delegate.pushToMineOrderViewController()
}
func setFormat(){
self.headerTitleLabel.font = BigFont
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
<file_sep>//
// InfoHeaderTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/30.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
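// Nib-backed cell that applies ViewToRoundView() to its header image view when loaded.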
class InfoHeaderTableViewCell: UITableViewCell {
@IBOutlet weak var headerImageView: UIImageView!
override func awakeFromNib() {
super.awakeFromNib()
self.headerImageView.ViewToRoundView()
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
}
<file_sep>//
// ClassCollectionViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/12.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
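// Nib-backed cell exposing an image view and a title label.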
class ClassCollectionViewCell: UICollectionViewCell {
@IBOutlet weak var imageView: UIImageView!
@IBOutlet weak var titleLabel: UILabel!
override func awakeFromNib() {
}
}
<file_sep>//
// NewTableViewCell.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/14.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
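// Feed cell with a merchant avatar, name, timestamp and follow button above an embedded, non-scrolling collection view of products.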
class NewTableViewCell: UITableViewCell,UICollectionViewDataSource {
let productCellIdentifier = "productCell"
var businessHeadImageView = UIImageView()
var businessNameLabel = UILabel()
var beforeTimeLabel = UILabel()
var followButton = UIButton()
var collectionView : UICollectionView!
var bottomColorView = UIView()
override init(style: UITableViewCellStyle, reuseIdentifier: String?) {
super.init(style: style, reuseIdentifier: reuseIdentifier)
let layout = PublicCollectionViewLayout(sectionInset: UIEdgeInsets(top: 20, left: 10, bottom: 20, right: 10), minimumLineSpacing: 10, minimumInteritemSpacing: 10.5, itemSize: CGSize(width: ITEM_WIDTH, height: ITEM_WIDTH*(80/93)))
collectionView = UICollectionView(frame:CGRect.zero, collectionViewLayout: layout)
collectionView.collectionViewLayout = layout
self.contentView.addSubview(businessHeadImageView)
self.contentView.addSubview(businessNameLabel)
self.contentView.addSubview(beforeTimeLabel)
self.contentView.addSubview(collectionView)
self.contentView.addSubview(followButton)
self.contentView.addSubview(bottomColorView)
self.setContentInCell()
}
func setContentInCell(){
self.setLayout()
self.setData()
self.setFormat()
}
func setLayout(){
self.businessHeadImageView.snp.makeConstraints { (make) in
make.left.equalToSuperview().offset(20)
make.top.equalToSuperview().offset(20)
make.width.height.equalTo(30)
}
self.businessNameLabel.snp.makeConstraints { (make) in
make.left.equalTo(businessHeadImageView.snp.right).offset(5)
make.top.equalToSuperview().offset(20)
}
self.beforeTimeLabel.snp.makeConstraints { (make) in
make.left.equalTo(businessNameLabel)
make.top.equalTo(businessNameLabel.snp.bottom).offset(5)
}
self.collectionView.snp.makeConstraints { (make) in
make.top.equalTo(businessHeadImageView.snp.bottom)
make.left.right.equalToSuperview()
make.bottom.equalTo(bottomColorView.snp.top)
}
self.followButton.snp.makeConstraints { (make) in
make.centerY.equalTo(businessHeadImageView).offset(4)
make.right.equalToSuperview().offset(-10)
make.width.equalTo(60)
make.height.equalTo(35)
}
self.bottomColorView.snp.makeConstraints { (make) in
make.left.right.bottom.equalToSuperview()
make.height.equalTo(10)
}
}
func setData(){
self.businessHeadImageView.image = UIImage(named:"bourkestreetbakery")
self.businessNameLabel.text = "他叫向日葵"
self.beforeTimeLabel.text = "3分钟前"
self.collectionView.dataSource = self
self.collectionView.register(UICollectionViewCell.self, forCellWithReuseIdentifier: productCellIdentifier)
self.followButton.setTitle("+关注", for: .normal)
}
func setFormat(){
self.businessNameLabel.font = MiddleFont
self.beforeTimeLabel.font = SmallFont
self.beforeTimeLabel.textColor = UIColor.hexStringToColor("94a3b5")
self.collectionView.isScrollEnabled = false
self.collectionView.backgroundColor = .white
self.bottomColorView.backgroundColor = HiTaoBackgroundColor
self.followButton.setBackgroundImage(UIImage(named:"btn_sel_red_120x70"), for: .normal)
self.followButton.titleLabel?.font = MiddleFont
self.followButton.setTitleColor(.white, for: .normal)
self.followButton.titleEdgeInsets.top = -5
}
    // layoutIfNeeded: forces any pending layout updates to be applied immediately, so it is usually used together with setNeedsLayout. Call it when the new frame is needed right away; layout animations typically call it after updating constraints so the animation takes effect.
override func layoutIfNeeded() {
super.layoutIfNeeded()
self.businessHeadImageView.ViewToRoundViewBycornerRadius(cornerRadius: 15)
}
public func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int{
return 6
}
public func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell{
let cell = collectionView.dequeueReusableCell(withReuseIdentifier: productCellIdentifier, for: indexPath)
cell.backgroundColor = UIColor.randomColor()
return cell
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func awakeFromNib() {
super.awakeFromNib()
// Initialization code
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
}
<file_sep>//
// Range+Extension.swift
// SuperWeChat
//
// Created by 周鹏杰 on 16/9/20.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
extension NSRange{
    /// Converts an NSRange into a Range<String.Index>
///
    /// - Parameter content: the content string
    /// - Returns: the corresponding Range<String.Index>
func toRangeByString(content:String) -> Range<String.Index> {
let startIndex = content.characters.index(content.startIndex, offsetBy: self.location)
let endIndex = content.characters.index(content.startIndex, offsetBy: self.location + self.length)
let newRange :Range<String.Index> = Range(uncheckedBounds: (startIndex,endIndex))
return newRange
}
}
<file_sep>//
// SearchTopView.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/25.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
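// Top view of the search screen; currently an empty placeholder.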
class SearchTopView: UIView {
}
<file_sep>//
// PointBuyViewController.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/26.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
import Popover
import XLPagerTabStrip
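// Pager tab strip whose tabs are flash-sale time slots, each hosting a PointBuyChildViewController.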
class PointBuyViewController: ButtonBarPagerTabStripViewController {
    // Whether this is the first load
var isFirst = true
override func viewDidLoad(){
self.view.backgroundColor = .white
settings.style.buttonBarHeight = 50
settings.style.buttonBarBackgroundColor = .white
settings.style.buttonBarItemBackgroundColor = .white
settings.style.selectedBarBackgroundColor = .black
settings.style.buttonBarItemFont = .systemFont(ofSize: 11)
settings.style.selectedBarHeight = 0
settings.style.buttonBarMinimumLineSpacing = 0
settings.style.buttonBarItemTitleColor = customGray
settings.style.buttonBarItemsShouldFillAvailiableWidth = true
settings.style.buttonBarLeftContentInset = 0
settings.style.buttonBarRightContentInset = 0
changeCurrentIndexProgressive = { (oldCell: ButtonBarViewCell?, newCell: ButtonBarViewCell?, progressPercentage: CGFloat, changeCurrentIndex: Bool, animated: Bool) -> Void in
guard changeCurrentIndex == true else { return }
            // First reset every tab label to the unselected color
if !self.isFirst{
for subview in self.buttonBarView.subviews{
if let cell = subview as? ButtonBarViewCell{
cell.label.textColor = .black
}
}
}
oldCell?.label.textColor = customGray
newCell?.label.textColor = .black
}
super.viewDidLoad()
}
override func viewWillDisappear(_ animated: Bool) {
self.isFirst = false
}
override func viewWillAppear(_ animated: Bool) {
}
@IBAction func showMenu(_ sender: UIBarButtonItem) {
self.setRightPopoverView()
}
    /// Configures and shows the popover menu on the right
private func setRightPopoverView(){
let startPoint = CGPoint(x: SCREEN_WIDTH - 80, y: 64)
let options = [
.type(.down),
.cornerRadius(10),
.animationIn(0.3),
.blackOverlayColor(RGBAndAlpha(r: 222, g: 222, b: 222, a: 0.8)),
.arrowSize(CGSize.zero)
] as [PopoverOption]
let popover = Popover(options: options, showHandler: nil, dismissHandler: nil)
let view = MenuContentView(frame:CGRect(x: 0, y: 0, width: SCREEN_WIDTH, height: 0))
        // The closure could pass the selected value back; for now it only dismisses the popover
view.completion = {
popover.dismiss()
}
popover.show(view, point: startPoint)
}
    //    //Set up the navigation bar
// private func setNavigationItem(){
// self.navigationController?.navigationBar.isTranslucent = false
// }
    // Move the button bar to the given index
private func moveToIndex(index:Int){
if self.canMoveTo(index: index){
self.moveToViewController(at: index, animated: true)
}
}
    // Provide the child view controllers
override func viewControllers(for pagerTabStripController: PagerTabStripViewController) -> [UIViewController] {
let child_1 = PointBuyChildViewController( itemInfo: IndicatorInfo(title: "10:00\n已经秒杀"),index:0)
let child_2 = PointBuyChildViewController( itemInfo: IndicatorInfo(title: "11:00\n已经秒杀"),index:1)
let child_3 = PointBuyChildViewController( itemInfo: IndicatorInfo(title: "13:00\n正在秒杀"),index:2)
        let child_4 = PointBuyChildViewController( itemInfo: IndicatorInfo(title: "15:00\n将要秒杀"),index:3)
        let child_5 = PointBuyChildViewController( itemInfo: IndicatorInfo(title: "17:00\n将要秒杀"),index:4)
return [child_1, child_2,child_3,child_4,child_5]
}
}
<file_sep>//
// MineViewController.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/10/18.
// Copyright © 2016年 周. All rights reserved.
//
import UIKit
class MineViewController: UIViewController,UITableViewDelegate,UITableViewDataSource ,MineHeaderViewDelegate{
let mineHeaderCellIdentifier = "mineHeaderCell"
let mineSecondeCellIdentifier = "mineSecondCell"
let mineThirdeCellIdentifier = "mineThirdCell"
let mineHeaderViewIdenetifier = "mineHeaderView"
let sectionTitleArray = ["我的订单","必备工具","生活·健康"]
@IBOutlet weak var tableView: UITableView!
@IBAction func showMineCoupon(_ sender: UIButton) {
let vc = CouponViewController()
vc.hidesBottomBarWhenPushed = true
self.navigationController?.pushViewController(vc, animated: true)
}
@IBAction func showMineCollection(_ sender: UIButton) {
let vc = MineCollectionViewController()
vc.hidesBottomBarWhenPushed = true
self.navigationController?.pushViewController(vc, animated: true)
}
override func viewDidLoad() {
super.viewDidLoad()
self.addBackItem()
        // Register the section header view
self.tableView.register(MineHeaderView.self, forHeaderFooterViewReuseIdentifier: mineHeaderViewIdenetifier)
        // Set the table view separator style
self.tableView.separatorStyle = .none
}
override func viewWillAppear(_ animated: Bool) {
        // Set the navigation bar background image
self.navigationController?.navigationBar.setBackgroundImage(HiTaoBackgroundColor.imageFromColor().reSizeImage(reSize: CGSize(width: SCREEN_WIDTH, height: 64)), for: .default)
}
override func viewWillDisappear(_ animated: Bool) {
self.navigationController?.navigationBar.setBackgroundImage(UIImage(), for: .default)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
//MARK: --UITableViewDataSource
func numberOfSections(in tableView: UITableView) -> Int {
return 4
}
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return 1
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = UITableViewCell()
switch indexPath.section {
case 0:
let cell = tableView.dequeueReusableCell(withIdentifier: mineHeaderCellIdentifier, for: indexPath) as! MineHeaderTableViewCell
return cell
case 1:
let cell = tableView.dequeueReusableCell(withIdentifier: mineSecondeCellIdentifier, for: indexPath) as! MineSecondTableViewCell
return cell
case 2:
let cell = tableView.dequeueReusableCell(withIdentifier: mineThirdeCellIdentifier, for: indexPath) as! MineThirdTableViewCell
cell.itemsCount = 8
return cell
case 3:
let cell = tableView.dequeueReusableCell(withIdentifier: mineThirdeCellIdentifier, for: indexPath) as! MineThirdTableViewCell
cell.itemsCount = 4
return cell
default:
return cell
}
}
//MARK:- UITableViewDelegate
func tableView(_ tableView: UITableView, viewForHeaderInSection section: Int) -> UIView? {
guard section != 0 else {
return nil
}
let view = tableView.dequeueReusableHeaderFooterView(withIdentifier: mineHeaderViewIdenetifier) as! MineHeaderView
if [2,3].contains(section) {
view.moreButton.isHidden = true
}
view.delegate = self
view.setData(title:sectionTitleArray[section - 1])
let tap = UITapGestureRecognizer(target: self, action: #selector(MineViewController.pushToMineOrderViewController))
view.addGestureRecognizer(tap)
return view
}
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
switch indexPath.section {
case 0:
return 157
case 1:
return 80
case 2:
return 80 * 2 + 1
case 3:
return 80
default:
return 100
}
}
func tableView(_ tableView: UITableView, heightForHeaderInSection section: Int) -> CGFloat {
switch section {
case 0:
return 0.0000000001
default:
return 25
}
}
func tableView(_ tableView: UITableView, heightForFooterInSection section: Int) -> CGFloat {
switch section {
case 0:
return 0.0000000001
default:
return 0.0000000001
}
}
//MARK: -- MineHeaderViewDelegate
func pushToMineOrderViewController() {
let vc = MineOrderViewController()
vc.hidesBottomBarWhenPushed = true
self.navigationController?.pushViewController(vc, animated: true)
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
segue.destination.hidesBottomBarWhenPushed = true
}
}
<file_sep>//
// MWPhotoBrowser+Extension.swift
// HiShopping
//
// Created by 周鹏杰 on 2016/11/10.
// Copyright © 2016年 周. All rights reserved.
//
import Foundation
import MWPhotoBrowser
extension MWPhotoBrowser{
    // Add a tap-to-dismiss gesture and hide the navigation bar
func addTouchTap(){
self.navigationController?.navigationBar.isHidden = true
let tap = UITapGestureRecognizer(target: self, action: #selector(MWPhotoBrowser.disMissViewController(_:)))
self.view.addGestureRecognizer(tap)
}
    /// Dismisses the browser
///
    /// - Parameter sender: the tap gesture
func disMissViewController(_ sender:UITapGestureRecognizer){
if self.enableGrid{
if self.startOnGrid {
self.showGrid(true)
return
}else if !self.startOnGrid{
self.hideGrid()
return
}
}
if self.delegate.responds(to: #selector(MWPhotoBrowserDelegate.photoBrowserDidFinishModalPresentation(_:))){
self.delegate.photoBrowserDidFinishModalPresentation!(self)
}else{
self.dismiss(animated: true, completion: nil)
}
}
}
| ce40de4abff775da055ae2cab017faabfd9ceafe | [
"Swift",
"Ruby"
] | 58 | Swift | ZhouPeter/HiTao | 3dd7aff11ec6a2788c20d10f4817d555f82af9e7 | 18cd31af1681958de30845d0c3d715e4f6d33599 | |
refs/heads/master | <file_sep># Une SPA questionnaire pour se tester sur les notions de programmation
localhost:8000<file_sep>const mainDiv = document.getElementById('main')
const render = html => {
mainDiv.innerHTML = html
}
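// Mark every element tagged as a right answer: green text, light-green background, larger font.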
const highlightTrue = () => {
const right = document.getElementsByClassName('right')
Array.from(right).forEach(item => {
item.style.color = "green"
item.style.background = "#ccffcc"
item.style.fontSize = "larger";
})
}
// const highlightFalse = () => {
// const right = document.getElementsByClassName('right')
// Array.from(right).forEach(item => {
// item.style.color = "green"
// item.style.background = "#ccffcc"
// item.style.fontSize = "larger";
// })
// }
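// Build the HTML for one question and its three proposition list items.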
const makeQuestion = item => `
<h2>${item.question}</h2>
<ul class="list-group">
<li class="list-group-item ${item.propositions[0][1]}">${item.propositions[0][0]}</li>
<li class="list-group-item ${item.propositions[1][1]}">${item.propositions[1][0]}</li>
<li class="list-group-item ${item.propositions[2][1]}">${item.propositions[2][0]}</li>
</ul>
`
const controllers = {
'/': () => {
console.log("coucou je suis le console log du controller pour le path /")
fetch('/tests')
.then(res => {
console.log("dans le fetch, on s'occupe de la res")
return res.json()
})
.then(tests => tests.reduce((carry, test) => carry + makeQuestion(test), ''))
.then(question => {render(
`
<div>
${question}
</div>
`
)
const rightAnswers = document.getElementsByClassName("right")
Array.from(rightAnswers).forEach(answer => answer.addEventListener('click', highlightTrue))
})
}
}
const route = pathname => {
}
(() => {
['/'].forEach(
path => page(path, controllers[path])
)
page()
// route()
})() | 52398cb75cf9cc6065ceb8428d7e82c164529aa3 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | AurelieBayre/questionnaire-code | a7abff22e7fc3262087c74533b835b905cbedc8b | 69ced31a8d3ff8e3247e8ad6f69e8a39fa308e4a | |
refs/heads/main | <repo_name>raymondberg/math-sheets<file_sep>/math-sheets/math_generator.js
/*
*
*/
var PUZZLE_PACK = [
"A man a plan a canal: panama","For score and seven years ago","Who's on first?",
"Wallace and Gromit","Math is fun","Mount Olympus","The Cubs","The Bears",
"<NAME>","<NAME>", "<NAME>", "<NAME>", "<NAME>",
"Fifty States", "Thirteen colonies", "Chicago fire", "Three-hundred and sixty five days",
"<NAME>","<NAME>","Rick and Morty"
];
var LETTER_OPTIONS = {
"A": {
"numeric_value": 1,
"options": [
{"difficulty" : 1,"prompt": "It's the square root of 1"},
{"difficulty" : 1,"prompt": "It's the difference of six and five"},
{"difficulty" : 1,"prompt": "It's thirty-seven divided by thirty-seven"},
{"difficulty" : 1,"prompt": "It's negative four plus five"},
{"difficulty" : 1,"prompt": "It's fifty minus forty-nine"}
]
},
"B": {
"numeric_value": 2,
"options": [
{"difficulty" : 1,"prompt": "It's the square root of four"},
{"difficulty" : 1,"prompt": "It's six plus negative four"},
{"difficulty" : 1,"prompt": "It's twelve divided by six"},
{"difficulty" : 1,"prompt": "It's four divided by two"},
{"difficulty" : 1,"prompt": "It's twenty-four divided by twelve"},
{"difficulty" : 1,"prompt": "It's thirty-six divided by eighteen"}
]
},
"C": {
"numeric_value": 3,
"options": [
{"difficulty" : 1,"prompt": "It's the square root of nine"},
{"difficulty" : 1,"prompt": "It's eight minus five"},
{"difficulty" : 1,"prompt": "It's twelve minus nine"},
{"difficulty" : 1,"prompt": "It's sixty divided by twenty"},
{"difficulty" : 1,"prompt": "It's fifteen divided by five"}
]
},
"D": {
"numeric_value": 4,
"options": [
{"difficulty" : 1,"prompt": "It's two squared"},
{"difficulty" : 1,"prompt": "It's the square root of sixteen"},
{"difficulty" : 1,"prompt": "It's eighty divided by twenty"},
{"difficulty" : 1,"prompt": "It's sixty divided by fifteen"}
]
},
"E": {
"numeric_value": 5,
"options": [
{"difficulty" : 1,"prompt": "It's thirty divided by six"},
{"difficulty" : 1,"prompt": "It's the square root of twenty-five"},
{"difficulty" : 1,"prompt": "It's ten plus negative five"},
{"difficulty" : 1,"prompt": "It's twenty-five divided by five"},
{"difficulty" : 1,"prompt": "It's forty-five divided by nine"},
]
},
"F": {
"numeric_value": 6,
"options": [
{"difficulty" : 1,"prompt": "It's thirty divided by five"},
{"difficulty" : 1,"prompt": "It's twelve divided by two"},
{"difficulty" : 1,"prompt": "It's the square root of thirty-six"},
{"difficulty" : 1,"prompt": "It's sixty divided by ten"},
{"difficulty" : 1,"prompt": "It's three times two"},
]
},
"G": {
"numeric_value": 7,
"options": [
{"difficulty" : 1,"prompt": "It's forty-nine divided by seven"},
{"difficulty" : 1,"prompt": "It's the square-root of forty-nine"},
{"difficulty" : 1,"prompt": "It's fourteen divided by two"},
{"difficulty" : 1,"prompt": "It's thirty-five divided by five"}
]
},
"H": {
"numeric_value": 8,
"options": [
{"difficulty" : 1,"prompt": "It's two cubed"},
{"difficulty" : 1,"prompt": "It's four times two"},
{"difficulty" : 1,"prompt": "It's the square root of sixty four"},
{"difficulty" : 1,"prompt": "It's sixty-four divided by eight"},
{"difficulty" : 1,"prompt": "It's twelve minus four"}
]
},
"I": {
"numeric_value": 9,
"options": [
{"difficulty" : 1,"prompt": "It's three squared"},
{"difficulty" : 1,"prompt": "It's three times three"},
{"difficulty" : 1,"prompt": "It's twenty seven divided by three"},
{"difficulty" : 1,"prompt": "It's ninety divided by ten"},
{"difficulty" : 1,"prompt": "It's the square root of eighty-one"},
{"difficulty" : 1,"prompt": "It's fourty-five divided by five"}
]
},
"J": {
"numeric_value": 10,
"options": [
{"difficulty" : 1,"prompt": "It's two times five"},
{"difficulty" : 1,"prompt": "It's one-hundred divided by ten"},
{"difficulty" : 1,"prompt": "It's fifty divided by five"},
{"difficulty" : 1,"prompt": "It's the square root of one-hundred"},
{"difficulty" : 1,"prompt": "It's six hundred divided by sixty"},
{"difficulty" : 1,"prompt": "It's two and a half times four"}
]
},
"K": {
"numeric_value": 11,
"options": [
{"difficulty" : 1,"prompt": "It's sixty-six divided by six"},
{"difficulty" : 1,"prompt": "It's fifteen minus four"},
{"difficulty" : 1,"prompt": "It's five plus six"},
{"difficulty" : 1,"prompt": "It's one dozen minus one"}
]
},
"L": {
"numeric_value": 12,
"options": [
{"difficulty" : 1,"prompt": "It's three times four"},
{"difficulty" : 1,"prompt": "It's twenty-four divided by two"},
{"difficulty" : 1,"prompt": "It's sixteen minus four"},
{"difficulty" : 1,"prompt": "It's six times two"}
]
},
"M": {
"numeric_value": 13,
"options": [
{"difficulty" : 1,"prompt": "It's a bakers dozen"},
{"difficulty" : 1,"prompt": "It's nine plus four"},
{"difficulty" : 1,"prompt": "It's twenty-six divided by two"},
{"difficulty" : 1,"prompt": "It's twenty-one minus eight"},
{"difficulty" : 1,"prompt": "It's seven plus six"}
]
},
"N": {
"numeric_value": 14,
"options": [
{"difficulty" : 1,"prompt": "It's seven times two"},
{"difficulty" : 1,"prompt": "It's twenty-eight divided by two"},
{"difficulty" : 1,"prompt": "It's fourty-two divided by three"},
{"difficulty" : 1,"prompt": "It's nine plus five"}
]
},
"O": {
"numeric_value": 15,
"options": [
{"difficulty" : 1,"prompt": "It's five times three"},
{"difficulty" : 1,"prompt": "It's forty-five divided by three"},
{"difficulty" : 1,"prompt": "It's one-hundred and fifty divided by fifteen"},
{"difficulty" : 1,"prompt": "It's thirty divided by two"},
{"difficulty" : 1,"prompt": "It's eight plus seen"}
]
},
"P": {
"numeric_value": 16,
"options": [
{"difficulty" : 1,"prompt": "It's eight times two"},
{"difficulty" : 1,"prompt": "It's thirty-two divided by two"},
{"difficulty" : 1,"prompt": "It's sixty-four divided by four"},
{"difficulty" : 1,"prompt": "It's four squared"}
]
},
"Q": {
"numeric_value": 17,
"options": [
{"difficulty" : 1,"prompt": "It's twelve plus five"},
{"difficulty" : 1,"prompt": "It's thirty-four divided by two"},
{"difficulty" : 1,"prompt": "It's eight plus nine"}
]
},
"R": {
"numeric_value": 18,
"options": [
{"difficulty" : 1, "prompt": "It's three times six"},
{"difficulty" : 1, "prompt": "It's half of thirty-six"},
{"difficulty" : 1, "prompt": "It's nine times two"},
{"difficulty" : 1, "prompt": "It's seventy two divided by four"}
]
},
"S": {
"numeric_value": 19,
"options": [
{"difficulty" : 1, "prompt": "It's twenty-five minus six"},
{"difficulty" : 1, "prompt": "It's twelve plus seven"},
{"difficulty" : 1, "prompt": "It's twenty-nine minus ten"},
{"difficulty" : 1, "prompt": "It's four plus fifteen"},
{"difficulty" : 1, "prompt": "It's twenty plus negative one"}
]
},
"T": {
"numeric_value": 20,
"options": [
{"difficulty" : 1, "prompt": "It's fifty minus thirty"},
{"difficulty" : 1, "prompt": "It's fifteen plus five"},
{"difficulty" : 1, "prompt": "It's the number of toes"},
{"difficulty" : 1, "prompt": "It's forty divided by two"},
{"difficulty" : 1, "prompt": "It's ten times two"},
{"difficulty" : 1, "prompt": "It's four times five"},
{"difficulty" : 1, "prompt": "It's one-hundred divided by five"}
]
},
"U": {
"numeric_value": 21,
"options": [
{"difficulty" : 1, "prompt": "It's seven times three"},
{"difficulty" : 1, "prompt": "It's forty-two divided by two"},
{"difficulty" : 1, "prompt": "It's thirty minus nine"},
{"difficulty" : 1, "prompt": "It's sixty-three divided by three"},
{"difficulty" : 1, "prompt": "It's nine plus twelve"}
]
},
"V": {
"numeric_value": 22,
"options": [
{"difficulty" : 1, "prompt": "It's eleven times two"},
{"difficulty" : 1, "prompt": "It's fourteen plus eight"},
{"difficulty" : 1, "prompt": "It's ten plus twelve"},
{"difficulty" : 1, "prompt": "It's forty-four divided by two"},
{"difficulty" : 1, "prompt": "It's thirty-six minus fourteen"},
{"difficulty" : 1, "prompt": "It's sixteen plus six"}
]
},
"W": {
"numeric_value": 23,
"options": [
{"difficulty" : 1, "prompt": "It's forty-six divided by two"},
{"difficulty" : 1, "prompt": "It's thirty minus seven"},
{"difficulty" : 1, "prompt": "It's one less than two dozen"},
{"difficulty" : 1, "prompt": "It's sixteen plus seven"}
]
},
"X": {
"numeric_value": 24,
"options": [
{"difficulty" : 1, "prompt": "It's four times six"},
{"difficulty" : 1, "prompt": "It's double twelve"},
{"difficulty" : 1, "prompt": "It's six times four"},
{"difficulty" : 1, "prompt": "It's half of forty-eight"},
{"difficulty" : 1, "prompt": "It's seventy-two divided by three"},
{"difficulty" : 1, "prompt": "It's thirty-six minus twelve"},
{"difficulty" : 1, "prompt": "It's two dozen"}
]
},
"Y": {
"numeric_value": 25,
"options": [
{"difficulty" : 1, "prompt": "It's five times five"},
{"difficulty" : 1, "prompt": "It's five squared"},
{"difficulty" : 1, "prompt": "It's two and a half times ten"},
{"difficulty" : 1, "prompt": "It's seventy-five minus fifty"},
{"difficulty" : 1, "prompt": "It's thirty minus five"},
{"difficulty" : 1, "prompt": "It's one-hundred divided by four"}
]
},
"Z": {
"numeric_value": 26,
"options": [
{"difficulty" : 1, "prompt": "It's thirteen times two"},
{"difficulty" : 1, "prompt": "It's two dozen plus two"},
{"difficulty" : 1, "prompt": "It's half of fourty-six"},
{"difficulty" : 1, "prompt": "It's four squared and ten"},
{"difficulty" : 1, "prompt": "It's 2.25 years in months"},
{"difficulty" : 1, "prompt": "It's thirty minus four"}
]
}
}
var PUZZLE_OFFSET_AMOUNT = 14;
var PUZZLE_SOLUTION="ABOMINABLE SNOWMAN";
if (parse_get_parameter("puzzle") != "Not found") {
PUZZLE_SOLUTION = caesar_cipher(parse_get_parameter("puzzle",true), -1 * PUZZLE_OFFSET_AMOUNT);
}
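// Write the reference table that maps every letter to its numeric value.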
function put_table() {
var table_columns = 6;
document.write("<table style='width:50%'><tr>");
var column_index = 0;
for (option in LETTER_OPTIONS){
document.write(
"<td style='border:1px solid black;text-align:center'>" + option + " - "
+ LETTER_OPTIONS[option]["numeric_value"] + "</td>");
column_index = (column_index + 1) % table_columns;
if(column_index == 0){
document.write("</tr><tr>");
}
}
document.write("</tr></table>")
}
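// Write a <select> listing every letter and its value for the answer slot with the given id.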
function generate_letter_dropdown(id) {
var identifier = "letter" + id;
document.write("<select id='"+identifier+"' name='" + identifier + "' onchange='populate_solution()'>");
document.write("<option></option>");
for(letter in LETTER_OPTIONS){
document.write("<option value='" + letter + "'>"
+ LETTER_OPTIONS[letter]["numeric_value"] + " - "
+ letter + "</option>");
}
document.write("</select>");
}
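// For each letter of the puzzle solution, write a row pairing a random clue with an answer dropdown.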
function put_expressions() {
document.write("<table style='width:50%'>");
var expression_count = 0;
for (letter_index in PUZZLE_SOLUTION){
letter = PUZZLE_SOLUTION[letter_index];
if(LETTER_OPTIONS[letter] != undefined) {
document.write("<tr><td>");
document.write(random_choice(LETTER_OPTIONS[letter]["options"])["prompt"]);
document.write("</td><td>");
generate_letter_dropdown(expression_count);
document.write("</td></tr>");
expression_count += 1;
}
}
document.write("</table>");
}
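// Read the current dropdown selections and reveal the chosen letters (underscores for empty slots).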
function populate_solution() {
var revealed_solution = "";
for(var i=0; i < PUZZLE_SOLUTION.length; i++) {
var item = document.getElementById("letter" + i);
if(item != undefined && item.selectedIndex != 0) {
revealed_solution += " " + item[item.selectedIndex].value;
} else {
revealed_solution += " _";
}
}
document.getElementById("revealed_solution").innerHTML = revealed_solution;
}
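// Strip non-letters, Caesar-encode the phrase and reload the page with it as the puzzle parameter.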
function redirect_to_new_puzzle(puzzle) {
var puzzle_proposal = puzzle.replace(/[^a-z]/gi ,"");
window.location = "./?puzzle=" + caesar_cipher(puzzle_proposal, PUZZLE_OFFSET_AMOUNT);
}
function submit_form() {
var puzzle_proposal = document.getElementById("puzzle_field").value;
redirect_to_new_puzzle(puzzle_proposal);
}
function goto_random_puzzle() {
redirect_to_new_puzzle(random_choice(PUZZLE_PACK));
}
setInterval(populate_solution, 1000);
| 4d04d5813d767da9425d9c2e1ce62c00ca80aa80 | [
"JavaScript"
] | 1 | JavaScript | raymondberg/math-sheets | 7127f43724fcb0bddd5facfe8d748b0b3c297dfe | 540e08a643a1e9b79d24e2cdb1dd55c771b0fa05 | |
refs/heads/master | <file_sep>import React, {Component} from 'react';
import {AsyncStorage, View} from 'react-native';
import {Button} from 'react-native-elements';
import styles from './styles'
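// Shows a Log Out button when needed; logging out removes the stored qrid and navigates back to Home.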
export default class Logout extends Component {
constructor(props) {
super(props);
this.logoutHandler = this.logoutHandler.bind(this);
}
renderButton(){
if (this.props.needLogout) {
return (<Button
onPress={this.logoutHandler}
title="Log Out"
color="#841584"
accessibilityLabel="Log out"
/>)
}
}
logoutHandler() {
if (this.props.needLogout) {
AsyncStorage.removeItem('qrid', (err, res) => {
if (!err) {
console.log('no more id');
this.props.navigation.navigate('Home')
}
})
}
}
render() {
return (
<View style={styles.buttonContainer}>
{this.renderButton()}
</View>
)
}
}
<file_sep>import React, {Component} from 'react';
import {View, AsyncStorage, Image} from 'react-native';
import {Text, Card, Input, Button} from 'react-native-elements';
import NfcManager, {Ndef, NfcTech, ByteParser} from 'react-native-nfc-manager';
import Icon from 'react-native-vector-icons/FontAwesome';
import styles from './styles';
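// Registration flow: the user first enters a name, then taps a bank card; the card's NFC id is stored together with the name in AsyncStorage.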
export default class Registration extends Component {
constructor(props) {
super(props);
this.clickHandler = this.clickHandler.bind(this)
this.renderConfirmation = this.renderConfirmation.bind(this)
this.fillUsername = this.fillUsername.bind(this)
this.state = {
fillName: true,
scannedID: '',
name: ''
}
}
clickHandler() {
this.setState({fillName: false})
}
componentDidMount(){
NfcManager.isSupported()
.then(supported => {
console.log('test' + supported)
if (supported) {
NfcManager.start()
.then(result => {
console.log('start OK', result);
})
.catch(error => {
console.warn('device does not support nfc!');
})
}
});
NfcManager.registerTagEvent(tag => {
console.log('Tag Discovered');
//console.log(tag.id)
if (tag.id) {
this.setState({scannedID: tag.id})
let user = { name: this.state.name, nfcid: this.state.scannedID}
user = JSON.stringify(user)
AsyncStorage.setItem('user', user, (err, res) => {
if (err) {
console.log(err)
} else {
console.log(user)
AsyncStorage.removeItem('qrid', (err, res) => {
if (err) {
console.log(err)
} else {
setTimeout(() => this.props.navigation.navigate('Home'), 1000)
}
})
}
})
} else {
this.setState({scannedID: 'No ID Detected'})
}
});
}
componentWillUnmount() {
NfcManager.unregisterTagEvent();
NfcManager.stop();
}
renderConfirmation() {
if (this.state.scannedID.length > 0) {
return (
<Card
title='Successfully Registered Card'
image={{uri: 'https://thumbs.gfycat.com/ShyCautiousAfricanpiedkingfisher-max-1mb.gif'}}
imageProps={{resizeMode: 'contain'}}>
</Card>
)
} else {
return <View />
}
}
fillUsername() {
if (this.state.fillName) {
return (
<View style={styles.container}>
<Input
placeholder='Username'
leftIcon={{ type: 'font-awesome', name: 'user-circle' }}
style={styles.input}
onChangeText={(name) => this.setState({name})}
value={this.state.name}
/>
<Button
color="#ff300"
accessibilityLabel="Submit"
icon={
<Icon
name='arrow-right'
size={15}
color='white'
/>
}
title='Submit'
onPress={this.clickHandler}
buttonStyle = {styles.button}
containerStyle = {styles.buttonContainer}
/>
</View>
)
} else {
return (
<View>
<Card>
<Text h4 style={styles.text}>Place your bank card on your phone</Text>
{this.renderConfirmation()}
</Card>
</View>
)
}
}
render(){
return (
<View>
{this.fillUsername()}
</View>
)
}
}
<file_sep>import React, {Component} from 'react';
import {View, AsyncStorage} from 'react-native';
import {Card, Button, Header, Icon, Text, ListItem} from 'react-native-elements';
import NFCHandler from '../components/NFCHandler';
import Logout from '../components/Logout';
import Modal from "react-native-modal";
import styles from './styles';
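// Verification screen: polls the backend every two seconds for a pending transaction for the stored user and shows it in a modal, where it can be rejected or confirmed by tapping the card.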
export default class NFCScreen extends Component {
constructor(props) {
super(props);
this.hideModal = this.hideModal.bind(this);
this.state = {hasId: false, id: '', user: '', transaction: '', modal: false}
}
componentDidMount() {
AsyncStorage.multiGet(['user','qrid'], (err, res) => {
if (err) {
console.log('error'+ err)
};
if (res) {
console.log(res)
let user = res[0]
user = user[1]
user = JSON.parse(user)
let qrid = res[1];
qrid = qrid[1]
console.log('User: ' + user.name + 'Qrid: ' + qrid)
if (qrid){
this.setState({hasId: true, id: qrid})
}
if (user){
this.setState({user: user.name})
}
}
})
this.timer = setInterval(()=> this.poll(), 2000);
}
poll() {
if (this.state.user.length > 0) {
const url = 'https://finul-api.herokuapp.com/auth_poll/' + this.state.user;
fetch(url)
.then((response) => response.json())
.then((res) => {
if (res) {
console.log(res.transaction)
if (res.transaction !== null) {
console.log('Result: ' + res.transaction)
clearInterval(this.timer)
this.setState({transaction: res.transaction})
if (typeof this.state.transaction === 'object') {
this.setState({modal: true})
}
}
}
}).catch(err => console.log(err))
}
}
componentWillUnmount() {
clearInterval(this.timer)
    this.timer = null; // drop the reference to the cleared polling interval
}
hideModal(){
this.setState({modal: false})
this.timer = setInterval(()=> this.poll(), 2000);
const url = `https://finul-api.herokuapp.com/reject_transaction/${this.state.user}`
fetch(url).then((response) => response.json())
.then((res) => {
console.log("Forget everything")
})
}
render() {
const {navigation} = this.props
console.log('trans at render' + this.state.transaction)
const {amount, date, receiver_id, time, description} = this.state.transaction;
const list = [
{name: 'Amount', val:amount},
{name: 'Received On', val:`${date}, on ${time}`},
{name: 'Given To', val: receiver_id},
{name: 'Notes', val: description}
]
return (
<View>
<Header containerStyle={styles.headerContainer} centerComponent={{ text: 'Verification', style: styles.header }} />
<NFCHandler navigation={navigation}/>
<Modal isVisible={this.state.modal}>
<View>
<Card
title='Incoming Transaction'
>
{
list.map((l, i) => (
<ListItem
key={i}
title={l.name}
subtitle={l.val}
titleStyle={{ fontWeight: 'bold' }}
/>
))
}
<Text >You can tap your bank card on this phone to continue, or reject the transaction by pressing the button below</Text>
<Button
icon={<Icon name='code' color='#fff' />}
backgroundColor='#5c1010'
buttonStyle={{borderRadius: 0, marginLeft: 0, marginRight: 0, marginBottom: 0}}
onPress={this.hideModal}
title='REJECT THE TRANSACTION' />
</Card>
</View>
</Modal>
<Logout needLogout={this.state.hasId} navigation={navigation}/>
</View>
)
}
}
<file_sep>import React, {Component} from 'react';
import {View, AsyncStorage, Button} from 'react-native';
import { Text, Card } from 'react-native-elements';
import NfcManager, {Ndef, NfcTech, ByteParser} from 'react-native-nfc-manager';
import styles from './styles';
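// Waits for an NFC tag, checks that its id matches the one stored at registration, and navigates to the OTP screen on a match.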
export default class NFCHandler extends Component {
constructor(props) {
super(props);
this.state = {
NFCSupported: false,
scannedNFC: false,
scannedID: '',
name: '',
nfcid: '',
msg: ''
}
}
renderConfirmation() {
if (this.state.scannedNFC) {
if (this.state.msg === 'NFC Detected, redirecting...') {
return (
<View>
<Card
title='Scan Successful!'
image={{uri: 'https://thumbs.gfycat.com/ShyCautiousAfricanpiedkingfisher-max-1mb.gif'}}
imageProps={{resizeMode: 'contain'}}>
</Card>
</View>
)
} else {
return (
<View>
<Card
title='Try Again!'
image={{uri: 'https://i.4pcdn.org/pol/1495138675713.gif'}}
imageProps={{resizeMode: 'contain'}}>
</Card>
</View>
)
}
}
if (this.state.NFCSupported) {
return (
<Text style={styles.text}> NFC is supported </Text>
)
}
}
componentDidMount(){
AsyncStorage.getItem('user', (err, res) => {
if (err) {
console.log(err)
}
if (res) {
let info = JSON.parse(res);
this.setState({name: info.name, nfcid: info.nfcid})
console.log(this.state.nfcid)
}
})
NfcManager.isSupported()
.then(supported => {
console.log('test' + supported)
if (supported) {
this.setState({NFCSupported: true});
NfcManager.start()
.then(result => {
console.log('start OK', result);
})
.catch(error => {
console.warn('device does not support nfc!');
this.setState({supported: false});
})
}
});
console.log('testing tag')
NfcManager.registerTagEvent(tag => {
console.log('Tag Discovered');
this.setState({scannedNFC: true})
//console.log(tag.id)
if (tag.id) {
console.log(tag.id)
if (this.state.nfcid !== tag.id) {
this.setState({msg: 'Your card did not match our records, refreshing...'});
setTimeout(() => {
this.props.navigation.navigate('Home')
}, 3000)
} else {
this.setState({scannedID: tag.id, msg: 'NFC Detected, redirecting...'})
setTimeout(() => {
this.props.navigation.navigate('Otp')
}, 2000)
}
} else {
this.setState({scannedID: 'No ID Detected'})
}
});
}
componentWillUnmount() {
NfcManager.unregisterTagEvent();
NfcManager.stop();
this.setState({scannedNFC: false})
}
render(){
return (
<View style={styles.container}>
<Card>
<Text h2 style={styles.text}>Place your bank card on your phone</Text>
{this.renderConfirmation()}
</Card>
</View>
)
}
}
<file_sep>import React, {Component} from 'react';
import {StyleSheet} from 'react-native';
const styles = StyleSheet.create({
container: {
marginTop: 40,
justifyContent: 'center',
alignItems: 'center',
},
text: {
textAlign: 'center',
margin:10,
},
input: {
height: 40,
borderColor: 'gray',
borderWidth: 1,
marginBottom: 30,
marginTop: 'auto'
},
button: {
backgroundColor: '#940000',
width: 250,
height: 40,
borderRadius: 2,
},
buttonContainer: {
height:100,
alignItems: 'center',
padding: 30,
marginTop: 90
}
})
export default styles;
<file_sep># inu_mobile
#### Mobile app for CIMB App challenge
<file_sep>import {StyleSheet} from 'react-native';
const styles = StyleSheet.create({
container: {
flex: 1,
margin: 10,
justifyContent: 'center',
alignItems: 'center',
},
header: {
marginBottom: 25,
fontSize: 30,
fontWeight: 'bold',
alignItems: 'stretch',
color: '#fff',
},
headerContainer: {
backgroundColor: '#c30101',
}
})
export default styles;
| 5f6b204c59732151eef61de4a3858f6c9104b706 | [
"JavaScript",
"Markdown"
] | 7 | JavaScript | dschuan/inu_mobile | ad085174cb08ff0271303ceb325720c4cb7e3906 | c34e9c9a0789f643c73d3299328454ae0961eb32 | |
refs/heads/master | <repo_name>HarlockOfficial/dva232_labs<file_sep>/lab1/app/src/main/java/se/mdh/student/dva232/lab1/ConversionRates.kt
package se.mdh.student.dva232.lab1
import android.app.Activity
import android.os.Bundle
import android.view.View
import android.widget.*
import kotlin.math.round
class ConversionRates: Activity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.rates_conversion)
val arr:Array<String>? = intent.getStringArrayExtra("se.mdh.student.dva232.lab1.Rates")
if(arr==null || arr.isEmpty()) { //should not happen, almost impossible, but IDK
Toast.makeText(this, getText(R.string.resource_error), Toast.LENGTH_SHORT).show()
this.onBackPressed()
return
}
val currencyTable: TableLayout = findViewById(R.id.currency_grid)
val spinner: Spinner = findViewById(R.id.currency_selector)
ArrayAdapter.createFromResource(this,
R.array.currencies,
android.R.layout.simple_spinner_item
).also { adapter ->
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item)
spinner.adapter = adapter
}
spinner.onItemSelectedListener = object: AdapterView.OnItemSelectedListener{
override fun onItemSelected(parent: AdapterView<*>, view: View?, pos: Int, id: Long) {
currencyTable.removeAllViewsInLayout()
val startCurrency: CurrencyType = CurrencyType.valueOf(parent.getItemAtPosition(pos).toString())
for(currency:String in arr){
if(currency == startCurrency.toString())
continue
val row:View = layoutInflater.inflate(R.layout.conversion_table_row, currencyTable,false)
(row.findViewById(R.id.currency_code) as TextView).text = currency
(row.findViewById(R.id.currency_change) as TextView).text = "%,.5f".format(
ChangeRate.getExchange(startCurrency, CurrencyType.valueOf(currency)))
currencyTable.addView(row)
}
}
override fun onNothingSelected(p0: AdapterView<*>?) {}
}
}
}<file_sep>/lab2/app/src/main/java/se/mdh/student/dva232/lab2/Geocoder.kt
package se.mdh.student.dva232.lab2
import android.location.Location
import androidx.lifecycle.ViewModel
import org.json.JSONException
import org.json.JSONObject
import java.net.URL
import java.util.*
//not all manufacturers implement Geocoder,
//so the required reverse geocoding is implemented here instead
class Geocoder: ViewModel(){
companion object {
fun getLocale(location: Location): Locale {
return try {
val json = JSONObject(URL("https://nominatim.openstreetmap.org/reverse?format=json&lat=" +
location.latitude + "&lon=" +
location.longitude + "&zoom=18").readText())
Locale.getAvailableLocales().first {
it.country == json.getJSONObject("address").
getString("country_code").toUpperCase(Locale.ROOT)
}
} catch (unused: NoSuchElementException) {
Locale.GERMANY
} catch (unused: JSONException) {
Locale.GERMANY
}
}
}
}<file_sep>/lab1/app/src/main/java/se/mdh/student/dva232/lab1/CurrencyType.kt
package se.mdh.student.dva232.lab1
enum class CurrencyType {
EUR, SEK, USD, GBP, CNY, JPY, KRW
}<file_sep>/lab1/app/src/main/java/se/mdh/student/dva232/lab1/CurrencyConverter.kt
package se.mdh.student.dva232.lab1
import kotlin.math.round
class CurrencyConverter(private var amount: Double,private var inputCurrency: CurrencyType, private var outputCurrency: CurrencyType) {
fun convert(): String {
return "%,.2f".format(amount * ChangeRate.getExchange(inputCurrency,outputCurrency))
}
}<file_sep>/lab1/app/src/main/java/se/mdh/student/dva232/lab1/MainActivity.kt
package se.mdh.student.dva232.lab1
import android.content.Intent
import android.os.Bundle
import android.widget.*
import androidx.appcompat.app.AppCompatActivity
class MainActivity : AppCompatActivity() {
val inputCurrency = ItemSelectionEvent(input = true, mainActivity = this)
val outputCurrency = ItemSelectionEvent(input = false, mainActivity = this)
lateinit var output: TextView
private set
lateinit var input: EditText
private set
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
input = findViewById(R.id.user_input)
output = findViewById(R.id.output)
//add currencies to spinners
var spinner: Spinner = findViewById(R.id.dropdown_input)
ArrayAdapter.createFromResource(this,
R.array.currencies,
android.R.layout.simple_spinner_item
).also { adapter ->
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item)
spinner.adapter = adapter
}
spinner.onItemSelectedListener = inputCurrency
spinner = findViewById(R.id.dropdown_output)
ArrayAdapter.createFromResource(this,
R.array.currencies,
android.R.layout.simple_spinner_item
).also { adapter ->
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item)
spinner.adapter = adapter
}
spinner.onItemSelectedListener = outputCurrency
input.addTextChangedListener(TextChangeEvent(mainActivity = this))
findViewById<Button>(R.id.to_screen_two).setOnClickListener {
startActivity(Intent(this.baseContext, ConversionRates::class.java).apply {
putExtra("se.mdh.student.dva232.lab1.Rates", (resources.getStringArray(R.array.currencies) as Array<String>))
})
}
}
}<file_sep>/lab2/app/src/main/java/se/mdh/student/dva232/lab2/Plot.kt
package se.mdh.student.dva232.lab2
import android.app.Activity
import android.graphics.Color
import android.os.Bundle
import android.widget.ArrayAdapter
import android.widget.CalendarView
import android.widget.Spinner
import android.widget.Toast
import com.androidplot.xy.*
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.launch
import org.json.JSONObject
import java.text.FieldPosition
import java.text.Format
import java.text.ParsePosition
import java.text.SimpleDateFormat
import java.util.*
//using https://github.com/halfhp/androidplot
//as plot library
class Plot: Activity() {
private lateinit var plot: XYPlot
private lateinit var itemStartSelectionEvent: PlotItemSelectionEvent
private lateinit var itemEndSelectionEvent: PlotItemSelectionEvent
private lateinit var dateFromChangeListener: PlotDateChangeListener
private lateinit var dateToChangeListener: PlotDateChangeListener
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.plot)
val arr: String? = intent.getStringExtra("se.mdh.student.dva232.lab2.Rates")
if (arr == null || arr.isEmpty()) { //should not happen, almost impossible, but IDK
Toast.makeText(this, getText(R.string.resource_error), Toast.LENGTH_SHORT).show()
this.onBackPressed()
return
}
val array: List<String> = arr.substring(1, arr.length - 1).split(",")
val default: Int = intent.getIntExtra("se.mdh.student.dva232.lab2.Default", 0)
plot = findViewById(R.id.plot)
plot.layoutManager.remove(plot.legend)
//adding listeners to get the selected day
val dateFrom: CalendarView = findViewById(R.id.from)
val dateTo: CalendarView = findViewById(R.id.to)
val calendar = Calendar.getInstance()
dateFromChangeListener = PlotDateChangeListener(this, "${calendar.get(Calendar.YEAR)-2}-${calendar.get(Calendar.MONTH)+1}-${calendar.get(Calendar.DAY_OF_MONTH)}")
dateToChangeListener = PlotDateChangeListener(this, "${calendar.get(Calendar.YEAR)}-${calendar.get(Calendar.MONTH)+1}-${calendar.get(Calendar.DAY_OF_MONTH)}")
dateFrom.setOnDateChangeListener(dateFromChangeListener)
dateTo.setOnDateChangeListener(dateToChangeListener)
var spinner: Spinner = findViewById(R.id.start_currency)
ArrayAdapter(
this,
android.R.layout.simple_spinner_item,
array.toMutableList()
).also { adapter ->
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item)
spinner.adapter = adapter
}
spinner.setSelection(default, false)
itemStartSelectionEvent = PlotItemSelectionEvent(this, spinner.selectedItem.toString())
spinner.onItemSelectedListener = itemStartSelectionEvent
spinner = findViewById(R.id.end_currency)
ArrayAdapter(
this,
android.R.layout.simple_spinner_item,
array.toMutableList()
).also { adapter ->
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item)
spinner.adapter = adapter
}
spinner.setSelection(0, false)
itemEndSelectionEvent = PlotItemSelectionEvent(this, spinner.selectedItem.toString())
spinner.onItemSelectedListener = itemEndSelectionEvent
plot()
}
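    // Fetch historical rates for the selected date range and redraw the plot as the end/start currency ratio.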
fun plot() {
GlobalScope.launch(Dispatchers.IO) {
if(dateToChangeListener.date == dateFromChangeListener.date){
runOnUiThread{
Toast.makeText(this@Plot, [email protected](R.string.invalid_date_selected), Toast.LENGTH_SHORT).show()
}
return@launch
}
val data: JSONObject? = ChangeRate.getPlotData(
dateFromChangeListener.date,
dateToChangeListener.date
)
if (data!!.length() == 0) {
runOnUiThread {
Toast.makeText(
this@Plot,
getString(R.string.get_plot_data_error),
Toast.LENGTH_SHORT
).show()
}
return@launch
}
val startValue = itemStartSelectionEvent.selectedItem.trim()
val endValue = itemEndSelectionEvent.selectedItem.trim()
if(startValue == endValue){
runOnUiThread {
Toast.makeText(this@Plot, [email protected](R.string.invalid_currencies_selected), Toast.LENGTH_SHORT).show()
}
return@launch
}
val currency: SortedMap<String, Double> = TreeMap()
for(key in data.keys()){
val obj = data.getJSONObject(key)
val currency1: Double = if(startValue!="EUR") {
obj.getDouble(startValue)
}else{
1.0
}
val currency2: Double = if(endValue!="EUR") {
obj.getDouble(endValue)
}else{
1.0
}
currency[key] = (currency2/currency1)
}
//Starting plot stuff
plot.clear()
val series: XYSeries = PlotXYSeries(currency, "")
val seriesFormat = LineAndPointFormatter(Color.RED, Color.GREEN, Color.BLUE, null)
plot.addSeries(series, seriesFormat)
plot.graph.getLineLabelStyle(XYGraphWidget.Edge.BOTTOM).format = object : Format() {
override fun format(obj: Any, toAppendTo: StringBuffer, pos: FieldPosition): StringBuffer {
val date = Date((obj as Double).toLong())
val formatter = SimpleDateFormat("yyyy-MM-dd", Locale.getDefault())
return StringBuffer(formatter.format(date))
}
override fun parseObject(source: String?, pos: ParsePosition?): Any? {
return null
}
}
plot.graph.getLineLabelStyle(XYGraphWidget.Edge.LEFT).format = object: Format(){
override fun format(obj: Any, toAppendTo: StringBuffer, pos: FieldPosition): StringBuffer {
return StringBuffer(String.format("%.3f", (obj as Double)))
}
override fun parseObject(source: String, pos: ParsePosition): Any? {
return null
}
}
plot.redraw()
}
}
}<file_sep>/lab2/app/src/main/java/se/mdh/student/dva232/lab2/ChangeRate.kt
package se.mdh.student.dva232.lab2
import androidx.lifecycle.ViewModel
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.launch
import org.json.JSONException
import org.json.JSONObject
import java.net.MalformedURLException
import java.net.URL
class ChangeRate: ViewModel(){
//https://www.countryflags.io/
companion object {
private var currencyExchangeRates: HashMap<String, Double> = HashMap()
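        // Refresh the EUR-based exchange rates from exchangeratesapi.io on a background coroutine.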
fun updateExchangeRates() {
GlobalScope.launch(Dispatchers.IO){
try {
val tmp = JSONObject(
URL("https://api.exchangeratesapi.io/latest").readText()
)
for (key: String in tmp.getJSONObject("rates").keys()) {
currencyExchangeRates[key.trim()] = tmp.getJSONObject("rates").getDouble(key)
}
currencyExchangeRates["EUR"] = 1.0
} catch (unused: JSONException) {
} catch (unused: MalformedURLException){}
}
}
fun getCurrencyList(): MutableSet<String>? {
return if(currencyExchangeRates.keys.size>0) currencyExchangeRates.keys.toSortedSet() else null
}
fun getExchange(start: String, end: String): Double {
if(currencyExchangeRates.isEmpty()){
GlobalScope.launch (Dispatchers.IO){
updateExchangeRates()
}
return 0.0
}
val change1:Double = currencyExchangeRates[start.trim()] ?: return 0.0
val change2:Double = currencyExchangeRates[end.trim()] ?: return 0.0
return change2/change1
}
fun getPlotData(dateFrom: String, dateTo: String): JSONObject?{
return try {
JSONObject(
URL(
"https://api.exchangeratesapi.io/history?start_at=" +
"$dateFrom&end_at=$dateTo"
).readText()
).getJSONObject("rates")
}catch (unused: JSONException){
null
}catch (unused: MalformedURLException){
null
}
}
}
}<file_sep>/lab2/app/src/main/java/se/mdh/student/dva232/lab2/CurrencyConverter.kt
package se.mdh.student.dva232.lab2
class CurrencyConverter(private var amount: Double,private var inputCurrency: String, private var outputCurrency: String) {
fun convert(): String {
return "%,.2f".format(amount * ChangeRate.getExchange(inputCurrency,outputCurrency))
}
}<file_sep>/lab2/app/src/main/java/se/mdh/student/dva232/lab2/ItemSelectionEvent.kt
package se.mdh.student.dva232.lab2
import android.view.View
import android.widget.AdapterView
import java.lang.NumberFormatException
class ItemSelectionEvent(private val input: Boolean, private val mainActivity: MainActivity): AdapterView.OnItemSelectedListener {
lateinit var actCurrency: String
private set
override fun onItemSelected(parent: AdapterView<*>, view: View?, pos: Int, id: Long) {
try{
val inputCurrency: String
val outputCurrency: String
val item : String = parent.getItemAtPosition(pos) as String
if(input) {
actCurrency = item
inputCurrency = actCurrency
outputCurrency = mainActivity.outputCurrency.actCurrency
}else {
actCurrency = item
inputCurrency = mainActivity.inputCurrency.actCurrency
outputCurrency = actCurrency
}
val value: Double = mainActivity.input.text.toString().toDouble()
mainActivity.output.text = CurrencyConverter(value, inputCurrency , outputCurrency).convert()
}catch (unused: NumberFormatException) {
return
}catch (unused: UninitializedPropertyAccessException){
return
}
}
override fun onNothingSelected(parent: AdapterView<*>?) {}
}<file_sep>/lab2/app/src/main/java/se/mdh/student/dva232/lab2/TextChangeEvent.kt
package se.mdh.student.dva232.lab2
import android.text.Editable
import android.text.TextWatcher
class TextChangeEvent(private val mainActivity: MainActivity): TextWatcher {
override fun beforeTextChanged(p0: CharSequence?, p1: Int, p2: Int, p3: Int) {}
override fun onTextChanged(p0: CharSequence?, p1: Int, p2: Int, p3: Int) {}
override fun afterTextChanged(editable: Editable?) {
if(editable.isNullOrBlank()){
mainActivity.output.text = ""
return
}
try{
val value = editable.toString().toDouble()
val inputCurrency: String = mainActivity.inputCurrency.actCurrency
val outputCurrency: String = mainActivity.outputCurrency.actCurrency
mainActivity.output.text = CurrencyConverter(value, inputCurrency, outputCurrency).convert()
}catch (unused: NumberFormatException){
return
}
}
}<file_sep>/lab2/app/src/main/java/se/mdh/student/dva232/lab2/MainActivity.kt
package se.mdh.student.dva232.lab2
import android.Manifest
import android.content.Intent
import android.content.pm.PackageManager
import android.location.Location
import android.location.LocationListener
import android.location.LocationManager
import android.os.Bundle
import android.os.Looper
import android.widget.*
import androidx.activity.result.contract.ActivityResultContracts
import androidx.appcompat.app.AppCompatActivity
import androidx.core.app.ActivityCompat
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.delay
import kotlinx.coroutines.launch
import java.util.*
class MainActivity : AppCompatActivity() {
val inputCurrency = ItemSelectionEvent(input = true, mainActivity = this)
val outputCurrency = ItemSelectionEvent(input = false, mainActivity = this)
lateinit var output: TextView
private set
lateinit var input: EditText
private set
private var answered = false
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
input = findViewById(R.id.user_input)
output = findViewById(R.id.output)
//ask for localization permissions
checkPermissions()
val spinner1: Spinner = findViewById(R.id.dropdown_input)
GlobalScope.launch(Dispatchers.IO) {
//get updated exchange rates
ChangeRate.updateExchangeRates()
var tmp: MutableList<String>? = ChangeRate.getCurrencyList()?.toMutableList()
while (tmp == null) {
delay(10)
tmp = ChangeRate.getCurrencyList()?.toMutableList()
}
//add currencies to spinners
GlobalScope.launch(Dispatchers.Main) {
ArrayAdapter(
this@MainActivity,
android.R.layout.simple_spinner_item,
tmp
).also { adapter ->
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item)
spinner1.adapter = adapter
}
spinner1.onItemSelectedListener = inputCurrency
val spinner2: Spinner = findViewById(R.id.dropdown_output)
ArrayAdapter(
this@MainActivity,
android.R.layout.simple_spinner_item,
tmp
).also { adapter ->
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item)
spinner2.adapter = adapter
}
spinner2.onItemSelectedListener = outputCurrency
//set listener for input
input.addTextChangedListener(TextChangeEvent(mainActivity = this@MainActivity))
}
//wait for permissions check
while(!answered){
delay(10)
}
//get location
var provider: String? = null
if(ActivityCompat.checkSelfPermission(this@MainActivity,
Manifest.permission.ACCESS_COARSE_LOCATION) == PackageManager.PERMISSION_GRANTED){
provider = LocationManager.NETWORK_PROVIDER
}
if (ActivityCompat.checkSelfPermission(this@MainActivity,
Manifest.permission.ACCESS_FINE_LOCATION) == PackageManager.PERMISSION_GRANTED) {
provider = LocationManager.GPS_PROVIDER
}
var actCurrency = "EUR"
            if(provider != null) { //allowed to use geolocation
val locationManager: LocationManager =
getSystemService(LOCATION_SERVICE) as LocationManager
var actualLocation: Location? = null
                //requestSingleUpdate is deprecated in API 30, so request continuous updates and remove the listener after the first fix
locationManager.requestLocationUpdates(provider, 100L, 0F, object : LocationListener {
override fun onLocationChanged(location: Location?) {
if (location == null) {
return
}
actualLocation = location
locationManager.removeUpdates(this)
}
override fun onStatusChanged(p0: String?, p1: Int, p2: Bundle?) {}
override fun onProviderEnabled(p0: String?) {}
override fun onProviderDisabled(p0: String?) {}
}, Looper.getMainLooper())
while (actualLocation == null) {
delay(10)
}
//get currency code
actCurrency = Currency.getInstance(
Geocoder.getLocale(actualLocation!!)
).currencyCode
}
//change the input currency to country currency or EUR
var selected = -1
for (i in 0 until spinner1.count) {
if (spinner1.getItemAtPosition(i).toString() == "EUR") {
selected = i
}
if (spinner1.getItemAtPosition(i).toString() == actCurrency) {
selected = i
break
}
}
runOnUiThread {
spinner1.setSelection(selected)
//set listener for button
findViewById<Button>(R.id.to_screen_two).setOnClickListener {
startActivity(Intent(this@MainActivity, ConversionRates::class.java).apply {
putExtra("se.mdh.student.dva232.lab2.Rates", tmp.toString())
putExtra("se.mdh.student.dva232.lab2.Default", selected)
})
}
findViewById<Button>(R.id.to_screen_three).setOnClickListener {
startActivity(Intent(this@MainActivity, Plot::class.java).apply {
putExtra("se.mdh.student.dva232.lab2.Rates", tmp.toString())
putExtra("se.mdh.student.dva232.lab2.Default", selected)
})
}
}
}
}
private fun checkPermissions(){
        var counter = 1 //callbacks still pending before the last one (two permissions are requested below)
val requestPermissionLauncher = registerForActivityResult(
ActivityResultContracts.RequestPermission()
) { isGranted: Boolean ->
if(counter--<=0){
answered = true
if (!isGranted) {
Toast.makeText(this@MainActivity, getString(R.string.location_disabled), Toast.LENGTH_SHORT).show()
}
}
}
//even if they're 2, they are counted as a single permission
//both belong to geolocation
requestPermissionLauncher.launch(Manifest.permission.ACCESS_COARSE_LOCATION)
requestPermissionLauncher.launch(Manifest.permission.ACCESS_FINE_LOCATION)
}
}<file_sep>/lab2/app/src/main/java/se/mdh/student/dva232/lab2/PlotDateChangeListener.kt
package se.mdh.student.dva232.lab2
import android.widget.CalendarView
class PlotDateChangeListener(private val plotContext: Plot, today: String): CalendarView.OnDateChangeListener {
var date: String = today
private set
override fun onSelectedDayChange(view: CalendarView, year: Int, month: Int, day: Int) {
date = "$year-${month+1}-$day"
plotContext.plot()
}
}<file_sep>/lab1/app/src/main/java/se/mdh/student/dva232/lab1/ChangeRate.kt
package se.mdh.student.dva232.lab1
class ChangeRate {
//https://exchangeratesapi.io/
//https://www.countryflags.io/
companion object {
private val toNonCurrency: Map<CurrencyType, Double> = mapOf(
CurrencyType.EUR to 1000.0,
CurrencyType.USD to 1800.0,
CurrencyType.CNY to 7820.0,
CurrencyType.GBP to 900.0,
CurrencyType.JPY to 123880.0,
CurrencyType.KRW to 1311850.0,
CurrencyType.SEK to 10280.0
)
fun getExchange(start: CurrencyType, end: CurrencyType): Double {
val change1 = toNonCurrency[start] ?: return 0.0
val change2 = toNonCurrency[end] ?: return 0.0
return change2/change1
}
}
}<file_sep>/lab2/app/src/main/java/se/mdh/student/dva232/lab2/ConversionRates.kt
package se.mdh.student.dva232.lab2
import android.app.Activity
import android.os.Bundle
import android.view.View
import android.widget.*
class ConversionRates: Activity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.rates_conversion)
val arr:String? = intent.getStringExtra("se.mdh.student.dva232.lab2.Rates")
if(arr==null || arr.isEmpty()) { //should not happen, almost impossible, but IDK
Toast.makeText(this, getText(R.string.resource_error), Toast.LENGTH_SHORT).show()
this.onBackPressed()
return
}
val array: List<String> = arr.substring(1,arr.length-1).split(",")
val currencyTable: TableLayout = findViewById(R.id.currency_grid)
val spinner: Spinner = findViewById(R.id.currency_selector)
ArrayAdapter(this,
android.R.layout.simple_spinner_item,
array.toMutableList()
).also { adapter ->
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item)
spinner.adapter = adapter
}
spinner.onItemSelectedListener = object: AdapterView.OnItemSelectedListener{
override fun onItemSelected(parent: AdapterView<*>, view: View?, pos: Int, id: Long) {
currencyTable.removeAllViewsInLayout()
val startCurrency: String = parent.getItemAtPosition(pos).toString()
for(currency:String in array){
if(currency == startCurrency)
continue
val row:View = layoutInflater.inflate(R.layout.conversion_table_row,
currencyTable,false)
(row.findViewById(R.id.currency_code) as TextView).text = currency
(row.findViewById(R.id.currency_change) as TextView).text = "%,.5f".format(
ChangeRate.getExchange(startCurrency, currency))
currencyTable.addView(row)
}
}
override fun onNothingSelected(p0: AdapterView<*>?) {}
}
}
}<file_sep>/lab2/app/src/main/java/se/mdh/student/dva232/lab2/PlotXYSeries.kt
package se.mdh.student.dva232.lab2
import com.androidplot.xy.XYSeries
import java.text.SimpleDateFormat
import java.util.*
class PlotXYSeries(private val series: Map<String, Double>, private val title: String): XYSeries {
override fun getTitle(): String {
return title
}
override fun size(): Int {
return series.size
}
override fun getX(index: Int): Number {
val date: Date? = SimpleDateFormat("yyyy-MM-dd", Locale.getDefault()).parse(series.keys.elementAt(index))
return date!!.time
}
override fun getY(index: Int): Number {
//impossible to go out of bounds
return series.getValue(series.keys.elementAt(index))
}
}<file_sep>/lab2/app/src/main/java/se/mdh/student/dva232/lab2/PlotItemSelectionEvent.kt
package se.mdh.student.dva232.lab2
import android.view.View
import android.widget.AdapterView
class PlotItemSelectionEvent(private val plotContext: Plot, selected: String): AdapterView.OnItemSelectedListener {
var selectedItem: String = selected
private set
override fun onItemSelected(parent: AdapterView<*>, view: View?, pos: Int, id: Long) {
selectedItem = parent.getItemAtPosition(pos) as String
plotContext.plot()
}
override fun onNothingSelected(p0: AdapterView<*>?) {}
} | a123b47612cb429c0de5f2d55f38a1ec2149ae72 | [
"Kotlin"
] | 16 | Kotlin | HarlockOfficial/dva232_labs | 33c27a9690646f4efe580aea98ad335f343e6ce1 | d601dbe64bf5422d003ee7695489f04c9d96fef4 | |
refs/heads/master | <repo_name>XGxin/hello<file_sep>/1.c
#include"stdio.h"
int main (void){
printf("hello world \n");
}
| 9e239dbe24c02eb2bcaea2d3386b501093b95577 | [
"C"
] | 1 | C | XGxin/hello | 4f5c86136f8ce1f728a2a3e0bd7168ea962928fb | 796fb55652beee170ce5848651e9996e4f591b9f | |
refs/heads/master | <repo_name>hjj333/vue<file_sep>/src/store/mutation-types.js
// 使用常代替mutation事件类型
export const RECEIVE_ADDRESS = 'receive_address' // 接收地址
export const RECEIVE_CATEGORYS = 'receive_categorys' // 接收食品分类数组
export const RECEIVE_SHOPS = 'receive_shops' // 接收商家数组
export const RECEIVE_USERINFO = 'receive_userInfo' // 接收商家数组
export const RESET_USERINFO = 'reset_userInfo' // 重置用户信息
export const RECEIVE_GOODS = 'receive_goods'
export const RECEIVE_RATINGS = 'receive_ratings'
export const RECEIVE_INFO = 'receive_info'
export const INCREMENT_FOOD_COUNT = 'increment_food_count' // 增加food中的count值
export const DECREMENT_FOOD_COUNT = 'decrement_food_count'
export const CLEAR_CART = 'clear_cart' // 清空购物车
export const RECEIVE_SEARCHSHOPS = 'receive_searchshops' // 搜索商家列表
<file_sep>/src/main.js
import Vue from 'vue'
import moment from 'moment'
import { Button } from 'mint-ui'
import VueLazyload from 'vue-lazyload'
import loading from '../src/assets/images/loading.gif'
import App from './App.vue'
import router from './router'
import store from './store/index'
import './mock/mockServer'
// 自定义过滤器使用日期格式
Vue.filter('date-format', function (value) {
return moment(value).format('YYYY-MM-DD HH-mm-ss')
})
// 注册全局组件标签<mt-button>
Vue.component(Button.name, Button)
// 使用图片懒加载插件
Vue.use(VueLazyload, {
loading
})
Vue.config.productionTip = false
new Vue({
router,
store,
render: h => h(App)
}).$mount('#app')
<file_sep>/vue.config.js
module.exports = {
chainWebpack: config => {
// 修复HMR
config.resolve.symlinks(true)
},
devServer: {
proxy: {
// 匹配所有以'/api'开头的请求路径
'/api': {
target: 'http://localhost:4000', //对应后台服务器接口
changeOrigin: true, //允许跨域
ws: true,
pathRewrite: { //重写路径:去掉路径中开头的'/api'
'^/api': ''
}
}
}
}
}
| f0b15a47ede6af60e1eefa356a3bd814480a4775 | [
"JavaScript"
] | 3 | JavaScript | hjj333/vue | 8b68c427959ddbf89ca9511288351d71f80ad55e | 604d70d2aff656f1a02b112e486cefeb7f0d48a3 | |
refs/heads/master | <file_sep>import gym
import numpy as np
import time
import sys
class Q_learning(object):
def __init__(self):
self.env = gym.make('InvertedPendulum-v2')
self.digitalied_num = 10
self.steps = 200
self.episodes = 1000000
self.goal_ave = 190
self.moving_ave_num = 10
self.first_prob = 0.5
self.action_num = 7 #3~7
self.moving_ave = np.full(self.moving_ave_num,0)
self.q_table = np.random.uniform(low=-1,high=1,size=(self.digitalied_num\
**self.env.observation_space.shape[0],self.action_num))
self.reward_of_episode = 0
self.render_flag = False
self.learning_finish = False
self.alpha = 0.1
self.gamma = 0.9
self.bin_pram = []
pram_low = [-0.5,-0.3,-0.3,-0.3]
pram_high = [0.5,0.3,0.3,0.3]
for i in range(self.env.observation_space.shape[0]):
self.bin_pram.append(np.linspace(pram_low[i],pram_high[i],self.digitalied_num)[1:-1])
#self.bin_pram.append(np.linspace(self.env.observation_space.low[i],self.env.observation_space.high[i],self.digitalied_num)[1:-1])
def digitalie(self,obs):
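        # map the 4-D continuous observation to a single integer index by binning each
        # dimension into digitalied_num buckets and combining the bins base-N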
state = 0
for i in range(self.env.observation_space.shape[0]):
state += np.digitize(obs[i],self.bin_pram[i]) * (self.digitalied_num ** i)
return state
def decide_action(self,next_state,episode):
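        # epsilon-greedy selection: epsilon shrinks with the episode number but is floored at 0.1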
epsilon = self.first_prob * (1/(episode+1))
if epsilon < 0.1:
epsilon = 0.1
if epsilon <= np.random.uniform(0,1):
next_action = np.argmax(self.q_table[next_state])
else:
# next_action = int(round(self.env.action_space.sample()[0]))
next_action = np.random.choice(self.action_num)
return next_action
def update_Q_table(self,next_state,state,action,reward,q_table,done):
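        # tabular Q-learning update: Q(s,a) <- (1-alpha)*Q(s,a) + alpha*(r + gamma*max_a' Q(s',a'))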
if not done:
next_max_q = max(q_table[next_state])
else:
next_max_q = 0
q_table[state,action] = (1 - self.alpha) * (q_table[state,action]) + \
self.alpha * (reward + self.gamma * next_max_q)
return q_table
def run(self):
print()
max_step = 0
for episode in range(self.episodes):
obs = self.env.reset()
state = self.digitalie(obs)
action = np.argmax(self.q_table[state])
self.reward_of_episode = 0
for i in range(self.steps):
if self.render_flag or self.learning_finish:
self.env.render()
observation ,reward, done, info = self.env.step(action-3)
self.reward_of_episode += reward
next_state = self.digitalie(observation)
self.q_table = self.update_Q_table(next_state,state,action,reward,self.q_table,done)
action = self.decide_action(next_state,episode)
state = next_state
if done:
if max_step < i:
max_step = i
self.moving_ave = np.hstack((self.moving_ave[1:],self.reward_of_episode))
sys.stdout.write("\repisode:%5d, reward:%3d, step:%3d, max_step:%3d, average:%3d"%(episode+1,self.reward_of_episode,i+1,max_step+1,self.moving_ave.mean()))
sys.stdout.flush()
time.sleep(0.001)
if self.learning_finish:
self.render_flag = True
break
if self.moving_ave.mean() >= self.goal_ave or max_step > self.goal_ave:
if self.learning_finish:
break
print("\nLearning is finished!!")
print("episode: {}\n".format(episode+1))
self.learning_finish = True
pendulum = Q_learning()
pendulum.run()
<file_sep>import gym
import math
import numpy as np
import sys
import time
env = gym.make('InvertedPendulum-v2')
Render_Flag = True
max_step = 0
for i in range(20000):
env.reset()
Kp = 6.0
Ki = 0.04
Kd = 0.03
goal = 0.0
e = 0.0
e1 = 0.0
e2 = 0.0
action = -0.01
action_old = 0.0
for t in range(200):
if Render_Flag:
env.render()
obs, reward, done, info = env.step(-action)
action_old = action
e2 = e1
e1 = e
e = goal - obs[1]
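        # incremental (velocity-form) PID: new action = old action + Kp*(e-e1) + Ki*e + Kd*((e-e1)-(e1-e2))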
action = action_old + Kp * (e-e1) + Ki * e + Kd * ((e-e1)-(e1-e2))
if done or t >= 199:
if max_step < t:
max_step = t
print("episode:%5d, step:%3d, max:%3d"%(i+1,t+1,max_step+1))
# sys.stdout.write("\repisode:%5d, step:%3d, max:%3d"%(i+1,t+1,max_step+1))
# sys.stdout.flush()
# time.sleep(0.1)
break
<file_sep># mujoco_RL
## version
python 3.7
mujoco 2.0
## overview
Reinforcement learning experiments on the MuJoCo InvertedPendulum environment: tabular Q-learning, a PID-control baseline, and DDQN.
## demo
http://www.youtube.com/watch?v=EgifOjdg_P8
https://youtu.be/QK2n01hk4PY (DDQN)
<file_sep>import gym
import numpy as np
import time
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import Adam
from keras.utils import plot_model
from collections import deque
from keras import backend as K
import tensorflow as tf
import sys
def huberloss(y_true, y_pred):
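    # Huber loss: quadratic for |err| < 1, linear beyond, which keeps gradients bounded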
err = y_true - y_pred
cond = K.abs(err) < 1.0
L2 = 0.5 * K.square(err)
L1 = (K.abs(err) - 0.5)
loss = tf.where(cond,L2,L1)
return K.mean(loss)
class QNetwork:
def __init__(self,learning_rate=0.01, state_size=4,action_size=7,hidden_size=10):
self.action_size = action_size
self.model = Sequential()
self.model.add(Dense(hidden_size,activation='relu',input_dim=state_size))
self.model.add(Dense(hidden_size,activation='relu'))
self.model.add(Dense(action_size,activation='linear'))
self.optimizer = Adam(lr=learning_rate)
self.model.compile(loss=huberloss,optimizer=self.optimizer)
def replay(self, memory, batch_size, gamma, targetQN):
inputs = np.zeros((batch_size, 4))
targets = np.zeros((batch_size, self.action_size))
mini_batch = memory.sample(batch_size)
for i, (state_b, action_b, reward_b, next_state_b) in enumerate(mini_batch):
inputs[i:i+1] = state_b
target = reward_b
if not (next_state_b == np.zeros(state_b.shape)).all(axis=1):
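                # Double DQN target: the online network picks the greedy action,
                # the target network supplies the Q-value estimate for it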
retmainQs = self.model.predict(next_state_b)[0]
next_action = np.argmax(retmainQs)
target = reward_b + gamma * targetQN.model.predict(next_state_b)[0][next_action]
targets[i] = self.model.predict(state_b)
targets[i][action_b] = target
self.model.fit(inputs, targets, epochs=1, verbose=0)
class Memory:
def __init__(self,max_size=1000):
self.buffer = deque(maxlen=max_size)
def add(self,experience):
self.buffer.append(experience)
def sample(self, batch_size):
idx = np.random.choice(np.arange(len(self.buffer)),size=batch_size,replace = False)
return [self.buffer[ii] for ii in idx]
def len(self):
return len(self.buffer)
class Actor:
def get_action(self, state, episode, mainQN):
        epsilon = 0.01 + 0.9 / (1.0 + episode*0.1)
        if epsilon <= np.random.uniform(0,1):
reTargetQs = mainQN.model.predict(state)[0]
action = np.argmax(reTargetQs)
else:
action = np.random.choice(7)
return action
METHOD_STR = "DDQN" #DQN or DDQN
RENDER_FLAG = True
env = gym.make('InvertedPendulum-v2')
num_episodes = 300000
max_number_of_steps = 200
goal_average_reward = 195
num_consecutive_iterations = 10
total_reward_vec = np.zeros(num_consecutive_iterations)
gamma = 0.99
islearnd = False
isrender = False
hidden_size = 16
learning_rate = 0.0001
memory_size = 10000
batch_size = 100
max_step = 0
mainQN = QNetwork(hidden_size=hidden_size, learning_rate=learning_rate)
targetQN = QNetwork(hidden_size=hidden_size, learning_rate=learning_rate)
#plot_model(mainQN.model, to_file='Qnetwork.png', show_shapes=True)
memory = Memory(max_size = memory_size)
actor = Actor()
for episode in range(num_episodes):
env.reset()
state, reward, done, info = env.step(env.action_space.sample())
state = np.reshape(state, [1,4])
episode_reward = 0
targetQN.model.set_weights(mainQN.model.get_weights())
for t in range(max_number_of_steps):
if islearnd and RENDER_FLAG:
env.render()
time.sleep(0.01)
action = actor.get_action(state, episode, mainQN)
next_state, reward, done , info = env.step((action-3))
next_state = np.reshape(next_state,[1,4])
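        # reward shaping: +1 for surviving the full 200 steps, -1 on failure, 0 otherwise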
if t == 199:
reward = 1
elif done:
next_state = np.zeros(state.shape)
reward = -1
else:
reward = 0
episode_reward += 1
memory.add((state,action,reward,next_state))
state = next_state
if (memory.len() > batch_size) and not islearnd:
mainQN.replay(memory,batch_size,gamma,targetQN)
if METHOD_STR=="DQN":
targetQN.model.set_weights(mainQN.model.get_weights())
else:
pass
if done or t >= 199:
if max_step < t:
max_step = t
total_reward_vec = np.hstack((total_reward_vec[1:], episode_reward))
print('{:5d} Episode finished, {:6.2f} steps, ave: {:6.2f}, max: {:4d}'.format(episode,t+1,total_reward_vec.mean(),max_step+1),flush=True)
break
if total_reward_vec.mean() >= goal_average_reward:
if not islearnd:
print('Episode {:5d} train agent successfuly!'.format(episode+1))
islearnd = True
if not isrender:
isrender = True
| 9eace21db6af191dbd716c8c9c578821f1463afa | [
"Markdown",
"Python"
] | 4 | Python | matsumotokoki/mujoco_RL | 815a44afcdacbd2c9decc39e9483caa8088796ba | 2a444f384070c56925801a8ad6b6f8dadcd95c0c | |
refs/heads/master | <repo_name>svinotavr/JavascriptPhaserTutorials<file_sep>/Tutorial files/M13-Point-and-Click/05-collect-items/js/prefabs/Item.js
var PointClk = PointClk || {};
PointClk.Item = function(state, x, y, data) {
Phaser.Sprite.call(this, state.game, x, y, data.asset);
this.game = state.game;
this.state = state;
this.anchor.setTo(0.5);
this.data = data;
};
PointClk.Item.prototype = Object.create(Phaser.Sprite.prototype);
PointClk.Item.prototype.constructor = PointClk.Item;<file_sep>/Tutorial files/M9-RPG/04-customizable-touch-control/js/states/Game.js
var RPG = RPG || {};
RPG.GameState = {
init: function(currentLevel) {
//keep track of the current level
this.currentLevel = currentLevel ? currentLevel : 'map1';
//constants
this.PLAYER_SPEED = 90;
//no gravity in a top-down game
this.game.physics.arcade.gravity.y = 0;
//keyboard cursors
this.cursors = this.game.input.keyboard.createCursorKeys();
},
create: function() {
this.game.onscreenControls = this.game.plugins.add(Phaser.Plugin.OnscreenControls);
this.loadLevel();
},
update: function() {
},
loadLevel: function(){
//create a tilemap object
this.map = this.add.tilemap(this.currentLevel);
//join the tile images to the json data
this.map.addTilesetImage('terrains', 'tilesheet');
//create tile layers
this.backgroundLayer = this.map.createLayer('backgroundLayer');
this.collisionLayer = this.map.createLayer('collisionLayer');
//send background to the back
this.game.world.sendToBack(this.backgroundLayer);
//collision layer should be collisionLayer
this.map.setCollisionBetween(1,16, true, 'collisionLayer');
//resize the world to fit the layer
this.collisionLayer.resizeWorld();
//create player
var playerData = {
//list of items
items: [],
//player stats
health: 25,
attack: 12,
defense: 8,
gold: 100,
//quest
quests: []
};
this.player = new RPG.Player(this, 100, 100, playerData);
//add player to the world
this.add.existing(this.player);
this.initGUI();
},
gameOver: function() {
this.game.state.start('Game', true, false, this.currentLevel);
},
initGUI: function() {
//onscreen controls setup
this.game.onscreenControls.setup(this.player, {
left: true,
right: true,
up: true,
down: true,
upleft: true,
downleft: true,
upright: true,
downright: true,
action: false
})
}
};
<file_sep>/Tutorial files/M11-Hipster-Town-Simulation/07-building-creation/js/states/Game.js
var HTown = HTown || {};
HTown.GameState = {
init: function() {
//game constants
this.STEP =2;
//no gravity in a top-down game
this.game.physics.arcade.gravity.y = 0;
},
create: function() {
//grass floor
this.background = this.add.tileSprite(0,0, 1200, 800, 'grass');
this.game.world.setBounds(0, 0, 1200, 800);
this.buildings = this.add.group();
var house = new HTown.Building(this, 100, 100, {asset: 'house', housing: 100});
this.buildings.add(house);
var farm = new HTown.Building(this, 200, 200, {asset: 'crops', food: 100});
this.buildings.add(farm);
var factory = new HTown.Building(this, 200, 300, {asset: 'factory', jobs: 20});
this.buildings.add(factory);
//create a town
this.town = new HTown.TownModel({}, {population: 100, food: 200, money: 100}, this.buildings);
//update simulation
this.simulationTimer = this.game.time.events.loop(Phaser.Timer.SECOND * this.STEP, this.simulationStep, this);
},
update: function() {
},
simulationStep: function() {
this.town.step();
}
};
<file_sep>/Tutorial files/M9-RPG/12-battle/js/prefabs/Battle.js
var RPG = RPG || {};
RPG.Battle = function(game) {
this.game = game;
};
RPG.Battle.prototype.attack = function(attacker, attacked) {
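  //damage is the attacker's randomised attack roll minus the defender's randomised defense roll, floored at zero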
var damage = Math.max(0, attacker.data.attack * Math.random() - attacked.data.defense * Math.random());
console.log(damage);
attacked.data.health -= damage;
if(attacked.data.health <= 0) {
attacked.kill();
}
};<file_sep>/Tutorial files/M8-Veggies-vs-Zombies-updated-2015-07-03/14-button-bar/js/states/Game.js
var Veggies = Veggies || {};
Veggies.GameState = {
init: function(currentLevel) {
//keep track of the current level
this.currentLevel = currentLevel ? currentLevel : 'level1';
//constants
this.HOUSE_X = 60;
this.SUN_FREQUENCY = 5;
this.SUN_VELOCITY = 50;
//no gravity in a top-down game
this.game.physics.arcade.gravity.y = 0;
},
create: function() {
this.background = this.add.sprite(0, 0, 'background');
//group for game objects
this.bullets = this.add.group();
this.plants = this.add.group();
this.zombies = this.add.group();
this.suns = this.add.group();
//player stats
this.numSums = 100;
//create user interface
this.createGui();
var zombieData = {
asset: 'zombie',
health: 10,
animationFrames: [0, 1, 2, 1],
attack: 0.1,
velocity: -10
};
this.zombie = new Veggies.Zombie(this, 400, 100, zombieData);
this.zombies.add(this.zombie);
var plantData = {
plantAsset: 'plant',
health: 10,
isShooter: true,
//isSunProducer: true,
animationFrames: [1, 2, 1, 0]
};
this.plant = new Veggies.Plant(this, 100, 100, plantData);
this.plants.add(this.plant);
//create new suns with the specified frequency
this.sunGenerationTimer = this.game.time.create(false);
this.sunGenerationTimer.start();
this.scheduleSunGeneration();
//hitting sound
this.hitSound = this.add.audio('hit');
},
update: function() {
this.game.physics.arcade.collide(this.plants, this.zombies, this.attackPlant, null, this)
this.game.physics.arcade.collide(this.bullets, this.zombies, this.hitZombie, null, this)
this.zombies.forEachAlive(function(zombie){
//zombies need to keep their speed
zombie.body.velocity.x = zombie.defaultVelocity;
//if one of them reaches the house, it's game over
if(zombie.x <= this.HOUSE_X) {
this.gameOver();
}
}, this);
},
gameOver: function() {
this.game.state.start('Game');
},
attackPlant: function(plant, zombie) {
plant.damage(zombie.attack);
},
createZombie: function(x, y, data) {
//look for a dead element
var newElement = this.zombies.getFirstDead();
//if there are no dead ones, create a new one
if(!newElement) {
newElement = new Veggies.Zombie(this, x, y, data);
this.zombies.add(newElement);
}
else {
newElement.reset(x, y, data);
}
return newElement;
},
createPlant: function(x, y, data) {
//look for a dead element
var newElement = this.plants.getFirstDead();
//if there are no dead ones, create a new one
if(!newElement) {
newElement = new Veggies.Plant(this, x, y, data);
this.plants.add(newElement);
}
else {
newElement.reset(x, y, data);
}
return newElement;
},
createGui: function() {
//show sun stats
var sun = this.add.sprite(10, this.game.height - 20, 'sun');
sun.anchor.setTo(0.5);
sun.scale.setTo(0.5);
var style = {font: '14px Arial', fill: '#fff'};
this.sunLabel = this.add.text(22, this.game.height - 28, '', style);
this.updateStats();
//show the button bar
this.buttonData = JSON.parse(this.game.cache.getText('buttonData'));
//buttons
this.buttons = this.add.group();
var button;
this.buttonData.forEach(function(element, index){
button = new Phaser.Button(this.game, 80 + index * 40, this.game.height - 35, element.btnAsset, this.clickButton, this)
this.buttons.add(button);
//pass the data to the button
button.plantData = element;
}, this);
},
updateStats: function() {
this.sunLabel.text = this.numSums;
},
increaseSun: function(amount) {
this.numSums += amount;
this.updateStats();
},
scheduleSunGeneration: function() {
this.sunGenerationTimer.add(Phaser.Timer.SECOND * this.SUN_FREQUENCY, function(){
this.generateRandomSun();
this.scheduleSunGeneration();
}, this);
},
generateRandomSun: function() {
//position
var y = -20;
var x = 40 + 420 * Math.random();
//sun object
var sun = this.createSun(x, y);
//falling velocity
sun.body.velocity.y = this.SUN_VELOCITY;
},
createSun: function(x, y) {
//look for a dead element
var newElement = this.suns.getFirstDead();
//if there are no dead ones, create a new one
if(!newElement) {
newElement = new Veggies.Sun(this, x, y);
this.suns.add(newElement);
}
else {
newElement.reset(x, y);
}
return newElement;
},
hitZombie: function(bullet, zombie) {
bullet.kill();
this.hitSound.play();
zombie.damage(1);
},
clickButton: function(button) {
console.log(button.plantData);
}
};
<file_sep>/Tutorial files/M14-Hexagon-Strategy-Game/06-units/js/prefabs/Unit.js
var HexGame = HexGame || {};
HexGame.Unit = function(state, data) {
var position = state.board.getXYFromRowCol(data.row, data.col);
Phaser.Sprite.call(this, state.game, position.x, position.y, data.asset);
this.game = state.game;
this.state = state;
this.board = state.board;
this.row = data.row;
this.col = data.col;
this.data = data;
this.anchor.setTo(0.5);
};
HexGame.Unit.prototype = Object.create(Phaser.Sprite.prototype);
HexGame.Unit.prototype.constructor = HexGame.Unit;
<file_sep>/M8/js/prefabs/Zombie.js
var Veggies = Veggies || {};
Veggies.Zombie = function(state, x, y, data) {
Phaser.Sprite.call(this, state.game, x, y, data.asset);
this.state = state;
this.game = state.game;
this.anchor.setTo(0.5);
//enable physics
this.game.physics.arcade.enable(this);
this.reset(x, y, data);
};
Veggies.Zombie.prototype = Object.create(Phaser.Sprite.prototype);
Veggies.Zombie.prototype.constructor = Veggies.Zombie;
Veggies.Zombie.prototype.reset = function(x, y, data){
Phaser.Sprite.prototype.reset.call(this, x, y, data.health);
//change the image of zombie
this.loadTexture(data.asset);
//create an animation if any was passed
this.animationName = null;
if(data.animationFrames){
this.animationName = data.asset + 'Anim';
this.animations.add(this.animationName, data.animationFrames, 4, true);
this.play(this.animationName);
}
//save properties
this.attack = data.attack;
this.defaultVelocity = data.velocity;
this.body.velocity.x = data.velocity;
};
Veggies.Zombie.prototype.damage = function(amount){
Phaser.Sprite.prototype.damage.call(this, amount);
//particle effect for blood
var emitter = this.game.add.emitter(this.x, this.y, 50);
emitter.makeParticles('bloodParticle');
emitter.minParticleSpeed.setTo(-100, -100);
emitter.maxParticleSpeed.setTo(100, 100);
emitter.gravity = 300;
emitter.start(true, 200, null, 100);
//corpse
if(this.health <= 0){
var corpse = this.game.add.sprite(this.x, this.bottom, 'deadZombie');
corpse.anchor.setTo(0.5, 1);
}
};<file_sep>/Tutorial files/M13-Point-and-Click/04-custom-font/js/states/Game.js
var PointClk = PointClk || {};
PointClk.GameState = {
init: function(playerData) {
this.playerData = playerData ? playerData : {};
this.playerData.room = this.playerData.room ? this.playerData.room : 'livingroom';
},
create: function() {
//panel area
this.panel = this.add.sprite(0, 270, 'panel');
var style = {
font: '16px Prstart',
fill: '#fff',
align: 'left',
wordWrap: true,
wordWrapWidth: 400
};
this.panelLabel = this.add.text(10, 290, '', style);
this.loadRoom();
},
loadRoom: function() {
this.roomData = JSON.parse(this.game.cache.getText(this.playerData.room));
this.background = this.add.sprite(0, 0, this.roomData.background);
//create things
this.things = this.add.group();
var thing;
this.roomData.things.forEach(function(thingData){
thing = new PointClk.Thing(this, thingData);
this.things.add(thing);
}, this);
}
};
<file_sep>/Tutorial files/M14-Hexagon-Strategy-Game/03-terrains/js/prefabs/Board.js
var HexGame = HexGame || {};
HexGame.Board = function(state, grid) {
Phaser.Group.call(this, state.game);
this.state = state;
this.game = state.game;
this.grid = grid;
this.rows = grid.length;
this.cols = grid[0].length;
this.terrains = [
{asset: 'grass'},
{asset: 'water', blocked: true},
{asset: 'rocks'},
{asset: 'grasstrees'},
{asset: 'grasstrees2'}
];
//create hexagons
var row, col, tile, x, y;
for(row = 0; row < this.rows; row++) {
for(col = 0; col < this.cols; col++) {
//even rows
if(row % 2 === 0) {
x = this.state.MARGIN_X + col * this.state.TILE_W;
}
//odd rows
else {
x = this.state.MARGIN_X + col * this.state.TILE_W + this.state.TILE_W/2;
}
y = this.state.MARGIN_Y + row * this.state.TILE_H * 3/4;
tile = new Phaser.Sprite(this.game, x, y, this.terrains[this.grid[row][col]].asset);
//keep some information in the tile object
tile.row = row;
tile.col = col;
tile.terrainAsset = this.terrains[this.grid[row][col]].asset;
tile.blocked = this.terrains[this.grid[row][col]].blocked;
tile.inputEnabled = true;
tile.input.pixelPerfectClick = true;
this.add(tile);
}
}
};
HexGame.Board.prototype = Object.create(Phaser.Group.prototype);
HexGame.Board.prototype.constructor = HexGame.Board;
<file_sep>/Tutorial files/M11-Hipster-Town-Simulation/02-population-growth/js/prefabs/TownModel.js
var HTown = HTown || {};
HTown.TownModel = function(coefs, initialStats){
  this.coefs = coefs || {}; //use the coefficients that were passed in, defaults applied below
this.coefs.populationGrowth = this.coefs.populationGrowth || 1.02;
this.coefs.foodConsumption = this.coefs.foodConsumption || 1;
this.stats = {};
this.stats.population = initialStats.population;
this.stats.food = initialStats.food;
this.stats.money = initialStats.money;
};
HTown.TownModel.prototype.step = function(){
//population
this.stats.population = this.stats.population * this.coefs.populationGrowth;
};
<file_sep>/Tutorial files/M15-Dungeon-Crawler/10-generate-enemies/js/prefabs/Enemy.js
var DunCrawl = DunCrawl || {};
DunCrawl.Enemy = function(state, data) {
var position = state.board.getXYFromRowCol(data);
Phaser.Sprite.call(this, state.game, position.x, position.y, data.asset);
this.game = state.game;
this.state = state;
this.board = state.board;
this.row = data.row;
this.col = data.col;
this.data = data;
this.data.type = 'enemy';
this.anchor.setTo(0.5);
};
DunCrawl.Enemy.prototype = Object.create(Phaser.Sprite.prototype);
DunCrawl.Enemy.prototype.constructor = DunCrawl.Unit;
<file_sep>/Tutorial files/M15-Dungeon-Crawler/03-collect-items/js/states/Game.js
var DunCrawl = DunCrawl || {};
DunCrawl.GameState = {
init: function(data) {
//board
this.ROWS = 8;
this.COLS = 6;
this.TILE_SIZE = 60;
data = data || {};
this.currentLevel = data.currentLevel || 1;
this.playerStats = data.playerStats || {
health: 25,
attack: 2,
defense: 1,
gold: 0,
hasKey: false
};
},
create: function() {
//tiles groups
this.backgroundTiles = this.add.group();
//map elements
this.mapElements = this.add.group();
//board
this.board = new DunCrawl.Board(this, {
rows: this.ROWS,
cols: this.COLS,
tileSize: this.TILE_SIZE
});
//hard code an item
this.item = new DunCrawl.Item(this, {
row: 3,
col: 2,
asset: 'sword',
type: 'consumable',
health: 10,
attack: 0,
defense: 1,
gold: 100
});
this.mapElements.add(this.item);
},
gameOver: function() {
this.game.state.start('Game');
},
nextLevel: function() {
this.game.state.start('Game', true, false, {currentLevel: this.currentLevel + 1, playerStats: this.playerStats});
}
};
<file_sep>/Tutorial files/M14-Hexagon-Strategy-Game/05-get-adjacent-cells/js/prefabs/Board.js
var HexGame = HexGame || {};
HexGame.Board = function(state, grid) {
Phaser.Group.call(this, state.game);
this.state = state;
this.game = state.game;
this.grid = grid;
this.rows = grid.length;
this.cols = grid[0].length;
this.terrains = [
{asset: 'grass'},
{asset: 'water', blocked: true},
{asset: 'rocks'},
{asset: 'grasstrees'},
{asset: 'grasstrees2'}
];
//create hexagons
var row, col, tile, x, y;
for(row = 0; row < this.rows; row++) {
for(col = 0; col < this.cols; col++) {
//even rows
if(row % 2 === 0) {
x = this.state.MARGIN_X + col * this.state.TILE_W;
}
//odd rows
else {
x = this.state.MARGIN_X + col * this.state.TILE_W + this.state.TILE_W/2;
}
y = this.state.MARGIN_Y + row * this.state.TILE_H * 3/4;
tile = new Phaser.Sprite(this.game, x, y, this.terrains[this.grid[row][col]].asset);
//keep some information in the tile object
tile.row = row;
tile.col = col;
tile.terrainAsset = this.terrains[this.grid[row][col]].asset;
tile.blocked = this.terrains[this.grid[row][col]].blocked;
tile.inputEnabled = true;
tile.input.pixelPerfectClick = true;
tile.events.onInputDown.add(function(tile){
var adj = this.getAdjacent(tile, true);
adj.forEach(function(t){
t.alpha = 0.3;
}, this);
}, this);
this.add(tile);
}
}
};
HexGame.Board.prototype = Object.create(Phaser.Group.prototype);
HexGame.Board.prototype.constructor = HexGame.Board;
HexGame.Board.prototype.getFromRowCol = function(row, col) {
var foundTile;
this.forEach(function(tile){
if(tile.row === row && tile.col === col) {
foundTile = tile;
}
}, this);
return foundTile;
};
HexGame.Board.prototype.getXYFromRowCol = function(row, col){
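  //convert row/col to the pixel centre of a hex: odd rows are shifted right by half a tile,
  //and consecutive rows overlap by a quarter of the tile height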
var pos = {};
//even rows
if(row % 2 === 0) {
pos.x = this.state.MARGIN_X + col * this.state.TILE_W + this.state.TILE_W/2;
}
//odd rows
else {
pos.x = this.state.MARGIN_X + col * this.state.TILE_W + this.state.TILE_W/2 + this.state.TILE_W/2;
}
pos.y = this.state.MARGIN_Y + row * this.state.TILE_H * 3/4 + this.state.TILE_H/2;
return pos;
};
HexGame.Board.prototype.getAdjacent = function(tile, rejectBlocked) {
var adjacentTiles = [];
var row = tile.row;
var col = tile.col;
var relativePositions = [];
  //relative positions of adjacent cells depend on whether the row is odd or even
//even rows
if(row % 2 === 0) {
relativePositions = [
{r: -1, c: 0},
{r: -1, c: -1},
{r: 0, c: -1},
{r: 0, c: 1},
{r: 1, c: 0},
{r: 1, c: -1}
]
}
//odd rows
else {
relativePositions = [
{r: -1, c: 0},
{r: -1, c: 1},
{r: 0, c: -1},
{r: 0, c: 1},
{r: 1, c: 0},
{r: 1, c: 1}
];
}
var adjTile;
relativePositions.forEach(function(pos){
//check that we are not on the edge of the map
if((row + pos.r >= 0) && (row + pos.r < this.rows) && (col + pos.c >= 0) && (col + pos.c < this.cols)) {
//get adjacent tile
adjTile = this.getFromRowCol(row + pos.r, col + pos.c);
if(!rejectBlocked || !adjTile.blocked) {
adjacentTiles.push(adjTile);
}
}
}, this);
return adjacentTiles;
};
<file_sep>/Tutorial files/M8-Veggies-vs-Zombies-updated-2015-07-03/16-land patches/js/prefabs/Plant.js
var Veggies = Veggies || {};
Veggies.Plant = function(state, x, y, data) {
Phaser.Sprite.call(this, state.game, x, y, data.plantAsset);
this.state = state;
this.game = state.game;
this.bullets = state.bullets;
this.suns = state.suns;
this.anchor.setTo(0.5);
//init physics body
this.game.physics.arcade.enable(this);
this.body.immovable = true;
//create timers
this.shootingTimer = this.game.time.create(false);
this.producingTimer = this.game.time.create(false);
this.reset(x, y, data);
};
Veggies.Plant.prototype = Object.create(Phaser.Sprite.prototype);
Veggies.Plant.prototype.constructor = Veggies.Plant;
Veggies.Plant.prototype.reset = function(x, y, data){
Phaser.Sprite.prototype.reset.call(this, x, y, data.health);
//change the image of the plant
this.loadTexture(data.plantAsset);
//create an animation if any was passed
this.animationName = null;
if(data.animationFrames) {
this.animationName = data.plantAsset + 'Anim';
this.animations.add(this.animationName, data.animationFrames, 6, false);
}
//save properties
this.isShooter = data.isShooter;
this.isSunProducer = data.isSunProducer;
//if plant is a shooter then setup shooting timer
if(this.isShooter) {
this.shootingTimer.start();
this.scheduleShooting();
}
//if plant is a sun producer then setup production timer
if(this.isSunProducer) {
this.producingTimer.start();
this.scheduleProduction();
}
};
Veggies.Plant.prototype.kill = function() {
Phaser.Sprite.prototype.kill.call(this);
//stop timers
this.shootingTimer.stop();
this.producingTimer.stop();
};
Veggies.Plant.prototype.scheduleShooting = function() {
this.shoot();
//plants shoot once per second
this.shootingTimer.add(Phaser.Timer.SECOND, this.scheduleShooting, this);
};
Veggies.Plant.prototype.shoot = function() {
//play shooting animation
if(this.animations.getAnimation(this.animationName)) {
this.play(this.animationName);
}
  //location y of the bullet
var y = this.y - 10;
//look for a dead element
var newElement = this.bullets.getFirstDead();
//if there are no dead ones, create a new one
if(!newElement) {
newElement = new Veggies.Bullet(this, this.x, y);
this.bullets.add(newElement);
}
else {
newElement.reset(this.x, y);
}
//set the velocity
newElement.body.velocity.x = 100;
};
Veggies.Plant.prototype.scheduleProduction = function() {
//create a random sun
this.produceSun();
//plants shoot once per second
this.producingTimer.add(Phaser.Timer.SECOND * 5, this.scheduleProduction, this);
};
Veggies.Plant.prototype.produceSun = function() {
//place the sun in a random location near the plant
var diffX = -40 + Math.random() * 80;
var diffY = -40 + Math.random() * 80;
this.state.createSun(this.x + diffX, this.y + diffY);
};
<file_sep>/Tutorial files/M9-RPG/03-hello-world-phaser-plugin/js/plugins/OnscreenControls.js
Phaser.Plugin.OnscreenControls = function(game, parent) {
Phaser.Plugin.call(this, game, parent);
//add your own custom init logic
this.game = game;
console.log('plugin ready');
};
Phaser.Plugin.OnscreenControls.prototype = Object.create(Phaser.Plugin.prototype);
Phaser.Plugin.OnscreenControls.prototype.constructor = Phaser.Plugin.OnscreenControls;<file_sep>/Tutorial files/M8-Veggies-vs-Zombies-updated-2015-07-03/07 shooting plants/js/prefabs/Zombie.js
var Veggies = Veggies || {};
Veggies.Zombie = function(state, x, y, data) {
Phaser.Sprite.call(this, state.game, x, y, data.asset);
this.state = state;
this.game = state.game;
this.anchor.setTo(0.5);
//enable physics
this.game.physics.arcade.enable(this);
this.reset(x, y, data);
};
Veggies.Zombie.prototype = Object.create(Phaser.Sprite.prototype);
Veggies.Zombie.prototype.constructor = Veggies.Zombie;
Veggies.Zombie.prototype.reset = function(x, y, data) {
Phaser.Sprite.prototype.reset.call(this, x, y, data.health);
//change the image of the plant
this.loadTexture(data.asset);
//create an animation if any was passed
this.animationName = null;
if(data.animationFrames) {
this.animationName = data.asset + 'Anim';
this.animations.add(this.animationName, data.animationFrames, 4, true);
this.play(this.animationName);
}
//save properties
this.attack = data.attack;
this.defaultVelocity = data.velocity;
this.body.velocity.x = data.velocity;
}<file_sep>/Tutorial files/M14-Hexagon-Strategy-Game/09-attack/js/states/Game.js
var HexGame = HexGame || {};
HexGame.GameState = {
init: function() {
this.TILE_W = 56;
this.TILE_H = 64;
this.MARGIN_X = 30;
this.MARGIN_Y = 5;
},
create: function() {
this.map = JSON.parse(this.game.cache.getText('map'));
this.board = new HexGame.Board(this, this.map.grid);
this.playerUnits = this.add.group();
this.enemyUnits = this.add.group();
this.initUnits();
},
initUnits: function() {
//load player units
this.playerUnitsData = JSON.parse(this.game.cache.getText('playerUnits'));
var unit;
this.playerUnitsData.forEach(function(unitData){
unit = new HexGame.Unit(this, unitData);
//unit belongs to the player
unit.isPlayer = true;
this.playerUnits.add(unit);
}, this);
//load player units
this.enemyUnitsData = JSON.parse(this.game.cache.getText('enemyUnits'));
this.enemyUnitsData.forEach(function(unitData){
unit = new HexGame.Unit(this, unitData);
this.enemyUnits.add(unit);
}, this);
},
clearSelection: function() {
this.board.setAll('alpha', 1);
//remove attached events from tiles
this.board.forEach(function(tile){
tile.events.onInputDown.removeAll();
}, this);
}
};
<file_sep>/Tutorial files/M11-Hipster-Town-Simulation/08-hud/js/states/Game.js
var HTown = HTown || {};
HTown.GameState = {
init: function() {
//game constants
this.STEP =2;
//no gravity in a top-down game
this.game.physics.arcade.gravity.y = 0;
},
create: function() {
//grass floor
this.background = this.add.tileSprite(0,0, 1200, 800, 'grass');
this.game.world.setBounds(0, 0, 1200, 800);
this.buildings = this.add.group();
var house = new HTown.Building(this, 100, 100, {asset: 'house', housing: 100});
this.buildings.add(house);
var farm = new HTown.Building(this, 200, 200, {asset: 'crops', food: 100});
this.buildings.add(farm);
var factory = new HTown.Building(this, 200, 300, {asset: 'factory', jobs: 20});
this.buildings.add(factory);
//create a town
this.town = new HTown.TownModel({}, {population: 100, food: 200, money: 100}, this.buildings);
//update simulation
this.simulationTimer = this.game.time.events.loop(Phaser.Timer.SECOND * this.STEP, this.simulationStep, this);
this.initGui();
},
update: function() {
},
simulationStep: function() {
this.town.step();
this.refreshStats();
},
initGui: function() {
//money
this.moneyIcon = this.add.sprite(10, 10, 'money');
this.moneyIcon.fixedToCamera = true;
var style = {font: '14px Arial', fill: '#fff'};
this.moneyLabel = this.add.text(45, 15, '0', style);
this.moneyLabel.fixedToCamera = true;
//food icon
this.foodIcon = this.add.sprite(100, 10, 'food');
this.foodIcon.fixedToCamera = true;
style = {font: '14px Arial', fill: '#fff'};
this.foodLabel = this.add.text(135, 15, '0', style);
this.foodLabel.fixedToCamera = true;
//population icon
this.populationIcon = this.add.sprite(190, 10, 'population');
this.populationIcon.fixedToCamera = true;
style = {font: '14px Arial', fill: '#fff'};
this.populationLabel = this.add.text(225, 15, '0', style);
this.populationLabel.fixedToCamera = true;
//jobs icon
this.jobsIcon = this.add.sprite(280, 10, 'jobs');
this.jobsIcon.fixedToCamera = true;
style = {font: '14px Arial', fill: '#fff'};
this.jobsLabel = this.add.text(315, 15, '0', style);
this.jobsLabel.fixedToCamera = true;
//refresh stats
this.refreshStats();
},
refreshStats: function() {
this.moneyLabel.text = Math.round(this.town.stats.money);
this.foodLabel.text = Math.round(this.town.stats.food);
this.populationLabel.text = Math.round(this.town.stats.population) + '/' + Math.round(this.town.stats.housing);
this.jobsLabel.text = Math.round(this.town.stats.jobs);
}
};
<file_sep>/Tutorial files/M13-Point-and-Click/08-unlock-door/js/prefabs/Thing.js
var PointClk = PointClk || {};
PointClk.Thing = function(state, data) {
Phaser.Sprite.call(this, state.game, data.x, data.y, data.asset);
this.game = state.game;
this.state = state;
this.anchor.setTo(0.5);
this.data = data;
//listen for input
this.inputEnabled = true;
this.input.pixelPerfectClick = true;
this.events.onInputDown.add(this.touch, this);
};
PointClk.Thing.prototype = Object.create(Phaser.Sprite.prototype);
PointClk.Thing.prototype.constructor = PointClk.Thing;
PointClk.Thing.prototype.touch = function() {
this.state.panelLabel.text = this.data.text;
//if it's a collectable then collect it!
if(this.data.type == 'collectable') {
this.state.addItem(this.data);
this.kill();
return;
}
//if it's an open door, go to another room
else if(this.data.type == 'door' && this.data.isOpen) {
console.log('go to ' + this.data.destination);
return;
}
//are we selecting anything?
var selectedItem = this.state.selectedItem;
if(selectedItem) {
//are there interactions? are they with the selected item?
if(this.data.interactions && this.data.interactions[this.state.selectedItem.data.id]) {
//we do have an interaction between the "thing" and the selected item
var interaction = this.data.interactions[this.state.selectedItem.data.id];
//show text
if(interaction.text) {
this.state.panelLabel.text = interaction.text;
}
//change asset
if(interaction.asset) {
this.loadTexture(interaction.asset);
this.data.asset = interaction.asset;
}
//open door
if(interaction.action == 'open-door') {
this.data.isOpen = true;
selectedItem.kill();
this.state.clearSelection();
}
}
}
};<file_sep>/Tutorial files/M6-Mr-Hop/02-pool-of-floors/js/states/Game.js
var MrHop = MrHop || {};
MrHop.GameState = {
init: function() {
//pool of floors
this.floorPool = this.add.group();
//gravity
this.game.physics.arcade.gravity.y = 1000;
},
create: function() {
//hard-code first platform
var platform = new MrHop.Platform(this.game, this.floorPool, 3, 100, 200);
this.add.existing(platform);
},
update: function() {
}
};
<file_sep>/Tutorial files/M8-Veggies-vs-Zombies-updated-2015-07-03/02 plant prefab/js/states/Game.js
var Veggies = Veggies || {};
Veggies.GameState = {
init: function(currentLevel) {
//keep track of the current level
this.currentLevel = currentLevel ? currentLevel : 'level1';
//no gravity in a top-down game
this.game.physics.arcade.gravity.y = 0;
},
create: function() {
this.background = this.add.sprite(0, 0, 'background');
//group for game objects
this.bullets = this.add.group();
this.plants = this.add.group();
this.zombies = this.add.group();
this.suns = this.add.group();
var plantData = {
plantAsset: 'plant',
health: 10
};
this.plant = new Veggies.Plant(this, 100, 100, plantData);
this.plants.add(this.plant);
},
update: function() {
},
gameOver: function() {
this.game.state.start('Game');
}
};
<file_sep>/M11/js/prefabs/Building.js
var HTown = HTown || {};
HTown.Building = function(state, x, y, data) {
Phaser.Sprite.call(this, state.game, x, y, data.asset);
this.game = state.game;
this.state = state;
//init stats
this.food = data.food;
this.jobs = data.jobs;
this.housing = data.housing;
this.anchor.setTo(0.5);
//init physics
this.game.physics.arcade.enable(this);
};
HTown.Building.prototype = Object.create(Phaser.Sprite.prototype);
HTown.Building.prototype.constructor = HTown.Building;<file_sep>/M3/js/main.js
//this game will have only 1 state
var GameState = {
//initiate some game-level settings
init: function() {
//scaling options
this.scale.scaleMode = Phaser.ScaleManager.SHOW_ALL;
this.scale.pageAlignHorizontally = true;
this.scale.pageAlignVertically = true;
},
//load the game assets before the game starts
preload: function() {
this.load.image('backyard', 'assets/images/backyard.png');
this.load.image('apple', 'assets/images/apple.png');
this.load.image('candy', 'assets/images/candy.png');
this.load.image('rotate', 'assets/images/rotate.png');
this.load.image('toy', 'assets/images/rubber_duck.png');
this.load.image('arrow', 'assets/images/arrow.png');
this.load.spritesheet('pet', 'assets/images/pet.png', 97, 83, 5, 1, 1);
},
//executed after everything is loaded
create: function() {
this.background = this.game.add.sprite(0, 0, 'backyard');
this.pet = this.game.add.sprite(100, 400, 'pet');
this.pet.anchor.setTo(0.5);
//custom properties
this.pet.customParams = {health: 100, fun: 100};
this.apple = this.game.add.sprite(72, 570, 'apple');
this.candy = this.game.add.sprite(144, 570, 'candy');
this.toy = this.game.add.sprite(216, 570, 'toy');
this.rotate = this.game.add.sprite(288, 570, 'rotate');
},
};
//initiate the Phaser framework
var game = new Phaser.Game(360, 640, Phaser.AUTO);
game.state.add('GameState', GameState);
game.state.add('HomeState', HomeState);
game.state.add('PreloadState', PreloadState);
game.state.add('BootState', BootState);
game.state.start('GameState');<file_sep>/M11/js/states/Game.js
var HTown = HTown || {};
HTown.GameState = {
init: function() {
//game constants
this.STEP = 2;
//no gravity in a top-down game
this.game.physics.arcade.gravity.y = 0;
},
create: function() {
//grass floor
this.background = this.add.tileSprite(0,0, 1200, 800, 'grass');
this.game.world.setBounds(0, 0, 1200, 800);
this.buildings = this.add.group();
var house = new HTown.Building(this, 100, 100, {asset: 'house', housing: 100});
this.buildings.add(house);
var farm = new HTown.Building(this, 200, 200, {asset: 'crops', food: 2200});
this.buildings.add(farm);
var factory = new HTown.Building(this, 300, 300, {asset: 'factory', jobs: 20});
this.buildings.add(factory);
//create town
this.town = new HTown.TownModel({}, {population: 100, food: 200, money: 100}, this.buildings);
//update simulation
this.simulationTimer = this.game.time.events.loop(Phaser.Timer.SECOND * this.STEP, this.simulationStep, this);
this.initGui();
this.refreshStats();
},
update: function() {
//check if dragging is not blocked
if(!this.isDraggingMapBlock){
//start dragging
if(!this.isDraggingMap){
if(this.game.input.activePointer.isDown){
this.isDraggingMap = true;
this.startDragPoint = {};
this.startDragPoint.x = this.game.input.activePointer.position.x;
this.startDragPoint.y = this.game.input.activePointer.position.y;
}
}else{
this.endDragPoint = {};
this.endDragPoint.x = this.game.input.activePointer.position.x;
this.endDragPoint.y = this.game.input.activePointer.position.y;
this.game.camera.x += this.startDragPoint.x - this.endDragPoint.x;
this.game.camera.y += this.startDragPoint.y - this.endDragPoint.y;
//after update take new starting point
this.startDragPoint.x = this.game.input.activePointer.position.x;
this.startDragPoint.y = this.game.input.activePointer.position.y;
//stop dragging map when active pointer is released
if(this.game.input.activePointer.isUp){
this.isDraggingMap = false;
}
}
}
if(this.isBuildingBtnActive && this.game.input.activePointer.isDown){
//we can no longer drag the map
this.isDraggingMapBlock = true;
//start dragging a shadow building
this.isDraggingBuilding = true;
}
if(this.isDraggingBuilding){
var pointerWX = this.game.input.activePointer.worldX;
var pointerWY = this.game.input.activePointer.worldY;
if(!this.shadowBuilding || !this.shadowBuilding.alive){
this.shadowBuilding = this.add.sprite(pointerWX, pointerWY, this.selectedBuilding.asset);
this.shadowBuilding.alpha = 0.5;
this.shadowBuilding.anchor.setTo(0.5);
//enable physics
this.game.physics.arcade.enable(this.shadowBuilding);
}
this.shadowBuilding.x = pointerWX;
this.shadowBuilding.y = pointerWY;
}
if(this.isDraggingBuilding && this.game.input.activePointer.isUp){
if(this.canBuild()){
this.town.stats.money -= this.selectedBuilding.cost;
this.createBuilding(this.game.input.activePointer.worldX, this.game.input.activePointer.worldY, this.selectedBuilding);
}
this.clearSelection();
}
},
simulationStep: function(){
this.town.step();
this.refreshStats();
},
initGui: function(){
//money
this.moneyIcon = this.add.sprite(10, 10, 'money');
this.moneyIcon.fixedToCamera = true;
    var style = {font: '14px Arial', fill: '#fff'};
this.moneyLabel = this.add.text(45, 15, '0', style);
this.moneyLabel.fixedToCamera = true;
//food
this.foodIcon = this.add.sprite(100, 10, 'food');
this.foodIcon.fixedToCamera = true;
this.foodLabel = this.add.text(135, 15, '0', style);
this.foodLabel.fixedToCamera = true;
//population
this.populationIcon = this.add.sprite(190, 10, 'population');
this.populationIcon.fixedToCamera = true;
    var style = {font: '14px Arial', fill: '#fff'};
this.populationLabel = this.add.text(225, 15, '0', style);
this.populationLabel.fixedToCamera = true;
//jobs
this.jobsIcon = this.add.sprite(280, 10, 'jobs');
this.jobsIcon.fixedToCamera = true;
    var style = {font: '14px Arial', fill: '#fff'};
this.jobsLabel = this.add.text(315, 15, '0', style);
this.jobsLabel.fixedToCamera = true;
//button data
this.buttonData = JSON.parse(this.game.cache.getText('buttonData'));
//buttons group
this.buttons = this.add.group();
var button;
this.buttonData.forEach(function(element, index){
button = new Phaser.Button(this.game, this.game.width - 60 - 60 * index, this.game.height - 60, element.btnAsset, this.clickBuildBtn, this);
button.fixedToCamera = true;
this.buttons.add(button);
//pass data of the button
button.buildingData = element;
}, this);
},
refreshStats: function(){
this.moneyLabel.text = Math.round(this.town.stats.money);
this.foodLabel.text = Math.round(this.town.stats.food);
this.jobsLabel.text = Math.round(this.town.stats.jobs);
this.populationLabel.text = Math.round(this.town.stats.population) + '/' + Math.round(this.town.stats.housing);
},
clickBuildBtn: function(button){
this.clearSelection();
//check if user can afford the building
if(this.town.stats.money >= button.buildingData.cost){
button.alpha = 0.5;
this.selectedBuilding = button.buildingData;
this.isBuildingBtnActive = true;
}
},
clearSelection: function(){
this.isDraggingMapBlock = false;
this.isDraggingMap = false;
this.isBuildingBtnActive = false;
this.selectedBuilding = null;
this.isDraggingBuilding = false;
if(this.shadowBuilding){
this.shadowBuilding.kill();
}
// this.refreshStats();
this.buttons.setAll('alpha', 1);
},
createBuilding: function(x, y, data){
var newBuilding = new HTown.Building(this, x, y, data);
this.buildings.add(newBuilding);
},
canBuild: function(){
var isOverlappingBuildings = this.game.physics.arcade.overlap(this.shadowBuilding, this.buildings);
return !isOverlappingBuildings;
}
};
<file_sep>/Tutorial files/M8-Veggies-vs-Zombies-updated-2015-07-03/09 sun prefab and expiration/js/prefabs/Sun.js
var Veggies = Veggies || {};
Veggies.Sun = function(state, x, y) {
Phaser.Sprite.call(this, state.game, x, y, 'sun');
this.state = state;
this.game = state.game;
//init physics body
this.game.physics.arcade.enable(this);
this.animations.add('shine', [0,1,0], 10, true);
this.play('shine');
//some default values
this.anchor.setTo(0.5);
//collect suns when tapping
this.inputEnabled = true;
this.input.pixelPerfectClick = true;
this.events.onInputDown.add(function(){
console.log('sun collected');
this.kill();
}, this);
//expiration of the sun
this.sunExpirationTimer = this.game.time.create(false);
this.reset(x, y);
};
Veggies.Sun.prototype = Object.create(Phaser.Sprite.prototype);
Veggies.Sun.prototype.constructor = Veggies.Sun;
Veggies.Sun.prototype.scheduleExpiration = function(){
this.sunExpirationTimer.start();
//random expiration time between 2 and 6 seconds
var expirationTime = 2 + Math.random() * 4;
this.sunExpirationTimer.add(Phaser.Timer.SECOND * expirationTime, function(){
this.kill();
}, this);
};
Veggies.Sun.prototype.kill = function() {
this.sunExpirationTimer.stop();
Phaser.Sprite.prototype.kill.call(this);
};
Veggies.Sun.prototype.reset = function(x, y) {
Phaser.Sprite.prototype.reset.call(this, x, y);
this.scheduleExpiration();
}
<file_sep>/Tutorial files/M14-Hexagon-Strategy-Game/12-enemy-ai/js/prefabs/Unit.js
var HexGame = HexGame || {};
HexGame.Unit = function(state, data) {
var position = state.board.getXYFromRowCol(data.row, data.col);
Phaser.Sprite.call(this, state.game, position.x, position.y, data.asset);
this.game = state.game;
this.state = state;
this.board = state.board;
this.row = data.row;
this.col = data.col;
this.data = data;
this.anchor.setTo(0.5);
//this.inputEnabled = true;
//this.input.pixelPerfectClick = true;
//this.events.onInputDown.add(this.showMovementOptions, this);
};
HexGame.Unit.prototype = Object.create(Phaser.Sprite.prototype);
HexGame.Unit.prototype.constructor = HexGame.Unit;
HexGame.Unit.prototype.showMovementOptions = function(){
this.state.clearSelection();
//only if the UI is free
if(this.state.uiBlocked) {
return;
}
//get current tile
var currTile = this.board.getFromRowCol(this.row, this.col);
//get the adjacent cells
var adjacentCells = this.board.getAdjacent(currTile, true);
adjacentCells.forEach(function(tile){
tile.alpha = 0.7;
//add input
tile.events.onInputDown.add(this.moveUnit, this);
}, this);
};
HexGame.Unit.prototype.moveUnit = function(tile){
this.state.clearSelection();
this.state.uiBlocked = true;
//target position
var pos = this.board.getXYFromRowCol(tile.row, tile.col);
var unitMovement = this.game.add.tween(this);
unitMovement.to(pos, 200);
unitMovement.onComplete.add(function(){
this.state.uiBlocked = false;
this.row = tile.row;
this.col = tile.col;
//check for battles
this.checkBattle();
//check for game ending
//prepare the next unit
this.state.prepareNextUnit();
}, this);
unitMovement.start();
};
HexGame.Unit.prototype.attack = function(attacked) {
var attacker = this;
//both units attack each other
var damageAttacked = Math.max(0, attacker.data.attack * Math.random() - attacked.data.defense * Math.random());
var damageAttacker = Math.max(0, attacked.data.attack * Math.random() - attacker.data.defense * Math.random());
attacked.data.health -= damageAttacked;
attacker.data.health -= damageAttacker;
if(attacked.data.health <= 0) {
attacked.kill();
}
if(attacker.data.health <= 0) {
attacker.kill();
}
};
HexGame.Unit.prototype.checkBattle = function() {
//get rival army
var rivalUnits = this.isPlayer ? this.state.enemyUnits : this.state.playerUnits;
var fightUnit;
//check rival army units to find a match
rivalUnits.forEachAlive(function(unit){
if(this.row === unit.row && this.col === unit.col) {
console.log('both are in the same cell! -- fight!!!');
fightUnit = unit;
}
}, this);
//fight until death
if(fightUnit) {
while(this.data.health >= 0 && fightUnit.data.health >= 0) {
this.attack(fightUnit);
}
console.log('battle end');
}
};
HexGame.Unit.prototype.playTurn = function() {
if(this.isPlayer) {
this.showMovementOptions();
}
else {
this.aiEnemyMovement();
}
};
HexGame.Unit.prototype.aiEnemyMovement = function() {
//clear previous selection
this.state.clearSelection();
//get the current tile
var currTile = this.board.getFromRowCol(this.row, this.col);
//get the adjacent cells
var adjacentCells = this.board.getAdjacent(currTile, true);
//target tile
var targetTile;
//go through each adjacent cell and find a rival
adjacentCells.forEach(function(tile){
//find out if there is a rival in there
this.state.playerUnits.forEachAlive(function(unit){
if(tile.row === unit.row && tile.col === unit.col) {
console.log('we have found a rival to attack');
targetTile = tile;
}
}, this);
}, this);
//if you didnt find a rival, then move randomly
if(!targetTile) {
var randomIndex = Math.floor(Math.random() * adjacentCells.length);
targetTile = adjacentCells[randomIndex];
}
//move to the target
this.moveUnit(targetTile);
}; | b118c19e47e7b1dedd7c9ee557bec9043281d18f | [
"JavaScript"
] | 26 | JavaScript | svinotavr/JavascriptPhaserTutorials | 265bc4458b3e7cd8b59ef6328a17fb6723d65e34 | 071b5b304034d0449a494eb31e9d6ebdc60849f8 | |
refs/heads/main | <file_sep>perct: int = int(input('Введите количество процентов от 0 до 20: '))
a = [1]
b = [2, 3, 4]
c = [i for i in range(5, 20+1)]
d = [0]
if perct in a:
print(perct, 'Процент')
elif perct in b:
print(perct, 'Процента')
elif perct in c:
print(perct, 'Процентов')
elif perct in d:
print(perct, 'Процентов')
<file_sep>duration = int(input('Введите время в секундах: '))
days = duration // 3600 // 24
hours: int = duration // 3600 - days * 24
minutes = duration // 60 % 60
seconds = duration % 60
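# Example: an input of 100000 seconds gives 1 day, 3 hours, 46 minutes, 40 seconds.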
print(days, "дн,", hours, "час,", minutes, "мин,", seconds, "сек.", sep=" ")<file_sep>nums = []
summ1 = 0
summ2 = 0
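# Build the cubes of the odd numbers from 1 to 999; summ1 collects the cubes whose
# digit sum is divisible by 7 (17 is then added in place to each of those cubes);
# summ2 collects every resulting value whose digit sum is divisible by 7.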
for i in range(1, 1001):
if i % 2 != 0:
nums.append(i ** 3) # создаём список кубов нечётных чисел от 1 до 1000
for idx in range(len(nums)):# идём по индексам этого списка
num_sum = 0
j = nums[idx] # берём очередное число из этого списка
while j:
num_sum += j % 10# считаем для этого числа сумму цифр
j = j // 10
if num_sum % 7 == 0:# если эта сумма цифр делится на 7
summ1 += nums[idx] # то добавляем число в первую сумму
nums[idx] += 17 # прибавляем к этому числу 17, не создавая новый список
num_sum = 0
j = nums[idx]
while j:
num_sum += j % 10 # вычисляем для этого нового числа сумму цифр
j = j // 10
if num_sum % 7 == 0: # если эта новая сумма цифр делится на 7
summ2 += nums[idx]# то добавляем это новое число во вторую сумму
print(summ1)
print(summ2)
| 31e546883776518ab1b2396ec9801bf7c733739b | [
"Python"
] | 3 | Python | CaptainJack79/Saltykov_Evgeniy_Task_1 | a73a4bbf228298db01d64b955318fb9c62b034df | 10bcd233c1f3487e0fa96b28dae52b5b8d8964e7 | |
refs/heads/master | <file_sep>from __future__ import print_function, division
import numpy as np
class Tuning():
"""
Equal temperament tuning - allows to convert between frequency and pitch.
- unit pitch space
- continous, unbounded
- 1.0 ~ one octave
- step pitch space
- continous, unbounded
- N steps ~ one octave
- unit pitch space * N
- unit pitch class space
- continous, bounded [0, 1.0)
- unit pitch space % 1.0
- step pitch class space
- continous, bounded [0, N)
- unit step pitch space % N
- integer step pitch space
- discrete, unbounded
- floor(step pitch space)
- integer step pitch class space
- discrete, bounded {0, 1, .. N - 1}
- floor(step pitch class space)
"""
def __init__(self, base_freq=440, steps_per_octave=12, octave_ratio=2):
self.base_freq = base_freq
self.steps_per_octave = steps_per_octave
self.octave_ratio = octave_ratio
def pitch_to_freq(self, pitch):
factor = self.pitch_to_relative_freq(pitch)
return factor * self.base_freq
def freq_to_pitch(self, freq):
rel_freq = freq / self.base_freq
if self.octave_ratio == 2:
p = np.log2(rel_freq)
else:
            p = np.log(rel_freq) / np.log(self.octave_ratio)
return p * self.steps_per_octave
def pitch_to_relative_freq(self, pitch):
return pow(self.octave_ratio, pitch / self.steps_per_octave)
class PitchQuantizer():
def __init__(self, tuning, bin_division=1):
self.tuning = tuning
self.bin_division = bin_division
def quantize(self, freqs):
"""
Quantizes frequencies to nearest pitch bins (with optional division of
bins).
"""
return np.round(self.tuning.freq_to_pitch(freqs) * self.bin_division) / self.bin_division
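# Example usage (illustrative; the values assume the default A440, 12-step tuning):
#   t = Tuning()
#   t.pitch_to_freq(12)       # 880.0 - one octave above A4
#   t.freq_to_pitch(220.0)    # -12.0
#   PitchQuantizer(t).quantize(np.array([440.0, 466.16]))  # array([ 0.,  1.])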
<file_sep>from __future__ import print_function, division
import math
import os
import numpy as np
import scipy
from .spectrogram import db_scale, positive_freq_magnitudes, \
select_positive_freq_fft, fftfreqs, normalized_window, scale_magnitudes
from .signal import SignalFrames
from .tuning import PitchQuantizer, Tuning
from .plots import save_raw_spectrogram_bitmap
class LinearTransform():
def __init__(self, positive_only=True):
# range of normalized frequencies
self.bin_range = (0, 0.5) if positive_only else (0, 1)
def transform_freqs(self, X_inst_freqs, sample_rate):
output_bin_count = X_inst_freqs.shape[1]
X_y = X_inst_freqs
return X_y, output_bin_count, self.bin_range
class PitchTransform():
"""
Perform the proper quantization to pitch bins according to possible
subdivision before the actual histogram computation. Still we need to
move the quantized pitch value a bit from the lower bin edge to ensure
proper floating point comparison. Note that the quantizer rounds values
from both sides towards the quantized value, while histogram2d floors the
values to the lower bin edge. The epsilon is there to prevent log of 0
in the pitch to frequency transformation.
bin_range: range of pitch bins (default: A0 27.5 Hz to E10 21096.16 Hz)
"""
def __init__(self, bin_range=(-48, 67), bin_division=1, tuning=Tuning()):
self.tuning = tuning
self.bin_range = bin_range
self.bin_division = bin_division
def transform_freqs(self, X_inst_freqs, sample_rate):
quantization_border = 1 / (2 * self.bin_division)
pitch_quantizer = PitchQuantizer(self.tuning, bin_division=self.bin_division)
eps = np.finfo(np.float32).eps
# TODO: is it possible to quantize using relative freqs to avoid
# dependency on the fs parameter?
X_y = pitch_quantizer.quantize(np.maximum(sample_rate * X_inst_freqs, eps) + quantization_border)
output_bin_count = (self.bin_range[1] - self.bin_range[0]) * self.bin_division
return X_y, output_bin_count, self.bin_range
class Spectrogram():
"""
Represents spectrogram information of a time-domain signal which can be used
to compute various types of reassigned spectrograms, pitchgrams, etc.
"""
def __init__(self, signal_frames, window=scipy.hanning, positive_only=True):
"""
:param signal_frames: signal represented as SignalFrames instance
:param window: STFT window function - produces 1D window which will
be normalized
"""
self.signal_frames = signal_frames
x_frames = signal_frames.frames
w = normalized_window(window(signal_frames.frame_size))
# complex spectra of windowed blocks of signal - STFT
self.X_complex = np.fft.fft(x_frames * w)
# linear magnitude spectrogram
self.X_mag = abs(self.X_complex) / self.X_complex.shape[1]
# spectra of signal shifted in time
# This fakes looking at the previous frame shifted by one sample.
# In order to work only with one frame of size N and not N + 1, we fill the
# missing value with zero. This should not introduce a large error, since the
# borders of the amplitude frame will go to zero anyway due to applying a
        # window function in the STFT transform.
X_prev_time = np.fft.fft(shift_right(x_frames) * w)
# spectra shifted in frequency
X_prev_freq = shift_right(self.X_complex)
# cross-spectra - ie. spectra of cross-correlation between the
# respective time-domain signals
X_cross_time = cross_spectrum(self.X_complex, X_prev_time)
X_cross_freq = cross_spectrum(self.X_complex, X_prev_freq)
# instantaneous frequency estimates
# normalized frequencies in range [0.0, 1.0] - from DC to sample rate
self.X_inst_freqs = estimate_instant_freqs(X_cross_time)
# instantaneous group delay estimates
# relative coordinates within the frame with range [-0.5, 0.5] where
# 0.0 is the frame center
self.X_group_delays = estimate_group_delays(X_cross_freq)
if positive_only:
self.X_mag = positive_freq_magnitudes(self.X_mag)
self.X_complex, self.X_inst_freqs, self.X_group_delays = [
select_positive_freq_fft(values) for values in
[self.X_complex, self.X_inst_freqs, self.X_group_delays]
]
def reassigned(
self,
output_frame_size=None, transform=LinearTransform(),
reassign_time=True, reassign_frequency=True, magnitudes='power_db'):
"""
Reassigned spectrogram requantized both in frequency and time.
Note it is quantized into non-overlapping output time frames which may be
of a different size than input time frames.
transform - transforms the frequencies
"""
if output_frame_size is None:
output_frame_size = self.signal_frames.hop_size
frame_size = self.signal_frames.frame_size
fs = self.signal_frames.sample_rate
frame_duration = frame_size / fs
frame_center_time = frame_duration / 2
# group delays are in range [-0.5, 0.5] - relative coordinates within the
# frame where 0.0 is the frame center
input_bin_count = self.X_inst_freqs.shape[1]
eps = np.finfo(np.float32).eps
X_time = np.tile(self.signal_frames.start_times + frame_center_time +
eps, (input_bin_count, 1)).T
if reassign_time:
X_time += self.X_group_delays * frame_duration
if reassign_frequency:
X_y = self.X_inst_freqs
else:
X_y = np.tile(fftfreqs(frame_size, fs) / fs, (self.X_inst_freqs.shape[0], 1))
X_y, output_bin_count, bin_range = transform.transform_freqs(X_y,
self.signal_frames.sample_rate)
frame_duration = frame_size / fs
end_input_time = self.signal_frames.duration
output_frame_count = int(math.ceil((end_input_time * fs) / output_frame_size))
print('output_frame_count', output_frame_count)
time_range = (0, output_frame_count * output_frame_size / fs)
output_shape = (output_frame_count, output_bin_count)
X_spectrogram, x_edges, y_edges = np.histogram2d(
X_time.flatten(), X_y.flatten(),
weights=self.X_mag.flatten(),
range=(time_range, bin_range),
bins=output_shape)
X_spectrogram = scale_magnitudes(X_spectrogram, magnitudes)
return X_spectrogram
def cross_spectrum(spectrumA, spectrumB):
"""
Returns a cross-spectrum, ie. spectrum of cross-correlation of two signals.
This result does not depend on the order of the arguments.
Since we already have the spectra of signals A and B and and want the
spectrum of their cross-correlation, we can replace convolution in time
domain with multiplication in frequency domain.
"""
return spectrumA * spectrumB.conj()
def shift_right(values):
"""
Shifts the array to the right by one place, filling the empty values with
zeros.
TODO: use np.roll()
"""
# TODO: this fails for 1D input array!
return np.hstack([np.zeros((values.shape[0], 1)), values[..., :-1]])
def arg(values):
"""
Argument (angle) of complex numbers wrapped and scaled to [0.0, 1.0].
input: an array of complex numbers
output: an array of real numbers of the same shape
np.angle() returns values in range [-np.pi, np.pi].
"""
return np.mod(np.angle(values) / (2 * np.pi), 1.0)
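# For example: arg(np.array([1+0j, 1j, -1+0j, -1j])) -> array([0., 0.25, 0.5, 0.75])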
def estimate_instant_freqs(crossTimeSpectrum):
"""
Channelized instantaneous frequency - the vector of simultaneous
instantaneous frequencies computed over a single frame of the digital
short-time Fourier transform.
Instantaneous frequency - derivative of phase by time.
cif = angle(crossSpectrumTime) * sampleRate / (2 * pi)
In this case the return value is normalized (not multiplied by sampleRate)
to the [0.0; 1.0] interval, instead of absolute [0.0; sampleRate].
"""
return arg(crossTimeSpectrum)
def estimate_group_delays(crossFreqSpectrum):
"range: [-0.5, 0.5]"
return 0.5 - arg(crossFreqSpectrum)
def process_spectrogram(filename, frame_size, hop_size, output_frame_size):
"""
Computes three types of spectrograms (normal, frequency reassigned,
time-frequency reassigned) from an audio file and stores and image from each
spectrogram into PNG file.
"""
signal_frames = SignalFrames(filename, frame_size, hop_size, mono_mix=True)
spectrogram = Spectrogram(signal_frames)
image_filename = os.path.basename(filename).replace('.wav', '')
# STFT on overlapping input frames
X_stft = db_scale(spectrogram.X_mag ** 2)
save_raw_spectrogram_bitmap(image_filename + '_stft_frames.png', X_stft)
linear_transform = LinearTransform(positive_only=True)
# STFT requantized to the output frames (no reassignment)
X_stft_requantized = spectrogram.reassigned(output_frame_size,
linear_transform,
reassign_time=False, reassign_frequency=False)
save_raw_spectrogram_bitmap(image_filename + '_stft_requantized.png', X_stft_requantized)
# STFT reassigned in time and requantized to output frames
X_reassigned_t = spectrogram.reassigned(output_frame_size,
linear_transform,
reassign_time=True, reassign_frequency=False)
save_raw_spectrogram_bitmap(image_filename + '_reassigned_t.png', X_reassigned_t)
# STFT reassigned in frequency and requantized to output frames
X_reassigned_f = spectrogram.reassigned(output_frame_size,
linear_transform,
reassign_time=False, reassign_frequency=True)
save_raw_spectrogram_bitmap(image_filename + '_reassigned_f.png', X_reassigned_f)
# STFT reassigned both in time and frequency and requantized to output frames
X_reassigned_tf = spectrogram.reassigned(output_frame_size,
linear_transform,
reassign_time=True, reassign_frequency=True)
save_raw_spectrogram_bitmap(image_filename + '_reassigned_tf.png', X_reassigned_tf)
pitch_transform = PitchTransform(bin_range=(-48, 67), bin_division=1)
# TF-reassigned pitchgram
X_pitchgram_tf = spectrogram.reassigned(output_frame_size,
pitch_transform,
reassign_time=True, reassign_frequency=True)
save_raw_spectrogram_bitmap(image_filename + '_pitchgram_tf.png', X_pitchgram_tf)
# T-reassigned pitchgram
X_pitchgram_t = spectrogram.reassigned(output_frame_size,
pitch_transform,
reassign_time=True, reassign_frequency=False)
save_raw_spectrogram_bitmap(image_filename + '_pitchgram_t.png', X_pitchgram_t)
# F-reassigned pitchgram
X_pitchgram_t = spectrogram.reassigned(output_frame_size,
pitch_transform,
reassign_time=False, reassign_frequency=True)
save_raw_spectrogram_bitmap(image_filename + '_pitchgram_f.png', X_pitchgram_t)
# non-reassigned pitchgram
X_pitchgram = spectrogram.reassigned(output_frame_size,
pitch_transform,
reassign_time=False, reassign_frequency=False)
save_raw_spectrogram_bitmap(image_filename + '_pitchgram_no.png', X_pitchgram)
def reassigned_spectrogram(signal_frames, output_frame_size=None, magnitudes='power_db',
reassign_time=True, reassign_frequency=True):
"""
From frames of audio signal it computes the frequency reassigned spectrogram
requantized back to the original linear bins.
Only the real half of spectrum is given.
"""
return Spectrogram(signal_frames).reassigned(
output_frame_size, LinearTransform(),
reassign_time, reassign_frequency, magnitudes=magnitudes)
# [-48,67) -> [~27.5, 21096.2) Hz
def pitchgram(signal_frames, output_frame_size=None, bin_range=(-48, 67), bin_division=1, magnitudes='power_db'):
"""
From frames of audio signal it computes the frequency reassigned spectrogram
requantized to pitch bins (pitchgram).
"""
return Spectrogram(signal_frames).reassigned(
output_frame_size, PitchTransform(bin_range, bin_division), magnitudes=magnitudes)
if __name__ == '__main__':
import sys
process_spectrogram(filename=sys.argv[1], frame_size=4096, hop_size=1024, output_frame_size=1024)
<file_sep>from __future__ import print_function, division
import numpy as np
import math
import scipy
from .signal import mean_power
from .signal import SignalFrames
def spectrogram(filename, frame_size=2048, hop_size=512, magnitudes='power_db'):
"""
Computes an STFT magnitude power spectrogram from an audio file.
Returns: spectrogram, audio_samples, frame_times
"""
signal_frames = SignalFrames(filename, frame_size, hop_size, mono_mix=True)
x = signal_frames.frames
times = signal_frames.start_times
w = create_window(frame_size)
X = stft_spectrogram(x, w, magnitudes)
return X, x, times
def stft_spectrogram(x, w, magnitudes):
"""
Computes an STFT magnitude power spectrogram from an array of samples
already cut to frames.
Input:
- x - time-domain samples - array of shape (frames, frame_size)
- w - window - array of shape (frame_size)
    - magnitudes - indicates whether to scale the magnitudes to decibels
Output: spectrogram
"""
X = magnitude_spectrum(x * w) ** 2
if magnitudes:
X = db_scale(X)
return X
def magnitude_spectrum(x):
'''
Magnitude spectrum scaled so that each bin corresponds to the original sine
    amplitude. Only the positive-frequency half of the spectrum is returned.
x - 1D sampled signal (possibly already windowed)
For signal in range [-1., 1.] the output range is [0., 1.].
The energy is not preserved, it's scaled down
(energy_out = energy_in / (N//2)).
'''
X = np.fft.fft(x)
Xr = positive_freq_magnitudes(X)
N = Xr.shape[-1]
return abs(Xr) / N
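# Example (illustrative): a sine of amplitude 0.5 at an exact bin frequency
# yields a single peak of ~0.5:
#   n = 1024
#   x = 0.5 * np.sin(2 * np.pi * 8 * np.arange(n) / n)
#   magnitude_spectrum(x).max()  # ~0.5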
def select_positive_freq_fft(X):
"""
Select the positive frequency part of the spectrum in a spectrogram.
"""
N = X.shape[1]
return X[:, :N//2]
# TODO: we should probably multiply the whole result by 2, to conserve energy
def positive_freq_magnitudes(X):
"""
Select magnitudes from positive-frequency half of the spectrum in a
spectrogram. The DC term shared for positive and negative frequencies is
halved.
Note this is not a complete information to reconstruct the full spectrum,
since we throw away the bin at the negative Nyquist frequency (index N/2+1).
"""
X_pos = select_positive_freq_fft(X).copy()
X_pos[:, 0] *= 0.5
return X_pos
def create_window(size):
"""
A normalized Hanning window of given size. Useful for analyzing sinusoidal
signals.
"""
return normalized_window(scipy.hanning(size))
def normalized_window(w):
"""
Normalizes an FFT window so that it has energy equal to its length, and mean
power equal to 1.0.
"""
return w / mean_power(w)
def db_scale(magnitude_spectrum, normalized=False):
"""
    Transform linear magnitude to dBFS (full-scale) [-120, 0] (for input range
[0.0, 1.0]) which can be optionally normalized to [0.0, 1.0].
"""
scaled = 20 * np.log10(np.maximum(1e-6, magnitude_spectrum))
# map from raw dB [-120.0, 0] to [0.0, 1.0]
if normalized:
scaled = (scaled / 120) + 1
return scaled
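# For example: db_scale(1.0) == 0.0 and db_scale(1e-3) == -60.0;
# with normalized=True these map to 1.0 and 0.5 respectively.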
def scale_magnitudes(X_mag, transform):
if transform == 'linear':
return X_mag
elif transform == 'power':
return X_mag ** 2
elif transform == 'power_db':
return db_scale(X_mag ** 2)
elif transform == 'power_db_normalized':
return db_scale(X_mag ** 2, normalized=True)
else:
raise ValueError('Unknown magnitude scaling transform ' + transform)
# -- extras --
def energy_weighted_spectrum(x):
N = x.shape[-1]
X = np.fft.fft(x)
# np.allclose(energy(abs(X) / math.sqrt(N)), energy(x))
# np.allclose(energy(abs(X[:N//2]) / math.sqrt(N//2)), energy(x))
return abs(X) / math.sqrt(N)
def fftfreqs(frame_size, fs):
"""
Positive FFT frequencies from DC (incl.) until Nyquist (excl.).
The size of half of the FTT size.
"""
return np.fft.fftfreq(frame_size, 1/fs)[:frame_size // 2]
def inverse_spectrum(spectrum, window):
'''
inverse_spectrum(np.fft.fft(x * window), window) == x
'''
return np.real(np.fft.ifft(spectrum)) / window
<file_sep>from __future__ import print_function, division
import numpy as np
import os
from tfr import SignalFrames, Tuning, pitchgram, reassigned_spectrogram
from tfr.reassignment import shift_right, arg
DATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data')
def test_shift_right():
assert np.allclose(shift_right(np.array([[1, 2, 3]])), np.array([0, 1, 2]))
def test_pitchgram_on_single_tone_should_have_peak_at_that_tone():
pitch = 12 + 7 # G5
f = Tuning().pitch_to_freq(pitch)
fs = 44100
x = sine(sample_time(0, 1, fs=fs), freq=f)
frame_size = 4096
hop_size = 2048
output_frame_size = hop_size
signal_frames = SignalFrames(x, frame_size, hop_size, sample_rate=fs, mono_mix=True)
bin_range = [-48, 67]
x_pitchgram = pitchgram(signal_frames,
output_frame_size, magnitudes='power_db', bin_range=bin_range, bin_division=1)
max_bin_expected = pitch - bin_range[0]
max_bin_actual = x_pitchgram.mean(axis=0).argmax()
assert x_pitchgram.shape == (22, 115), x_pitchgram.shape
assert max_bin_actual == max_bin_expected
def test_arg():
values = np.array([-5.-1.j, -1.-5.j, 2.-2.j, 3.+4.j, 2.+0.j, 2.-5.j, -3.-3.j,
-3.+1.j, -2.-5.j, 0.+2.j])
args = arg(values)
expected_args=np.array([0.53141648, 0.71858352, 0.875 , 0.14758362, 0.,
0.81055947, 0.625 , 0.44879181, 0.68944053, 0.25])
assert np.allclose(args, expected_args)
def test_reassigned_spectrogram_values_should_be_in_proper_range():
frame_size = 4096
hop_size = frame_size
output_frame_size = 1024
audio_file = os.path.join(DATA_DIR, 'she_brings_to_me.wav')
signal_frames = SignalFrames(audio_file, frame_size, hop_size, mono_mix=True)
X_r = reassigned_spectrogram(signal_frames, output_frame_size, magnitudes='power_db')
assert np.all(X_r >= -120), 'min value: %f should be >= -120' % X_r.min()
assert np.all(X_r <= 0), 'max value: %f should be <= 0' % X_r.max()
def test_reassigned_pitchgram_values_should_be_in_proper_range():
frame_size = 4096
hop_size = frame_size
output_frame_size = 1024
audio_file = os.path.join(DATA_DIR, 'she_brings_to_me.wav')
signal_frames = SignalFrames(audio_file, frame_size, hop_size, mono_mix=True)
X_r = pitchgram(signal_frames, output_frame_size, magnitudes='power_db')
assert np.all(X_r >= -120), 'min value: %f should be >= -120' % X_r.min()
assert np.all(X_r <= 0), 'max value: %f should be <= 0' % X_r.max()
# --- helper functions ---
def sample_time(since, until, fs=44100.):
'''
Generates time sample in given interval [since; until]
with given sampling rate (fs).
'''
return np.arange(since, until, 1. / fs)
def sine(t, freq=1., amplitude=1., phase=0.):
'''
Samples the sine function given the time samples t,
frequency (Hz), amplitude and phase [0; 2 * np.pi).
'''
return amplitude * np.sin(2 * np.pi * freq * t + phase)
<file_sep>from __future__ import absolute_import
from .reassignment import Spectrogram, LinearTransform, PitchTransform, \
reassigned_spectrogram, pitchgram
from .signal import SignalFrames
from .sklearn import PitchgramTransformer
from .tuning import Tuning, PitchQuantizer
<file_sep>from __future__ import print_function, division
import numpy as np
from tfr import SignalFrames
def test_split_to_frames():
signal_frames = SignalFrames(np.arange(23), frame_size=8, hop_size=6,
sample_rate=44100)
assert np.allclose(np.array([
[ 0, 1, 2, 3, 4, 5, 6, 7],
[ 6, 7, 8, 9, 10, 11, 12, 13],
[12, 13, 14, 15, 16, 17, 18, 19],
[18, 19, 20, 21, 22, 0, 0, 0],
]), signal_frames.frames)
<file_sep>import numpy as np
import scipy.misc
import sys
def save_raw_spectrogram_bitmap(file_name, spectrogram):
# input:
# rows = frequency bins (low to high)
# columns = time
# output:
# rows = frequency bins (bottom to top)
# columns = time (left to right)
scipy.misc.imsave(file_name, spectrogram.T[::-1])
def spectrogram_to_image(npz_file):
save_raw_spectrogram_bitmap(npz_file + '.png', np.load(npz_file)['arr_0'])
if __name__ == '__main__':
spectrogram_to_image(sys.argv[1])
<file_sep>from __future__ import absolute_import
from sklearn.base import BaseEstimator, TransformerMixin
from .signal import SignalFrames
from .reassignment import pitchgram
class PitchgramTransformer(BaseEstimator, TransformerMixin):
def __init__(self, sample_rate=44100, frame_size=4096, hop_size=2048,
output_frame_size=None,
bin_range=[-48, 67], bin_division=1):
self.sample_rate = sample_rate
self.frame_size = frame_size
self.hop_size = hop_size
# if no output frame size is specified the input hop size is the default
self.output_frame_size = output_frame_size if output_frame_size is not None else hop_size
self.bin_range = bin_range
self.bin_division = bin_division
def transform(self, X, **transform_params):
"""
Transforms audio clip X into a normalized pitchgram.
Input: X - mono audio clip - numpy array of shape (samples,)
Output: X_pitchgram - numpy array of shape (frames, bins)
"""
signal_frames = SignalFrames(X, self.frame_size, self.hop_size,
self.sample_rate, mono_mix=True)
X_pitchgram = pitchgram(
signal_frames,
self.output_frame_size,
magnitudes='power_db_normalized',
bin_range=self.bin_range,
bin_division=self.bin_division)
return X_pitchgram
def fit(self, X, y=None, **fit_params):
return self
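# Example usage (a sketch; 'audio.flac' is a placeholder file name):
#   import soundfile as sf
#   x, fs = sf.read('audio.flac')
#   ct = PitchgramTransformer(sample_rate=fs)
#   x_pitchgram = ct.transform(x)  # shape: (frame_count, 115) with the default bin_range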
<file_sep>for spec_type in stft reassigned pitchgram; do
python -m tfr.spectrogram_features \
she_brings_to_me.wav she_brings_to_me_$spec_type.npz \
-t $spec_type -b 4096 -p 2048 --output-frame-size=2048
done
<file_sep>from __future__ import print_function, division
import math
import numpy as np
import soundfile as sf
class SignalFrames():
"""
Represents frames of time-domain signal of regular size with possible
    overlap plus its metadata.
    The signal can be read from a numpy array or from a file via the soundfile library.
The input array can be of shape `(samples,)` or `(samples, channels)`. By
default the signal is mixed to mono to shape `(samples,)`. This can be
disabled by specifying `mono_mix=False`.
It is split into frames of `frame_size`. In case `hop_size < frame_size` the
frames are overlapping. When the last frame is not fully covered by the
signal it's padded with zeros.
    When reading the signal from a file the sample rate can usually be determined
    automatically, otherwise you should provide `sample_rate`.
Attributes:
- `frames` - signal split to frame, shape `(frames, frame_size)`
- `frame_size`
- `hop_size`
- `length` - length of the source signal (in samples)
- `duration` - duration of the source signal (in seconds)
- `start_times` - array of start times of each frame (in seconds)
Example usage:
```
signal_frames = SignalFrames('audio.flac', frame_size=4096, hop_size=1024)
spectrogram = np.fft.fft(signal_frames.frames * window)
```
:param source: source of the time-domain signal - numpy array, file name,
file-like object
:param frame_size: size of each frame (in samples)
:param hop_size: hop between frame starts (in samples)
:param sample_rate: sample rate (required when source is an array)
    :param mono_mix: indicates that multi-channel signal should be mixed to mono
(mean of all channels)
"""
def __init__(self, source, frame_size=4096, hop_size=2048, sample_rate=None,
mono_mix=True):
if type(source) == np.ndarray:
signal = source
self.sample_rate = sample_rate
else:
signal, self.sample_rate = sf.read(source)
if mono_mix:
signal = self._to_mono(signal)
self.frames = self._split_to_frames(signal, frame_size, hop_size)
self.frame_size = frame_size
self.hop_size = hop_size
self.length = len(signal)
self.duration = self.length / self.sample_rate
self.start_times = np.arange(0, self.duration, self.hop_size / self.sample_rate)
def _split_to_frames(self, x, frame_size, hop_size):
"""
Splits the input audio signal to frame of given size (in samples).
Start position of each frame is determined by given hop size.
The last frame is right zero-padded if needed.
input:
x - array-like representing the audio signal
"""
if hop_size is None:
hop_size = frame_size
frame_count = int(math.ceil(len(x) / hop_size))
def pad(x, size, value=0):
padding_size = size - len(x)
if padding_size:
x = np.pad(x, (0, padding_size), 'constant', constant_values=(0, 0))
return x
frames = np.vstack(
pad(x[start:start + frame_size], frame_size) \
for start in np.arange(0, hop_size * frame_count, hop_size))
return frames
def _to_mono(self, samples):
if samples.ndim == 1:
return samples
else:
return samples.mean(axis=-1)
def mean_power(x_frames):
return np.sqrt(np.mean(x_frames**2, axis=-1))
def power(x_frames):
return np.sqrt(np.sum(x_frames**2, axis=-1))
def mean_energy(x_frames):
"""
Example usage:
import matplotlib.pyplot as plt
import soundfile as sf
from analysis import read_frames
def analyze_mean_energy(file, frame_size=1024):
frames, t, fs = read_frames(x, frame_size)
y = mean_energy(frames)
plt.semilogy(t, y)
plt.ylim(0, 1)
"""
return np.mean(x_frames**2, axis=-1)
def energy(x_frames):
return np.sum(x_frames**2, axis=-1)
<file_sep>"""
The goal is to transform an audio signal into an STFT spectrogram in a form
suitable as features for machine learning.
"""
from __future__ import print_function, division
import numpy as np
import os
from .signal import SignalFrames
from .reassignment import reassigned_spectrogram, pitchgram
def spectrogram_features(file_name, frame_size, output_frame_size, hop_size, spectrogram_type, magnitudes='power_db_normalized'):
signal_frames = SignalFrames(file_name, frame_size, hop_size, mono_mix=True)
if spectrogram_type == 'stft':
X = reassigned_spectrogram(signal_frames, output_frame_size,
magnitudes=magnitudes, reassign_time=False, reassign_frequency=False)
elif spectrogram_type == 'reassigned':
X = reassigned_spectrogram(signal_frames, output_frame_size,
magnitudes=magnitudes)
elif spectrogram_type == 'pitchgram':
X = pitchgram(signal_frames, output_frame_size, magnitudes=magnitudes)
else:
raise ValueError('unknown spectrogram type: %s' % spectrogram_type)
return X
def spectrogram_features_to_file(input_filename, output_filename, frame_size, output_frame_size, hop_size, spectrogram_type, magnitudes='power_db'):
X = spectrogram_features(input_filename, frame_size, output_frame_size, hop_size, spectrogram_type, magnitudes)
np.savez_compressed(output_filename, X)
# scipy.misc.imsave(output_filename.replace('.npz', '.png'), X.T[::-1])
def default_output_filename(input_file_name, type):
return os.path.basename(input_file_name).replace('.wav', '_power_spectrogram_%s.npz' % type)
def parse_args():
import argparse
parser = argparse.ArgumentParser(description='Extracts STFT magnitude spectrogram features.')
parser.add_argument('input_file', metavar='INPUT', help='input file in WAV format')
parser.add_argument('output_file', metavar='OUTPUT', nargs='?', help='output file in NumPy npz format')
parser.add_argument('-b', '--frame-size', type=int, default=2048, help='STFT frame size')
parser.add_argument('-p', '--hop-size', type=int, default=512, help='STFT hop size')
parser.add_argument('-o', '--output-frame-size', type=int, default=512, help='output frame size')
parser.add_argument('-t', '--type', default='stft', help='plain "stft", "reassigned" spectrogram or "pitchgram"')
parser.add_argument('-m', '--magnitudes', default='power_db_normalized',
choices=['linear', 'power', 'power_db', 'power_db_normalized'])
return parser.parse_args()
def main():
args = parse_args()
output = args.output_file if args.output_file else default_output_filename(args.input_file, args.type)
spectrogram_features_to_file(args.input_file, output, args.frame_size,
args.output_frame_size, args.hop_size, args.type, args.magnitudes)
if __name__ == '__main__':
main()
<file_sep># tfr - time-frequency reassignment in Python
[](https://pypi.python.org/pypi/tfr)


Spectral audio feature extraction using [time-frequency reassignment](https://en.wikipedia.org/wiki/Reassignment_method).
<img src="examples/multicomponent_animation.gif" alt="reassigned spectrogram illustration" width="516" height="436">
Besides normal spectrograms it allows to compute reassigned spectrograms, transform them (eg. to log-frequency scale) and requantize them (eg. to musical pitch bins). This is useful to obtain good features for audio analysis or machine learning on audio data.
A reassigned spectrogram often provides more precise localization of energy in the time-frequency plane than a plain spectrogram. Roughly said in the reassignment method we use the phase (which is normally discarded) and move the samples on the time-frequency plane to a more suitable place computed from derivatives of the phase.
This library supports reassignment in both frequency and time (both are optional). It also requantizes the overlapping input grid onto a non-overlapping output grid.
It is a good building block to compute [chromagram features](https://en.wikipedia.org/wiki/Chroma_feature) (aka pitch class profiles) where pitch is transformed into pitch class by ignoring the octave. See also [harmonic pitch class profiles](https://en.wikipedia.org/wiki/Harmonic_pitch_class_profiles).
## Installation
```
pip install tfr
```
Or for development (all code changes will be available):
```
git clone https://github.com/bzamecnik/tfr.git
pip install -e tfr
```
## Usage
### Split audio signal to frames
You can read time-domain signal from an audio file (using the `soundfile` library) and split it into frames for spectral processing.
```
import tfr
signal_frames = tfr.SignalFrames('audio.flac')
```
`SignalFrames` instance contains the signal split into frames and some metadata useful for further processing.
The signal values are normalized to [0.0, 1.0] and the channels are converted to mono.
It is possible to provide the signal a numpy array as well.
```
import tfr
x = np.sin(2 * np.pi * 10 * np.linspace(0, 1, 1000))
signal_frames = tfr.SignalFrames(x)
```
### Minimal example - pitchgram from audio file
```
import tfr
x_pitchgram = tfr.pitchgram(tfr.SignalFrames('audio.flac'))
```
From audio frames it computes a reassigned pitchgram of shape `(frame_count, bin_count)` with values being log-magnitudes in dBFS `[-120.0, 0.0]`. Sensible parameters are used by default, but you can change them if you wish.
### Reassigned spectrogram
Like normal one but sharper and requantized.
```
import tfr
x_spectrogram = tfr.reassigned_spectrogram(tfr.SignalFrames('audio.flac'))
```
### Signal frames with specific parameters
- `frame_size` - affects the FFT size - trade-off between frequency and time resolution, good to use powers of two, eg. 4096
- `hop_size` - affects the overlap between frames since a window edges fall to zero, eg. half of frame_size (2048)
```
import tfr
signal_frames = tfr.SignalFrames('audio.flac', frame_size=1024, hop_size=256)
```
### General spectrogram API
The `pitchgram` and `reassigned_spectrogram` functions are just syntax sugar for the `Spectrogram` class. You can use it directly to gain more control.
General usage:
```
x_spectrogram = tfr.Spectrogram(signal_frames).reassigned()
```
From one Spectrogram instance you can efficiently compute reassigned spectrograms with various parameters.
```
s = tfr.Spectrogram(signal_frames)
x_spectrogram_tf = s.reassigned(output_frame_size=4096)
x_spectrogram_f = s.reassigned(output_frame_size=512)
```
Different window function (by default we use Hann window):
```
import scipy
x_spectrogram = tfr.Spectrogram(signal_frames, window=scipy.blackman).reassigned()
```
Different output frame size (by default we make it the same as input hop size):
```
x_spectrogram = tfr.Spectrogram(signal_frames).reassigned(output_frame_size=512)
```
Disable reassignment of time and frequency separately:
```
s = tfr.Spectrogram(signal_frames)
x_spectrogram = s.reassigned(reassign_time=False, reassign_frequency=False)
x_spectrogram_t = s.reassigned(reassign_frequency=False)
x_spectrogram_f = s.reassigned(reassign_time=False)
x_spectrogram_tf = s.reassigned()
```
Disable decibel transform of output values:
```
x_spectrogram = tfr.Spectrogram(signal_frames).reassigned(magnitudes='power')
```
Magnitudes in the spectrogram can be transformed at the end in multiple ways given by the `magnitudes` parameter:
- `linear` - energy spectrum
- `power` - power spectrum
- `power_db` - power spectrum in decibels, range: [-120, 0]
- `power_db_normalized` - power spectrum in decibels normalized to range: [0, 1]
- this is useful as a feature
Use some specific transformation of the output values. `LinearTransform` (default) is just for normal spectrogram, `PitchTransform` is for pitchgram. Or you can write your own.
```
x_spectrogram = tfr.Spectrogram(signal_frames).reassigned(transform=LinearTransform())
```
```
x_pitchgram = tfr.Spectrogram(signal_frames).reassigned(transform=PitchTransform())
```
```
import numpy as np

class LogTransform():
    def __init__(self, bin_count=100):
        self.bin_count = bin_count

    def transform_freqs(self, X_inst_freqs, sample_rate):
        eps = np.finfo(np.float32).eps
        X_y = np.log10(np.maximum(sample_rate * X_inst_freqs, eps))
        bin_range = (0, np.log10(sample_rate))
        return X_y, self.bin_count, bin_range
x_log_spectrogram = tfr.Spectrogram(signal_frames).reassigned(transform=LogTransform())
```
### Pitchgram parameters
In pitchgram the frequencies are transformed into pitches in some tuning and then quantized to bins. You can specify the tuning range of pitch bins and their subdivision.
- `tuning` - instance of `Tuning` class, transforms between pitch and frequency
- `bin_range` is in pitches where 0 = 440 Hz (A4), 12 is A5, -12 is A3, etc.
- `bin_division` - bins per each pitch
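For example (an illustrative call, the parameter values are arbitrary):

```
x_pitchgram = tfr.pitchgram(signal_frames, bin_range=(-36, 48), bin_division=2)
```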
### Extract features via CLI
```
# basic STFT spectrogram
python -m tfr.spectrogram_features audio.flac spectrogram.npz
# reassigned STFT spectrogram
python -m tfr.spectrogram_features audio.flac -t reassigned reassigned_spectrogram.npz
# reassigned pitchgram
python -m tfr.spectrogram_features audio.flac -t pitchgram pitchgram.npz
```
Look for other options:
```
python -m tfr.spectrogram_features --help
```
### scikit-learn transformer
In order to extract pitchgram features within a sklearn pipeline, we can use `PitchgramTransformer`:
```
import soundfile as sf
x, fs = sf.read('audio.flac')
from tfr.sklearn import PitchgramTransformer
ct = PitchgramTransformer(sample_rate=fs)
x_pitchgram = ct.transform(x)
# output:
# - shape: (frame_count, bin_count)
#  - values in dBFS normalized to [0.0, 1.0]
```
## Status
Currently it's alpha. I'm happy to extract it from some other project into a separate repo and package it. However, the API must be completely redone to be more practical and obvious.
## About
- Author: <NAME> ([@bzamecnik](http://twitter.com/bzamecnik))
- License: MIT
### Support the project
Need some consulting or coding work regarding audio processing, machine learning or big data? Drop me a message via [email](mailto:<EMAIL>?subject=Work+inquiry+-+based+on+tfr) or [LinkedIn](https://www.linkedin.com/in/bohumirzamecnik). Or just say hello :).
## Literature
- [A Unified Theory of Time-Frequency Reassignment](https://arxiv.org/abs/0903.3080) - <NAME>, <NAME>, Digital Signal Processing 30 September 2005
- [Algorithms for computing the time-corrected instantaneous frequency (reassigned) spectrogram, with applications](http://acousticslab.org/learnmoresra/files/fulopfitz2006jasa119.pdf) - <NAME>, <NAME>, Journal of Acoustical Society of America, Jan 2006
- [Time Frequency Reassignment: A Review and Analysis](http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.4.1053&rep=rep1&type=pdf) - <NAME>, <NAME>,
Technical Report, Cambridge University Engineering Dept.
- [Improving the Readability of Time-Frequency and Time-Scale Representations by the Reassignment Method](http://perso.ens-lyon.fr/patrick.flandrin/IEEE_SP1995.pdf) - <NAME>, <NAME>, IEEE Transactions on Signal Processing, vol. 43, no. 5, May 1995
- [Time–frequency reassignment: from principles to algorithms](http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.331.5416&rep=rep1&type=pdf) - <NAME>, <NAME>, <NAME>, CRC Press 2003
- [Time-frequency toolbox for Matlab, user’s guide and reference guide](http://iut-saint-nazaire.univ-nantes.fr/~auger/tftb.html) - F.Auger, P.Flandrin, P.Goncalves, O.Lemoine
<file_sep>"""
An example showing analysis of multicomponent sound with and without time or
frequency reassignment. With both time and frequency reassignment the resulting
spectrogram is much sharper, although it still cannot resolve inseparable places.
"""
import numpy as np
import soundfile as sf
import subprocess
import tfr
def sample_time(since, until, fs=44100.):
"""
Generates time sample in given interval [since; until]
with given sampling rate (fs).
"""
return np.arange(since, until, 1. / fs)
def freq_mod_sine(t, carrier_freq, mod_freq, mod_amp):
return np.sin(2 * np.pi * carrier_freq * t
+ mod_amp * np.sin(2 * np.pi * mod_freq * t))
def sinusoid(t, freq):
"""
t - array of time samples
freq - frequency - constant or array of same size as t
"""
return np.sin(2 * np.pi * freq * t)
def linear_chirp(t, start_freq, end_freq):
slope = (end_freq - start_freq) / (t[-1] - t[0])
return np.sin(2 * np.pi * (start_freq * t + 0.5 * slope * t**2))
def generate_example_sound(fs=44100, duration=3, carrier_freq=2000, mod_freq=1, mod_amp=1000, click_freq=10):
t = sample_time(0, duration, fs)
component_count = 4
amplitude = 1 / component_count
# FM component
x = amplitude * freq_mod_sine(t, carrier_freq, mod_freq, mod_amp)
# constant tone component
x += amplitude * sinusoid(t, carrier_freq)
# linear chirps
x += amplitude * linear_chirp(t, carrier_freq-mod_amp, carrier_freq+mod_amp)
x += amplitude * linear_chirp(t, carrier_freq+mod_amp, carrier_freq-mod_amp)
# sawtooth pulse-train component
for i in range(duration*click_freq):
idx = int(i * fs / click_freq)
x[idx:idx+10] = 1
sf.write('multicomponent.wav', x, fs)
return x, fs
def compute_example_spectrograms():
x, fs = generate_example_sound()
signal_frames = tfr.SignalFrames(x, sample_rate=fs, frame_size=4096, hop_size=512)
def band_pass(X, y_range):
return X[:, slice(*y_range)]
y_range = (84, 287) # 900-3100 Hz
# plain non-reassigned spectrogram
X_stft = band_pass(
tfr.reassigned_spectrogram(
signal_frames,
reassign_time=False,
reassign_frequency=False),
y_range)
tfr.plots.save_raw_spectrogram_bitmap('multicomponent_stft.png', X_stft)
# time-reassigned spectrogram
X_t = band_pass(
tfr.reassigned_spectrogram(signal_frames, reassign_frequency=False),
y_range)
tfr.plots.save_raw_spectrogram_bitmap('multicomponent_t.png', X_t)
# frequency-reassigned spectrogram
X_f = band_pass(
tfr.reassigned_spectrogram(signal_frames, reassign_time=False),
y_range)
tfr.plots.save_raw_spectrogram_bitmap('multicomponent_f.png', X_f)
# time-frequency reassigned spectrogram
X_tf = band_pass(tfr.reassigned_spectrogram(signal_frames), y_range)
tfr.plots.save_raw_spectrogram_bitmap('multicomponent_tf.png', X_tf)
def make_animation():
def add_label_and_resize(source_file, dest_file, label):
subprocess.call([
'convert',
source_file,
# '-filter', 'point', '-resize', '200%',
# '-pointsize', '30',
'label:%s' % label, '+swap', '-gravity', 'Center', '-append',
dest_file])
for name, label in [('stft', 'no'), ('f', 'frequency'), ('t', 'time'), ('tf', 'time-frequency')]:
add_label_and_resize(
'multicomponent_%s.png' % name,
'multicomponent_%s_label.png' % name,
'%s reassignment' % label)
subprocess.call([
'convert',
'-delay', '100',
'-loop', '0',
'multicomponent_stft_label.png',
'multicomponent_f_label.png',
'multicomponent_tf_label.png',
'multicomponent_stft_label.png',
'multicomponent_t_label.png',
'multicomponent_tf_label.png',
'multicomponent_animation.gif'
])
if __name__ == '__main__':
compute_example_spectrograms()
make_animation()
<file_sep>from __future__ import print_function, division
from tfr import Tuning
def test_pitch_to_relative_freq():
tuning_step1 = Tuning(steps_per_octave=1)
tuning_step12 = Tuning(steps_per_octave=12)
assert 1. == tuning_step1.pitch_to_relative_freq(0.)
assert 2. == tuning_step1.pitch_to_relative_freq(1.)
assert 4. == tuning_step1.pitch_to_relative_freq(2.)
assert 0.5 == tuning_step1.pitch_to_relative_freq(-1.)
assert 1. == tuning_step12.pitch_to_relative_freq(0.)
assert 2. == tuning_step12.pitch_to_relative_freq(12.)
assert 4. == tuning_step12.pitch_to_relative_freq(24.)
assert 0.5 == tuning_step12.pitch_to_relative_freq(-12.)
def test_pitch_to_freq():
tuning = Tuning()
assert 440. == tuning.pitch_to_freq(0.)
assert 880. == tuning.pitch_to_freq(12.)
assert 1760. == tuning.pitch_to_freq(24.)
assert 220. == tuning.pitch_to_freq(-12.)
assert abs(466.1637615180899 - tuning.pitch_to_freq(1.)) < 1e-10
assert abs(415.3046975799451 - tuning.pitch_to_freq(-1.)) < 1e-10
assert abs(1318.5102276514797 - tuning.pitch_to_freq(12 + 7)) < 1e-10
# TODO: test:
# - freq_to_pitch()
# - PitchQuantizer
# - various configurations of Tuning
<file_sep>"""
A regression test for computing three kinds of spectrogram.
Just to ensure we didn't break anything.
"""
from __future__ import print_function, division
import numpy as np
import os
from tfr.spectrogram_features import spectrogram_features
DATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data')
def test_spectrograms():
for spectrogram_type in ['stft', 'reassigned', 'pitchgram']:
yield assert_spectrogram_is_ok, spectrogram_type
def assert_spectrogram_is_ok(spectrogram_type):
audio_file = os.path.join(DATA_DIR, 'she_brings_to_me.wav')
X = spectrogram_features(audio_file, frame_size=4096, hop_size=2048,
output_frame_size=2048,
spectrogram_type=spectrogram_type, magnitudes='power_db_normalized')
npz_file = os.path.join(DATA_DIR, 'she_brings_to_me_%s.npz' % spectrogram_type)
X_expected = np.load(npz_file)['arr_0']
assert X.shape == X_expected.shape, (X.shape, X_expected.shape)
print('spectrogram [%s]: max abs error' % spectrogram_type, abs(X - X_expected).max())
assert np.allclose(X, X_expected)
<file_sep>from __future__ import print_function, division
import numpy as np
import os
from tfr import SignalFrames
from tfr.signal import energy, mean_power
from tfr.spectrogram import create_window, stft_spectrogram
DATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data')
def test_window_should_be_normalized():
def assert_ok(size):
w = create_window(size)
assert np.allclose(energy(w), len(w))
assert np.allclose(mean_power(w), 1.0)
for size in [16, 100, 512, 777, 4096]:
yield assert_ok, size
def test_spectrogram_db_magnituds_should_be_in_proper_range():
frame_size = 4096
hop_size = 4096
audio_file = os.path.join(DATA_DIR, 'she_brings_to_me.wav')
signal_frames = SignalFrames(audio_file, frame_size, hop_size, mono_mix=True)
w = create_window(frame_size)
X = stft_spectrogram(signal_frames.frames, w, magnitudes='power_db')
assert np.all(X >= -120), 'min value: %f should be >= -120' % X.min()
assert np.all(X <= 0), 'max value: %f should be <= 0' % X.max()
| aa1f583f8de2b57ebb41d4055e7dd73a11848433 | [
"Markdown",
"Python",
"Shell"
] | 16 | Python | ruohoruotsi/tfr | 5a7490dc9e7cb4ff6143987065cfd7a6ffb14cd5 | e4963e4dfa0f7c1f7aa9b63336a620a33d977be9 | |
refs/heads/master | <repo_name>maks-io/change-git-author-info-script-extended<file_sep>/README.md
# change-git-author-info-script-extended
This is a clone of the script, that changes the author info in a git repository. The original script can be found here: https://help.github.com/en/articles/changing-author-info - i made a minor adjustment to replace multiple users/emails in a single run.
1) Put the script in the root folder of your repo
2) Adjust the values of the variables `ARRAY_OF_OLD_EMAILS`, `CORRECT_NAME` and `CORRECT_EMAIL`
3) Run the script
<file_sep>/rename.sh
#!/bin/sh
git filter-branch --env-filter '
ARRAY_OF_OLD_EMAILS=("YOUR_OLD_EMAIL_ONE" "YOUR_OLD_EMAIL_TWO" "YOUR_OLD_EMAIL_THREE")
CORRECT_NAME="YOUR_NEW_NAME"
CORRECT_EMAIL="YOUR_NEW_EMAIL"
containsElement () {
local e match="$1"
shift
for e; do [[ "$e" == "$match" ]] && return 0; done
return 1
}
if containsElement "$GIT_COMMITTER_EMAIL" "${ARRAY_OF_OLD_EMAILS[@]}";
then
export GIT_COMMITTER_NAME="$CORRECT_NAME"
export GIT_COMMITTER_EMAIL="$CORRECT_EMAIL"
fi
if containsElement "$GIT_AUTHOR_EMAIL" "${ARRAY_OF_OLD_EMAILS[@]}";
then
export GIT_AUTHOR_NAME="$CORRECT_NAME"
export GIT_AUTHOR_EMAIL="$CORRECT_EMAIL"
fi
' --tag-name-filter cat -- --branches --tags
| 24ffb47f90957ea0fbe3350fcdd1dcdd977c8217 | [
"Markdown",
"Shell"
] | 2 | Markdown | maks-io/change-git-author-info-script-extended | 939d237124f7d66c83111dda404d1c232fac9c60 | 98c752200944c444690b64f06e95d256cc1c68e0 | |
refs/heads/master | <file_sep>package com.testthread.personnage;
public class BatailleAttaque implements Runnable {
private Personnage perso;
private Personnage viktim;
private int cooldown;
public BatailleAttaque(Personnage perso, Personnage viktim, int cooldown){
this.perso = perso;
this.viktim=viktim;
this.cooldown = cooldown;
}
public void run() {
for(int i = 0;i<100;i++){
this.perso.attaquer(viktim);
try {
Thread.sleep(this.cooldown);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
public Personnage getPerso() {
return perso;
}
public void setPerso(Personnage perso) {
this.perso = perso;
}
public Personnage getViktim() {
return viktim;
}
public void setViktim(Personnage viktim) {
this.viktim = viktim;
}
public int getCooldown() {
return cooldown;
}
public void setCooldown(int cooldown) {
this.cooldown = cooldown;
}
}
<file_sep># threads
Lab exercise (TP) on threads
| d12e5cf383d0e7caf6ca666786d68554c67c51d3 | [
"Markdown",
"Java"
] | 2 | Java | Bartparr/threads | bcd112aaa6545e3642166463dc8878aac8b6b7b7 | 4dd26e644130e1df7e68558a4f33600d637d8986 | |
refs/heads/master | <repo_name>jbashar/wkst1-8.answered<file_sep>/worksheets-9-12-jbashar/ACTG306_Tutorial_A.R
#---
#title: "ACTG306_Tutorial_A.R"
#author: "<NAME>"
#date: "October, 2016"
#history: modified from "P<NAME>nings" and DataJoy
#student's information:
#name:
#date:
#---
###########################################################
#INTRO TO TUTORIAL ACTG306 A (partly merged 1 & 2 from last year)
###########################################################
#In this tutorial, we are going to look at the HIV sequences from
#patients in clinical trial ACTG306
#ACTG stands for Aids Clinical Trials Group.
#The data from the trial were described in this paper:
#<http://hivdb.stanford.edu/pages/pdf/kuritzkes_15097303.pdf>
#J Acquir Immune Defic Syndr. 2004 May 1;36(1):600-3.
#Rate of thymidine analogue resistance mutation accumulation with zidovudine- or stavudine-based regimens.
#Kuritzkes DR, Bassett RL, Hazelwood JD, <NAME>, Rhodes RA, Young RK, Johnson VA; Adult ACTG Protocol 306 370 Teams.
###########################################################
#CODE FROM TUTORIAL
###########################################################
#Let's load our sequences and start taking a look at them
############ CODE
#load the library seqinr
library(seqinr)
#read the fasta file
read.fasta("ACTG306_RT_fasta.txt")->SeqData
#This piece of code is to get more meaningful names for the sequences - instead of just "PtID." Don't worry if it doesn't make much sense. You can execute the entire chunk of code by using the little "chunks" menu on the upper right of this window or by clicking option-command-C.
annotations<-c();seqnames<-c()
for (i in 1:length(SeqData)){
annot = attr(SeqData[[i]],"Annot")
annotations<-c(annotations, annot)
patlocstart = regexpr("pat",annot)[1]+3
patlocend = regexpr("\\|",substr(annot,patlocstart,patlocstart+10))[1]+patlocstart
pat = substr(annot,patlocstart,patlocend-3)
weekstart = regexpr("Week",annot)[1]+5
week = substr(annot,weekstart,weekstart+10)
attr(SeqData[[i]],"name")<-paste("pat_",pat,"_week_",week,sep="")
seqnames<-c(seqnames,attr(SeqData[[i]],"name"))
}
#This piece of code is to give you an overview of the data we have organized by patient (pat) & week:
for (i in 1:30){
print(attr(SeqData[[i]],"name"))
}
#For which weeks do we have data for patient 29?
#The sequences are all from the reverse transcriptase gene of HIV.
#We are going to look at a single sequence and use the function "table"
#to determine which letters are most common in the sequence.
############ CODE
#Look at the first sequence
SeqData[[1]]
#Use "table" to determine the frequency of all the nucleotides.
table(SeqData[[1]])
############ TASK 1
#As you can see, there are other letters than just CTG and A.
#Google "ambiguity codes DNA".
#Write down the letters from the first sequence and what they mean.
#In this case, the "." probably means that the base was not sequenced.
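#One optional way to list those letters in R (a sketch): setdiff() keeps the
#symbols that are not plain a/c/g/t.
setdiff(names(table(SeqData[[1]])), c("a", "c", "g", "t"))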
############ CODE
#In R, we often run multiple functions on one object at the same time.
#For example, I'd like to tabulate SeqData[[1]] and then sort the resulting table.
#I could do this in the following way:
sort(table(SeqData[[1]]))
#To find out more about a function, type ?function
?sort
############ TASK 2
#Which nucleotide is most common in the sequence?
#The package "seqinr" comes with a handy function called "translate".
#It translates a DNA sequence into amino acids.
############ CODE
translate(SeqData[[1]])
############ TASK 3
#Why does the translation start with a bunch of X's?
#How many amino acids are in the sequence?
############ CODE
# You can use the function length() to ask R how many amino acids are in the sequence
length(translate(SeqData[[1]]))
#Next, we're going compare two amino acid sequences.
#First, let's save the amino acid sequence of patient 7, week 0 into a variable called AA_Pat7_week0.
#Next, let's save the amino acid sequence of patient 7, week 41 into a variable called AA_Pat7_week41.
############ CODE
AA_Pat7_week0<-translate(SeqData[[3]])
AA_Pat7_week41<-translate(SeqData[[5]])
#Look at the amino acid sequences, by simply typing the name of the variable and running that line of code.
AA_Pat7_week0
#You could also write
print(AA_Pat7_week0)
############ TASK 4
#How many amino acids changed for the virus of patient 7 between day 0 and day 41?
#Which amino acids?
#We use the which() function here and also the != (is not equal) operator
which(AA_Pat7_week0!=AA_Pat7_week41)
#If you are too lazy to count (and lazy is a good thing in this case!), you can use length() again,
# to determine how many differences there are.
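#For example, one way to do it:
length(which(AA_Pat7_week0!=AA_Pat7_week41))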
############ TASK 5: Looking at patient 18.
#Sequences 15 and 16 come from patient 18.
#Find out from which weeks they are.
#Save the amino acid sequences into variables with appropriate names (like we did for patient 7 before)
#Determine which amino acids have changed between the two time points.
#We now know which amino acids changed in the RT gene of the HIV virus of patient 7 between week 0 (when they started treatment) and week 41.
#Some of these amino acids may be important for drug resistance.
#With the other files you downloaded (and uploaded to DataJoy), there is also a simple text file
# with the most important drug resistance mutations for NRTI drugs.
#All the drugs in the 306 trial are NRTI drugs.
ListDRMs <- read.table("DRM_file_WHO_NRTI.txt", header = T, stringsAsFactors = F)
#ListDRMs is a dataframe
#If you want to look at the first row of the dataframe, type
ListDRMs[1,]
#If you want to look at the first column of the dataframe, type
ListDRMs[,1]
#If you want to look at the 3rd row of the 6th column, you type
ListDRMs[3,6]
#If you want to know the names of the columns, type
names(ListDRMs)
#If you know the name of the column you're interested in, but not the number, you type
ListDRMs$AminoAcid
#And for the 5th row of the column with that name, you type
ListDRMs$AminoAcid[5]
############ TASK 6
#How many amino acids are listed in ListDRMs?
#What code would you write to output the 3rd row of the column with name "Resistant"?
#For position 67, which amino acid is listed as the normal amino acid (wild type) and which is/are listed as resistant?
#Which of the amino acids that changed between week 0 and week 41 in patient 7 are important for resistance?
#Let's try to do the last question using the R function called grepl() and another function called match()
#grepl(pattern,string) asks whether a pattern is found in a string
#match(element, list, nomatch = 0)>0 asks whether an element is found in a list
#Link to information about grepl fundtion <http://www.endmemo.com/program/R/grepl.php>
grepl("g", "dog")
grepl("g", "cat")
MyListAnimals<-c("dog","cat","monkey","bird")
MyListFruit<-c("strawberry","lemon","apple", "banana")
#is dog found in the first or second list?
match("dog", MyListAnimals,nomatch = 0)>0
match("dog", MyListFruit,nomatch = 0)>0
#an easier way to find whether an element is found in a list, is to do
"dog" %in% MyListAnimals
############ TASK 7
#Write two different lines of code to ask whether apple is part of the list of animals or the list of fruits.
#OK, now we'll use grepl and %in% on the sequences from patient 7 and the list of resistance mutations.
#For each of the differences between the week 0 sequence and the week 41 sequence for patient 7, let's check if it leads to resistance
ListOfDifferences<-which(AA_Pat7_week0!=AA_Pat7_week41)
ListOfDifferences
#Let's look at the first amino acid that is different between week 0 and week 41:
AminoAcidPosition<-91
#if not in the list, print "not in list"
if (!(AminoAcidPosition %in% ListDRMs$AminoAcid)){
print(c("Amino acid",AminoAcidPosition,"not in ListDRMs"))
}
#if the amino acid position is in the ListDRM list, check whether resistant
if(AminoAcidPosition %in% ListDRMs$AminoAcid){
#check whether the new amino acid in the sequence is listed as a resistant one
#First let's get the amino acid from the patient's sequence
newAminoAcid<-AA_Pat7_week41[AminoAcidPosition]
#Now let's determine which row in the ListDRMs dataframe we should be comparing with
rowListDRMs<-which(ListDRMs$AminoAcid==AminoAcidPosition)
#Now let's look at the resistant amino acids listed in that row in the column "Resistant"
ResistantAminoAcids<-ListDRMs$Resistant[rowListDRMs]
#now we use grepl to determine whether the pattern newAminoAcid is found in the string ResistantAminoAcids
resTrueFalse<-grepl(newAminoAcid, ResistantAminoAcids)
print(c("Amino acid",AminoAcidPosition,"resistant?",resTrueFalse))
}
############ TASK 8
#Replace the "91" in the code we provided above and look at all of the amino acids in the ListOfDifferences.
#Is any of the amino acids relevant for drug resistance?
#We can do the same task using a loop, that way we don't have to type the numbers of the amino acids one by one.
for (i in ListOfDifferences){
#if not in the list, print "not in list"
if (!(i %in% ListDRMs$AminoAcid)){
print(c("Amino acid",i,"not in ListDRMs"))
}
#if the amino acid position is in the ListDRM list, check whether resistant
if(i %in% ListDRMs$AminoAcid){
#check whether the new amino acid in the sequence is listed as a resistant one
newAminoAcid<-AA_Pat7_week41[i]
resTrueFalse<-grepl(newAminoAcid,ListDRMs$Resistant[which(ListDRMs$AminoAcid==i)])
print(c("Amino acid",i,"resistant?",resTrueFalse))
}
}
############ TASK 9
#Now it is your task to write the code to do the same analysis for patient 18, week 70.
#Remember that you have already saved the amino acid sequence from this patient's week 70 sample in a variable.
#OK, lets now look at all sequences and determine whether they have resistance amino acids.
# First we make a dataframe
#make empty dataframe (just with the sequence names)
PatData<-data.frame(name=seqnames)
# ... and add with the DRM (DRM = drug resistance mutation) states in each patient
#now the next piece of code (until } around 20 lines down) is repeated for every one of the 15 DRMs in the DRM list
for (DRMnum in 1:length(ListDRMs[,1])){
#make new column for the next aa position, fill it with "TRUE"s
PatData[,length(names(PatData))+1]<-TRUE
#change the name of the new column so that it reflects the position of the DRM
names(PatData)[length(names(PatData))]<-ListDRMs$AminoAcid[DRMnum]
#for each sequence determine whether resistant or not at that position
# the next piece of code is repeated for each sequence
for (SeqNum in 1:length(PatData[,1])){
#get the amino acid for sequence SeqNum
AAseq <-translate(SeqData[[SeqNum]])
#and only keep the amino acids at position DRMnum
AAatDRMPos<-AAseq[ListDRMs$AminoAcid[DRMnum]]
#print(AAatDRMPos)
#is this AA resistant? compare it with the listed amino acids in the ListDRMs
TF<-grepl(AAatDRMPos,ListDRMs$Resistant[DRMnum])
#print(TF)
# add a true (for resistant) or false (for not resistant) to the PatData dataframe
PatData[SeqNum,length(names(PatData))]<-TF
}
}
############ TASK 10
#Print the first row of the PatData dataframe
#The information in this row comes from which patient and which week? How many trues and falses do you see? What do they mean?
#How about the 5th row?
#If you want to just look at the first few rows, you can use the head() function.
head (PatData)
#Often researchers are interested in the total number of resistance mutations in a sequence.
#Add a column to sum the number of DRMs per sequence.
#Find which columns of the dataframe contain resistance information
DRMcolumns<-which(names(PatData) == ListDRMs$AminoAcid[1]) : which(names(PatData) == ListDRMs$AminoAcid[length(ListDRMs[,1])])
#Create a new column, called NumDRMs, fill with 0s
PatData$NumDRMs<-0
#Sum number of TRUEs in the DRMcolumns for each sequence
for (i in 1:length(PatData[,1])){
PatData$NumDRMs[i] <- length(which(PatData[i,DRMcolumns]==TRUE))
}
############ Task 11
# What is the minimun and maximum number of resistance mutations you see?
# How does the number of DRMs change over time for patient 5 and patient 7?
#The dataframe has data from different weeks in the trial. It is nicer if there would be a column with the week number. That way it is easier to select all the week 0 sequences or all the later sequences.
# Add column for week}
#Create new column
PatData$week<-0
for (i in 1:length(PatData[,1])){
print(i)
name = PatData$name[i]
#position of "week" in the "name"
start_week_number = regexpr("week_",name)[1]+5
week = substr(name,start_week_number,start_week_number+10)
PatData$week[i] <-week
}
#Make a histogram to show the distribution of the number of DRMs.
#Check out this link to learn more about histograms in R: <http://blog.datacamp.com/make-histogram-basic-r/>
hist(PatData$NumDRMs)
#Now compare the histogram to running the table() function
table(PatData$NumDRMs)
#To get rid of the gaps in the histogram, we can add a "breaks" argument
hist(PatData$NumDRMs, breaks = 0:5)
############ Task 12
#What is the most commonly found number of DRMs in the dataset?
#Write the code to make a histogram for the weeks of the samples in the dataset.
#We can also look at the distribution of the number of DRMs at week 0 (this is when they started the treatment)
#To do this, we subset the data, to include only those rows where PatData$week==0.
hist(PatData$NumDRMs[PatData$week==0], breaks = 0:5, main="NumDRMs at week 0")
############ Task 13
#Write the code to make a histogram with the distribution of DRMs from patients who have been on treatment at least half a year.
#What do you notice? How is this plot different from the previous one?
#Finally, we'll use the function write.csv() to save the dataframe we made in a csv file.
#The next time we work on the data, we can read in this csv file.
write.csv(PatData, "PatData306.csv",row.names = FALSE)
?write.csv()
<file_sep>/worksheets-9-12-jbashar/README.md
# Worksheets 9-12
In this repository you can find the .md file with worksheets 9-12, and the sequence tutorial and data files for worksheet 10.
On Thursday 10/27 in class we will convert worksheet-10's tutorial from .R (plain R script) to .Rmd (R markdown), merge it into the file for worksheets 9-12, save it as .Rmd, and finally upload/commit it to GitHub to turn it in.
**Remember** to enclose blocks of code triple backticks, such as:
```
some lines of code
in this code block
```
You can look at the raw (unformatted) text of this README.md file to see how the formatting is encoded.
**Also**, remember to use double spaces at the end of a line to get a new line *if* you're not already adding an empty line in between.
...**AND** an empty line in between is ***often*** required in markdown to get the formatting to work, e.g. an empty line is needed before a list or a code block for them to be properly formatted.
<file_sep>/JBashar_HW1-4.md
# Worksheet 2: #
1. **What is the R user's name?**
<NAME>
2. **What topic do they work on**?
Neurobiology of Social behavior with most recent paper studying social structure hierarchy of lab mice and the association of dominant status with specific gene expression in the brain.
3. **Which answer did you find most surprising / what did you learn from reading their story?**
James was very descriptive in the ways you can use R and the aspects that are helpful. I like how he highlighted the ability to reproduce analysis with R and and visualize data. I am excited to see this myself.
###Story #2:###
1. **What is the R user's name?**:
<NAME>
2. **What topic do they work on?**:
Neuroscience of Drosophila behavior
3. **Which answer did you find most surprising / what did you learn from reading their story?**
I have to look into what ggplot is, as he recommends starting it right away.
###Story #3:
1. What is the R user's name?
<NAME>
2. W**hat topic do they work on?**
Neurobiology of HIV and AIDS in populations
3. W**hich answer did you find most surprising / what did you learn from reading their story?**
I am going to look at the resource they recommended and do my best to do play around with R and document my attempts.
### Story #4: ###
1. **What is the R user's name?**
<NAME>
2. W**hat topic do they work on?**
Climate change on physiology of Harvested Shellfish
3. W**hich answer did you find most surprising / what did you learn from reading their story?**
Again, Carina's interview provided resources that could help my R training and I will look into tutorials by Bodo Winter
# Task 2: summarize #
After reading several stories about people who use R, is there anything problem that you think R can help you to solve? Is there anything particular that caught your interest?
I think the R language is very flexible and I am very excited to brainstorm ideas on how to apply it to research. I particular want to visualize my data.
Is there anything that didn’t make sense to you or was unexpected?
There is a lot I am trying to learn such as syntax
# Worksheet 3: Chapter 1 #
1. Differencs from perl
- The use of "==" to define equal
- Being able to use T/F shorthands for true and false respectively
- In perl you store values like $_= value but in R you store values using arrow to direct where value is to be stored
- you do not need to perl in command line to show variable contents
2.
Multiply: 7*8
Divide : 7/8
3.
A. 3 < 4: Yes
B. 3 == 4: Yes
C. x <- 4: No
D. 4 -> x: No
4.
Yes:y <- FALSE
5.
- Sum( ): adds up numbers within bracket
- rep( ): repeats value within bracket
- list.files(): calls list files saved similar to ls in perl
- sqrt(#): returns the square root of number value in bracket
6. ```list.files() ```
7. ```source("file.R")```
8. No
# Worksheet 4: Chapter 2 #
1.
Boolean:is a type of programming logic used to describe the relationship between different data sets. The Boolean data types include AND, OR, <, > etc. Adding the boolean will generate true or false statement after comparing the relationship between the data sets.
Numeric: value that is numeric and is considered a number that can be evaluated
String: usually word sets or non-numeric values; constant
2. ```vector <- c(1, TRUE, "Hello")```
combining different data structures into a vector changes them to the same data type for example all become string type
3. ```seq(1,2,0.1)```
This command prints sequence between 1 to 2 in increments of 0.1.
4. ```vector <- c(apple, orange, pear)```
5.
these elements are indexed and can be called vector[1] returns "apple"
5.
[1] "I" "am" "learning"
[4] "R"
R starts counting at 1 index while perl starts at 0
6. 2 plots
barplot: barplot(vector);
x values are the assigned labels of values and y values are the count
Scatterplot: plot(x,y); x and y are variables that have values stored in them and are respectively used for horizontal axis and verticle axis
<file_sep>/worksheets-9-12-jbashar/worksheets_9_12.md
#Worksheet 9: Work in a group to make a poster
**Note**: programming is often done by one person in silence, but a lot of research shows that we learn better when we talk about what we are learning. That's why we do a group activity today.
1. Pick a random letter between A and D using the `sample()` function.
(*hint 1*: first make a vector with the letters A, B, C and D, then pass that vector to `sample()`; *hint 2*: sample() takes two arguments. The first is a vector and the second is the size of the sample you want, in this case 1)
```
vector <- c("A","B","C","D")
sample(vector,1)
```
2. Your random letter determines what is your topic for today:
* A. Functions in R
* B. Datatypes in R
* C. Plots in R
* D. For loops in R
#####There are two types of loops in R: "For each" & "while"
##### For each is used to specify functions that should be performed on each item of a list, array,
##### While loops: repeats workflow of code as long as some condition holds true; when it is no longer true condition then the loop will end and move to next flow statment
Name:Janna
Date: 11/3
***Yay! I hope you learned something from your colleagues***
***
#Worksheet 10 `ACTG306_Tutorial_A.R`: working with sequence data
Download the tutorial and go through its steps and tasks.
***
#Worksheet 11 Do a Swirl tutorial in Rstudio
1. Open Rstudio, install the swirl package using the `install.packages()` function and then load the swirl package using the `library()` command.
2. Open swirl by typing `swirl()`. When swirl asks you what course you want to do, **choose 1: R Programming**.
* Then there will be the choice between 15 different tutorials. Pick one that looks useful to you. I would not start with the apply functions, but any other tutorial is fine.
3. Make notes here about the most important things you learn in the tutorial you chose:
Awesome! I hope you like swirl and come back to it later!
Name:
Date:
***
#Worksheet 12: Do codeschool TryR chapter 6
1. Why do you think is it more practical to have data in a data.frame then in individual vectors?
2. You've learned three ways to extract a column from a data.frame. Which are they?
3. The `read.table()` and `read.csv()` functions are extremely useful. Which are the different arguments for these functions used in the tutorial?
***Great. Almost done with all the codeschool stuff!***
Name:
Date: <file_sep>/Jbashar_HW5-8.md
# Worksheet 5: chapter 3#
1. ```matrix(1, 2, 5) ```
2. there are two possible ways
```a <- c(10, 14, 20, 30)
matrix(a, 2, 2) ```
or
```dim(a) <- c(2,2)
print(a)```
3. Dim function sets dimensions to place your vectors
4. One example of big data that can be converted into matrix is collection of weather temperature patterns. Where there could be 12 columns for each month and 31/30 days or vice versa
5.
- contour map plot:contour(matrix)
- 3D perspective plot: persp(matrix)
- heat map:image (matrix)
The best matrix plot is the 3D perspective with limited expansion because it shows the best visualization for numbers on a 3D map. It clearly shows the dip representative of the change in elevation number which is harder to read in a 2D contour or heat map
# Worksheet 6: Chapter 4 #
1.
1) **Mean(vector)**: takes the average of numeric values in vector
2) **albine(h/v= mean(limbs))**: makes line representing the mean either as horizontal or verticle parameter
3)**Barplot(vector)**: graph vector on x and y plane; x-axis shows the labels and y axis the values
4) **deviation sd(vector)**: how much an individual value set differs the average value. Going one standard deviation up to the mean value gives the top of normal range and opposite for bottom of normal range.
5)**median(vector)** takes the middle value of the total data set
2. Vectors include:
limbs: vector
names(limbs)
pounds: vector
meanValue: numeric
deviation: numeric
3. I think you can only create an abline line when the mean of the vector will equal 10. For example:
```limbs <- c(10,10)
mean(limbs) == 10
barplot(limbs)
abline(h=mean(limbs))```
for abline(v=...) = 10 the x-axis needs values that are dataset with mean value 10. In the given example we could state:
```abline(v=mean(y))```
# Worksheet 7: Rstudio #
##Task 6 ##
#Creating vectors
```x <- c(1,2,3,4,5,6,7,8,9,10)```
```y <- c(1, 4,9,16,25,36,49,64,81,100)```
#Creating scatterplots
```plot(x,y)```
#Creating barplot
windows() #keeps old graphs and doesn't overwrite
```barplot(x, width = 2, space=NULL)```
windows()
```barplot(y, width=2, space=NULL)```
#make them horizontal
windows()
```barplot(x, width = 2, space=NULL, horiz = TRUE)```
windows()
```barplot(y, width=2, space=NULL, horiz = TRUE)```
#Create matrix
```w <- rbind(x,y)```
#Barplot of matrix
windows()
```barplot(w, width = 2, space = NULL)```
## Worksheet 8: chapter 5 ##
Task 1. Data set factors
- type of car involved in accidents
- type of plants
- Grades: A,B,C,D,fail
- types of food at a party
Task 2.
- plot: scatter plot values within the brackets
- weights is the vector containing the values mapped on the x-axis
- prices is the vector containing the price values on y-axis
-types: is vector containing values made into same factor category
pch=as.interger
-converts the factor values in 'types' into integers to plot onto scatterplot | 23a9178fbed5dfe1a886a027358a2243e29c23bf | [
"Markdown",
"R"
] | 5 | R | jbashar/wkst1-8.answered | fc1170114dbfc5092c2564e86d945d7d1344b6c7 | 684df9e737172ed4ed11197986fe6a7bee87bb97 | |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Dom
{
class House
{
public string Address;
public int Length;
public int WindowsAmount;
public int People;
public House(string address, int length, int windowsAmount, int people)
{
Address = address;
Length = length;
WindowsAmount = windowsAmount;
People = people;
}
public void Tax()
{
Console.WriteLine("Podatek za dom wynosi: " + Length*10);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography.X509Certificates;
using System.Text;
using System.Threading.Tasks;
namespace Dom
{
class Program
{
static void Main(string[] args)
{
House house1 = new House("klonowa", 50, 8, 4);
house1.Tax();
}
}
}
<file_sep># Dom
let's go
| 836a8da9b0ccf831c01020a1f093dcb6f121d804 | [
"Markdown",
"C#"
] | 3 | C# | IceBlade193/Dom | 3fea93cea9bcb9bf7ffa6d65a051e53483f60d35 | a7ed0298e6b390ef778aebbda537255599a2cc6d | |
refs/heads/master | <file_sep>require('../tree-node/tree-node');
const css = require('./tree-component.css');
const node = (scope) => `
<tree-component data='${JSON.stringify(scope.item)}'></tree-component>
`;
const label = (data) => {
return data instanceof Array ? `[${[].concat(data).join(', ')}]` : data
}
const template = (scope) => `
<style>${css}</style>
${scope.data instanceof Array
? `<ul class="tree">
${scope.data.map(item => `<li class="collapsed"><tree-node text='${label(item)}'></tree-node>${node({item})}</li>`).join('')}
</ul>`
: scope.data
}`;
class Tree extends HTMLElement {
connectedCallback(){
this.data = JSON.parse(this.getAttribute('data'));
this.render();
this.addEventListener('click', (e) => {
e.target.parentElement.classList.toggle('collapsed');
e.target.parentElement.classList.toggle('expanded');
})
}
render() {
this.innerHTML = template({data: this.data});
}
}
customElements.define('tree-component', Tree);
<file_sep># tree-component
<file_sep>require('./tree-component/tree-component');
| 0bb7a52024b446b238a8c3ac74e78733a4ccf40d | [
"JavaScript",
"Markdown"
] | 3 | JavaScript | pankajpatel/tree-component | 48d9f47c1ff20bf0395c031728a8eacc1916fd03 | a06c0837e42ca1acee9aa9dda4bbe648cb3eb6b6 | |
refs/heads/master | <file_sep>Gem::Specification.new do |s|
s.name = "streamtagger"
s.version = "0.1.6"
s.authors = "<NAME>"
s.email = "dramsay on github"
s.summary = "Streamtagger allows you to tag the current stream playing in iTunes, storing info to a text file"
s.homepage = "http://github.com/dramsay/streamtagger"
s.description = s.summary
s.has_rdoc = false
s.files = ["README.rdoc", "Changelog", "LICENSE"]
s.executables = ['stag']
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 2
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<dramsay-rubyosa>, ["0.4.0"])
else
s.add_dependency(%q<dramsay-rubyosa>, ["0.4.0"])
end
else
s.add_dependency(%q<dramsay-rubyosa>, ["0.4.0"])
end
end
<file_sep>#!/usr/bin/env ruby
require 'rubygems'
require 'rbosa'
if ENV["TAGGER_FILE"]
tagger_file = File.expand_path(ENV["TAGGER_FILE"])
else
tagger_file = File.expand_path("~/Desktop/streamtagger.txt")
end
if ARGV[0] == 'list'
print `cat #{tagger_file}`
elsif ARGV[0] == 'clear'
`> #{tagger_file}`
elsif ARGV[0] == 'info'
itunes = OSA.app("iTunes")
puts itunes.current_stream_title
else
itunes = OSA.app("iTunes")
puts itunes.current_stream_title if ENV["TAGGER_INFO"]
File.open(tagger_file, "a") do |f|
f.puts itunes.current_stream_title
end
end
| 4e983463eb6547a760feadc1ae66cdf50942a06a | [
"Ruby"
] | 2 | Ruby | dramsay/streamtagger | 2dc120f7bb1a322c5cad5f6d0c9ae14566462910 | c7b8258e3a690489640bd916d5275a4809edbd91 | |
refs/heads/master | <file_sep>import React from 'react';
import './contact_list.css';
import PropTypes from 'prop-types';
function compare(a, b){
if (a.firstName.toLowerCase() > b.firstName.toLowerCase()) return 1;
if (b.firstName.toLowerCase() > a.firstName.toLowerCase()) return -1;
if (a.lastName.toLowerCase() > b.lastName.toLowerCase()) return 1;
if (b.lastName.toLowerCase() > a.lastName.toLowerCase()) return -1;
return 0;
}
/**
* Takes a list of contacts an display it
* You can also select or remove, that will run a function in Book component
*/
const ContactList = (props) => (
<ul className="contact-list">
{props.contacts.length > 0 ? (
props.contacts.sort(compare).map((contact, index) => (
<li
key={ index }
onClick={() => props.selectContact(contact)}
className={props.selectedId === contact.id ? 'selected' : ''}
>
{contact.firstName} {contact.lastName} <button onClick={() => props.removeContact(contact.id)}>Remove</button>
</li>
))
) : (
<li>No contacts</li>
)}
</ul>
);
ContactList.propTypes = {
contacts: PropTypes.arrayOf(
PropTypes.shape({
id: PropTypes.number.isRequired,
firstName: PropTypes.string.isRequired,
lastName: PropTypes.string.isRequired
}).isRequired
).isRequired,
selectedId: PropTypes.number,
selectContact: PropTypes.func.isRequired,
removeContact: PropTypes.func.isRequired
};
export default ContactList;
<file_sep>export const initialContacts = [
{ firstName: 'Obi-Wan', lastName: 'Kenobi', id: Math.random() },
{ firstName: 'Leia', lastName: 'Organa', id: Math.random() },
{ firstName: 'Han', lastName: 'Solo', id: Math.random() },
{ firstName: 'Luke', lastName: 'Skywalker', id: Math.random() },
{ firstName: 'Darth', lastName: 'Vader', id: Math.random() },
{ firstName: 'Master', lastName: 'Yoda', id: Math.random() },
];
<file_sep>import React from 'react';
import './contact_form.css';
import PropTypes from 'prop-types';
import darth from './darth.ico';
import c3p0 from './C3PO-icon.png';
/**
* This component fill the contact data and callback `addContact` or `editContact` depending if you passed `selectedContact`
*/
class ContactForm extends React.Component {
initialState = {
id: null, firstName: '', lastName: '', errorMessage: '',
};
constructor(props) {
super(props);
const selectedContact = props.selectedContact;
this.state = (selectedContact && {
id: selectedContact.id,
firstName: selectedContact.firstName,
lastName: selectedContact.lastName,
errorMessage: '',
}) || this.initialState;
}
/**
* Filling contact info when a contact is selected
*/
componentWillReceiveProps(nextProps) {
const selectedContact = nextProps.selectedContact;
if (selectedContact !== this.props.selectedContact) {
this.setState({
id: selectedContact.id, firstName: selectedContact.firstName, lastName: selectedContact.lastName,
});
}
}
handleInputChange = (e) => {
const { name, value } = e.target;
this.setState({
[name]: value,
});
};
handleSubmit = (e) => {
e.preventDefault();
if (!this.state.firstName || !this.state.lastName) {
this.showErrorMessage();
return;
}
if (this.state.id) {
this.props.editContact(this.state.id, this.state.firstName, this.state.lastName);
} else {
this.props.addContact(this.state.firstName, this.state.lastName);
this.setState(this.initialState); // Re-initialising the state after adding a new contact
}
};
showErrorMessage = () => {
this.setState({ errorMessage: 'First name & Last name must be defined' });
setTimeout(() => {
this.setState({ errorMessage: '' });
}, 3000);
};
render() {
return (
<form className="contact-form" onSubmit={this.handleSubmit}>
<h2>Contact Form</h2>
<div className="field">
<label><img width="20" height="20" src={darth} alt="address book" /> First Name: </label>
<input type="text" name="firstName" placeholder="e.g: Eder"
value={this.state.firstName} onChange={this.handleInputChange} />
</div>
<div className="field">
<label><img width="20" height="20" src={c3p0} alt="address book" /> Last Name: </label>
<input type="text" name="lastName" placeholder="e.g: Bodelon"
value={this.state.lastName} onChange={this.handleInputChange} />
</div>
<button>{!this.state.id ? 'Add new' : 'Save'} contact</button>
<div className="error">{this.state.errorMessage}</div>
</form>
)
}
}
ContactForm.propTypes = {
selectedContact: PropTypes.object,
addContact: PropTypes.func.isRequired,
editContact: PropTypes.func.isRequired
};
export default ContactForm
<file_sep>import React from 'react';
import ReactDOM from 'react-dom';
import { Provider } from 'react-redux';
import { combineReducers, createStore } from 'redux';
import { BrowserRouter, Route } from 'react-router-dom';
import * as reducers from '../reducers';
import Book from './book';
const store = createStore(combineReducers(reducers));
it('renders without crashing', () => {
const div = document.createElement('Book');
ReactDOM.render(
<Provider store={store}>
<BrowserRouter>
<Route path="/" component={Book}/>
</BrowserRouter>
</Provider>
, div);
ReactDOM.unmountComponentAtNode(div);
});
<file_sep># React Book Address Example
Basic book address created in `React`, `Redux`. Just for fun.
<file_sep>import { matchesText, makeContactFromAction } from './utils';
import { addContact, editContact } from '../actions/book_actions';
it('#matchesText should work', () => {
expect(matchesText('text', 't')).toBe(true);
expect(matchesText('text', 'bla')).toBe(false);
expect(matchesText('text', '')).toBe(true);
});
it('#makeContactFromAction should work', () => {
const addContactAction = addContact('test', 'test');
expect(makeContactFromAction(addContactAction)).toEqual(jasmine.objectContaining({
firstName: 'test', lastName: 'test',
}));
expect(makeContactFromAction(editContact(1, 'test', 'test'))).toEqual({
firstName: 'test', lastName: 'test', id: 1,
});
});
<file_sep>import * as types from '../constants/action_types';
import * as actions from './book_actions';
describe('Book actions', () => {
const mockContact = {
id: 1,
firstName: 'eder',
lastName: 'bodelon',
};
it(`selectContact should create ${types.SELECT_CONTACT} action`, () => {
expect(actions.selectContact(mockContact.id)).toEqual({
type: types.SELECT_CONTACT,
id: mockContact.id,
})
});
it(`addContact should create ${types.ADD_CONTACT} action`, () => {
expect(actions.addContact(mockContact.firstName, mockContact.lastName)).toEqual({
type: types.ADD_CONTACT,
firstName: mockContact.firstName,
lastName: mockContact.lastName,
})
});
it(`removeContact should create ${types.REMOVE_CONTACT} action`, () => {
expect(actions.removeContact(mockContact.id)).toEqual({
type: types.REMOVE_CONTACT,
id: mockContact.id,
});
});
it(`editContact should create ${types.EDIT_CONTACT} action`, () => {
expect(actions.editContact(mockContact.id, mockContact.firstName, mockContact.lastName)).toEqual({
type: types.EDIT_CONTACT,
id: mockContact.id,
firstName: mockContact.firstName,
lastName: mockContact.lastName,
});
});
});
<file_sep>/**
* Helper that matches a text inside a string
* @param text
* @param match
* @returns {boolean}
*/
export function matchesText(text, match) {
return text.toLowerCase().indexOf(match.toLowerCase()) > -1;
}
/**
* Helper function that creates a Contact object from action
* @see ADD_CONTACT | EDIT_CONTACT
* @param action(ADD_CONTACT || EDIT_CONTACT)
* @returns {{firstName: string, lastName: string, id: number}}
*/
export function makeContactFromAction(action) {
return {
firstName: action.firstName,
lastName: action.lastName,
id: action.id || Math.random(),
}
}
<file_sep>export const ADD_CONTACT = 'ADD_CONTACT';
export const REMOVE_CONTACT = 'REMOVE_CONTACT';
export const EDIT_CONTACT = 'EDIT_CONTACT';
export const SELECT_CONTACT = 'SELECT_CONTACT';
<file_sep>export { default as book } from './book_reducer';
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import './book.css';
import { addContact, removeContact, editContact, selectContact } from '../actions/book_actions';
import ContactList from '../components/contact_list';
import ContactForm from '../components/contact_form';
import Search from '../components/search';
import { matchesText } from '../utils/utils';
import {Link} from "react-router-dom";
class Book extends React.Component {
constructor(props) {
super(props);
this.state = {
search: '',
};
}
search(e) {
e.preventDefault();
const text = e.target.value;
this.setState({ search: text });
}
render() {
const contacts = this.props.book.contacts.filter((c) => {
return matchesText(c.firstName, this.state.search) || matchesText(c.lastName, this.state.search);
});
const selectedContact = this.props.book.selectedContact;
return (
<div className="flex-container book">
<section>
<Link to="/">Go to Readme</Link>
<h2>Address Book</h2>
<Search value={this.state.search} search={(e) => this.search(e)}/>
<ContactList
contacts={contacts}
selectedId={selectedContact && selectedContact.id}
selectContact={(contact) => this.props.dispatch(selectContact(contact.id))}
removeContact={(id) => this.props.dispatch(removeContact(id))}
/>
</section>
<section className='contact-form-container'>
<ContactForm
addContact={(firstName, lastName) => this.props.dispatch(addContact(firstName, lastName))}
editContact={(id, firstName, lastName) => this.props.dispatch(editContact(id, firstName, lastName))}
selectedContact={selectedContact}
/>
</section>
</div>
);
}
}
export default connect(state => ({
book: state.book
}))(Book);
| 46291e64bb2903c82cbf8f30f95d73b8d894bf8e | [
"JavaScript",
"Markdown"
] | 11 | JavaScript | deftone42/react-book-address | ef2764ee155520cef7c099d2bd01956d3acd3d57 | d17688910a59ee3a1ecaf23cbf0cef54cdaed576 | |
refs/heads/master | <repo_name>Aacharya1/automation_framework<file_sep>/src/main/java/page/LoginPage.java
package page;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
//import org.openqa.selenium.support.FindBy;
import org.openqa.selenium.support.FindBy;
import org.openqa.selenium.support.How;
public class LoginPage {
WebDriver driver;
public LoginPage(WebDriver Driver) {
this.driver = Driver;
}
// Library of elements
// WebElement USERNAME_FIELD_ELEMENT = driver.findElement(By.id("accg"));
// By PASSWORD_FIELD_LOCATOR = By.id("bbb");
@FindBy(how = How.ID, using = "username")
WebElement USERNAME_FIELD;
@FindBy(how = How.ID, using = "password")
WebElement PASSWORD_FIELD;
@FindBy(how = How.NAME, using = "login")
WebElement SUBMIT_FIELD;
//METHODS TO INTERACT WITH THE ELEMENT
public void enterUsername(String userName){
USERNAME_FIELD.sendKeys(userName);
}
public void enterPassword(String password){
PASSWORD_FIELD.sendKeys(password);
}
public void clickOnSignInButton(){
SUBMIT_FIELD.click();
}
}
| fb079f7dada91ebf62f72b2d7c40eca561889424 | [
"Java"
] | 1 | Java | Aacharya1/automation_framework | 5452146b41fbac19aec15eedc8d28d8fc0fb34e4 | 911f368a69f9a0e740413a433a1ed916a258ddc9 | |
refs/heads/master | <repo_name>bizarrecoding/PokemonGOU<file_sep>/app/src/main/java/com/example/herik21/pokemongo/TeamActivity.java
package com.example.herik21.pokemongo;
import android.app.Activity;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListView;
import com.raizlabs.android.dbflow.sql.language.Select;
import java.util.List;
import java.util.Objects;
public class TeamActivity extends AppCompatActivity {
public ListView lv;
public CustomAdapter cAdapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_team);
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
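        // The database holds a single Trainer row; its name becomes the screen title.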
Trainer tr= new Select().from(Trainer.class).querySingle();
getSupportActionBar().setTitle(tr.name+"'s team");
lv = (ListView)findViewById(R.id.listView);
List<TeamPokemon> myTeam = new Select().from(TeamPokemon.class).orderBy(TeamPokemon_Table.main,false).queryList();
cAdapter = new CustomAdapter(this,myTeam);
Log.d("size",""+cAdapter.getCount());
lv.setAdapter(cAdapter);
FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
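                // "Request" dialog: picking an entry below adds 5 of that item to the trainer's bag.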
                CharSequence[] request = {"Pokeballs", "Potions", "Superpotions"};
AlertDialog.Builder adb = new AlertDialog.Builder(TeamActivity.this);
adb.setTitle("Request");
adb.setItems(request, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
Trainer tr= new Select().from(Trainer.class).querySingle();
switch (which){
case 0:
tr.pokeball+=5;
break;
case 1:
tr.potion+=5;
break;
case 2:
tr.superpotion+=5;
break;
}
tr.save();
Snackbar.make(TeamActivity.this.lv, "Request done", Snackbar.LENGTH_LONG)
.setAction("Action", null).show();
}
}).create().show();
}
});
lv.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
Intent i = new Intent(TeamActivity.this, PokemonActivity.class);
if(cAdapter.team.get(position)!=null){
TeamPokemon tpk = cAdapter.team.get(position);
i.putExtra("pokemon",tpk);
i.putExtra("base",new Select().from(Pokemon.class).where(Pokemon_Table.id.is(tpk.basePokemon.id)).querySingle());
if(getIntent().getBooleanExtra("change",false)){
i.putExtra("change",true);
}
startActivityForResult(i, 1);
}
}
});
}
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data){
        super.onActivityResult(requestCode, resultCode, data);
        // Reload the team from the database so HP / leader changes made on the detail screen show up.
        cAdapter.setTeam(new Select().from(TeamPokemon.class).orderBy(TeamPokemon_Table.main,false).queryList());
        cAdapter.notifyDataSetChanged();
        Intent parent = getIntent();
        // When this screen was opened from a battle to switch the lead pokemon,
        // forward the child's "newlead" extra to the caller and close.
        if(parent.getBooleanExtra("change",false) && data != null){
            Intent in = getIntent();
            in.putExtra("newlead",data.getStringExtra("newlead"));
            setResult(Activity.RESULT_OK,in);
            finish();
        }
    }
}
<file_sep>/app/src/main/java/com/example/herik21/pokemongo/CustomAdapter.java
package com.example.herik21.pokemongo;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.AsyncTask;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.TextView;
import java.io.InputStream;
import java.net.URL;
import java.util.List;
/**
* Created by Herik21 on 19/09/2016.
*/
public class CustomAdapter extends BaseAdapter {
public List<TeamPokemon> team;
public Context ctx;
public CustomAdapter(Context context,List<TeamPokemon> myTeam){
this.team = myTeam;
this.ctx = context;
}
public void setTeam(List<TeamPokemon> team) {
this.team = team;
}
@Override
public int getCount() {
return team.size();
}
@Override
public Object getItem(int position) {
return team.get(position);
}
@Override
public long getItemId(int position) {
return team.get(position).id;
}
@Override
public View getView(int position, View view, ViewGroup parent) {
TeamPokemon tpk = team.get(position);
        if(view==null) {
            // Inflate a new row only when the ListView does not hand us a recycled one.
            LayoutInflater linf = (LayoutInflater) ctx.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            view = linf.inflate(R.layout.pkmnrow, parent, false);
        }
ImageView icon = (ImageView) view.findViewById(R.id.iconPk);
new DownloadImageTask(icon).execute(tpk.basePokemon.imgFront);
TextView name = (TextView) view.findViewById(R.id.name);
name.setText(tpk.basePokemon.name);
        TextView chp = (TextView) view.findViewById(R.id.current);
        chp.setText(String.valueOf(tpk.currenthp));
        TextView thp = (TextView) view.findViewById(R.id.total);
        thp.setText(String.valueOf(tpk.hp));
return view;
}
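    /**
     * Downloads a pokemon sprite off the main thread and sets it on the row's icon.
     * Rows are recycled by the ListView, so a slow download may land on a reused view;
     * the rest of the app uses Glide, which handles this automatically.
     */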
private class DownloadImageTask extends AsyncTask<String, Void, Bitmap> {
private ImageView target;
public DownloadImageTask(ImageView img) {
target = img;
}
protected Bitmap doInBackground(String... urls) {
String imageURL = urls[0];
Bitmap image = null;
try {
InputStream in = new URL(imageURL).openStream();
image = BitmapFactory.decodeStream(in);
} catch (Exception e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
return image;
}
protected void onPostExecute(Bitmap result) {
target.setImageBitmap(result);
}
}
}
<file_sep>/app/src/main/java/com/example/herik21/pokemongo/PokemonActivity.java
package com.example.herik21.pokemongo;
import android.app.Activity;
import android.content.DialogInterface;
import android.content.Intent;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.raizlabs.android.dbflow.sql.language.SQLite;
import com.raizlabs.android.dbflow.sql.language.Select;
public class PokemonActivity extends AppCompatActivity {
public TextView pname,hp,atk,def,type;
public ImageView pkimg;
public Button leader;
public TeamPokemon tpk;
public Pokemon base;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_pokemon);
leader= (Button)findViewById(R.id.leader);
pname = (TextView)findViewById(R.id.pkname);
type = (TextView)findViewById(R.id.type);
hp = (TextView)findViewById(R.id.hp);
atk = (TextView)findViewById(R.id.atk);
def = (TextView)findViewById(R.id.def);
Intent parent = getIntent();
tpk = (TeamPokemon) parent.getSerializableExtra("pokemon");
base = (Pokemon) parent.getSerializableExtra("base");
Log.d("name",base.name);
hp.setText(tpk.currenthp+"/"+tpk.hp);
atk.setText(tpk.atk+"/"+base.atk_max);
def.setText(tpk.def+"/"+base.def_max);
pname.setText(base.name);
pkimg = (ImageView)findViewById(R.id.pkimg);
if(tpk.main){
leader.setEnabled(false);
leader.setText("Leader");
}
if(tpk.currenthp<1){
leader.setEnabled(false);
}
Glide.with(this).load(base.imgFront).into(pkimg);
type.setText(base.type);
int color=0;
switch (base.type.toLowerCase()){
case "fire":
color = android.R.color.holo_red_light;
break;
case "water":
color = android.R.color.holo_blue_dark;
break;
case "grass":
color = android.R.color.holo_green_dark;
break;
case "electric":
color = R.color.yellow;
break;
case "dragon":
color = R.color.purple;
break;
case "psychic":
color = R.color.pink;
break;
case "ghost":
color = android.R.color.holo_purple;
break;
default:
color = android.R.color.black;
break;
}
type.setTextColor(getResources().getColor(color));
if(tpk.currenthp==tpk.hp){
findViewById(R.id.heal).setEnabled(false);
}
}
public void refreshUI(){
hp.setText(tpk.currenthp+"/"+tpk.hp);
atk.setText(tpk.atk+"/"+base.atk_max);
def.setText(tpk.def+"/"+base.def_max);
}
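    /**
     * Heal button handler: offers the potions the trainer still owns (a potion restores 20 HP,
     * a superpotion 50) and applies the chosen one to this pokemon.
     */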
public void onClickHeal(View v){
Trainer tr = new Select().from(Trainer.class).querySingle();
int potion = tr.potion;
int superpotion = tr.superpotion;
CharSequence potions[];
if(potion>0 || superpotion>0){
if(potion>0 && superpotion>0) {
potions = new CharSequence[2];
potions[0] = "Potion x" + potion;
potions[1] = "Superpotion x" + superpotion;
}else{
potions = new CharSequence[1];
if(potion==0){
potions[0] ="Superpotion x"+superpotion;
}
if(superpotion==0){
potions[0] ="Potion x"+potion;
}
}
final AlertDialog.Builder adb = new AlertDialog.Builder(this);
adb.setTitle("Use which potion?");
adb.setItems(potions, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
Trainer tr = new Select().from(Trainer.class).querySingle();
if(which==0){
tpk.currenthp+=20;
SQLite.update(Trainer.class).set(Trainer_Table.potion.eq(tr.potion-1)).execute();
}else{
tpk.currenthp+=50;
SQLite.update(Trainer.class).set(Trainer_Table.potion.eq(tr.superpotion-1)).execute();
}
if(tpk.currenthp>=tpk.hp){
tpk.currenthp=tpk.hp;
findViewById(R.id.heal).setEnabled(false);
}
//tr.save();
tpk.save();
refreshUI();
}
}).create().show();
}else{
v.setEnabled(false);
}
}
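    /**
     * Leader button handler: demotes the current leader (if any), promotes this pokemon, and,
     * when the screen was opened from a battle switch, returns the result straight away.
     */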
public void onClickLeader(View v){
TeamPokemon oldlead = new Select().from(TeamPokemon.class).where(TeamPokemon_Table.main.is(true)).querySingle();
if(oldlead!=null) {
oldlead.main = false;
oldlead.save();
}
TeamPokemon newlead = new Select().from(TeamPokemon.class).where(TeamPokemon_Table.id.is(tpk.id)).querySingle();
newlead.main = true;
newlead.save();
leader.setEnabled(false);
leader.setText("Leader");
if(getIntent().getBooleanExtra("change",false)){
Intent in = getIntent();
in.putExtra("newlead","go");
setResult(Activity.RESULT_OK,in);
finish();
}
}
}
<file_sep>/app/src/main/java/com/example/herik21/pokemongo/WildMarker.java
package com.example.herik21.pokemongo;
import com.google.android.gms.maps.model.Marker;
/**
* Created by Herik21 on 16/09/2016.
*/
public class WildMarker {
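    // A wild pokemon spawn: species id (pkid), its index in the spawn list (id), map coordinates,
    // whether its marker should be shown, and the Marker object once one has been placed.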
public int pkid,id;
public double[] loc;
public boolean visible;
public Marker mk;
public WildMarker(int pid,double[] markerloc,boolean visible,int listid){
this.pkid=pid;
this.id= listid;
this.loc=markerloc;
this.visible=visible;
}
public void setMarker(Marker marker){
this.mk=marker;
}
}
<file_sep>/app/src/main/java/com/example/herik21/pokemongo/TeamPokemon.java
package com.example.herik21.pokemongo;
import com.raizlabs.android.dbflow.annotation.Column;
import com.raizlabs.android.dbflow.annotation.ForeignKey;
import com.raizlabs.android.dbflow.annotation.PrimaryKey;
import com.raizlabs.android.dbflow.annotation.Table;
import com.raizlabs.android.dbflow.structure.BaseModel;
import java.io.Serializable;
@Table(database = AppDatabase.class)
public class TeamPokemon extends BaseModel implements Serializable {
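    // A pokemon on the trainer's team: references the base species and stores its own
    // HP / attack / defence values; "main" marks the current team leader.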
@Column
@PrimaryKey (autoincrement = true )
public long id;
@Column
@ForeignKey(saveForeignKeyModel = false)
public Pokemon basePokemon;
@Column
public int hp;
@Column
public int currenthp;
@Column
public int atk;
@Column
public int def;
@Column
public boolean main;
public TeamPokemon(){}
public TeamPokemon(Pokemon pk, int hp, int atk, int def, boolean main){
this.basePokemon=pk;
this.hp=hp;
this.currenthp=hp;
this.atk=atk;
this.def=def;
this.main=main;
}
}
<file_sep>/app/src/main/java/com/example/herik21/pokemongo/MapsActivity.java
package com.example.herik21.pokemongo;
import android.Manifest;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentSender;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.location.Location;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.AsyncTask;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.FragmentActivity;
import android.os.Bundle;
import android.support.v7.app.AlertDialog;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.ImageView;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.PendingResult;
import com.google.android.gms.common.api.Status;
import com.google.android.gms.location.LocationListener;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.location.LocationRequest;
import com.google.android.gms.location.LocationServices;
import com.google.android.gms.location.LocationSettingsRequest;
import com.google.android.gms.location.LocationSettingsResult;
import com.google.android.gms.location.LocationSettingsStatusCodes;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.BitmapDescriptor;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.raizlabs.android.dbflow.config.FlowConfig;
import com.raizlabs.android.dbflow.config.FlowManager;
import com.raizlabs.android.dbflow.sql.language.Select;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Random;
public class MapsActivity extends FragmentActivity implements GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener,
LocationListener, ResultCallback<LocationSettingsResult>,OnMapReadyCallback {
private static final long UPDATE_INTERVAL_IN_MILLISECONDS = 1000;
private static final long FASTEST_UPDATE_INTERVAL_IN_MILLISECONDS = 500 ;
private static final int MY_PERMISSIONS_REQUEST_LOCATION = 1;
private static final int REQUEST_CHECK_SETTINGS = 123;
private static final double STOPDISTANCE = 10;
private GoogleApiClient mApiClient;
private LocationRequest mLocationRequest;
private GoogleMap gmap;
private Marker trainer;
private ProgressDialog pDialog;
private ArrayList<WildMarker> wildMarkers = new ArrayList<>();
private ArrayList<Marker> stops = new ArrayList<>();
private TextView log;
private LatLng mLocation;
private boolean moveCam, near;
private boolean doubleBackToExitPressedOnce = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_maps);
SupportMapFragment mapFragment = (SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.map);
mapFragment.getMapAsync(this);
moveCam = true;
near = true;
log = (TextView)findViewById(R.id.log);
FlowManager.init(new FlowConfig.Builder(this).openDatabasesOnInit(true).build());
mApiClient = new GoogleApiClient.Builder(this)
.addConnectionCallbacks(this)
.addOnConnectionFailedListener(this)
.addApi(LocationServices.API)
.build();
mLocationRequest = new LocationRequest();
mLocationRequest.setInterval(UPDATE_INTERVAL_IN_MILLISECONDS);
mLocationRequest.setFastestInterval(FASTEST_UPDATE_INTERVAL_IN_MILLISECONDS);
mLocationRequest.setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY);
onGO();
ConnectivityManager connMgr = (ConnectivityManager) getSystemService(MapsActivity.CONNECTIVITY_SERVICE);
        NetworkInfo ni = connMgr.getActiveNetworkInfo();
        // getActiveNetworkInfo() returns null when no network is available at all.
        if(ni == null || !ni.isConnected()){
            Toast.makeText(this,"Not Connected to Internet",Toast.LENGTH_LONG).show();
        }
}
public void append(String entry){
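        // Avoid flooding the on-screen log: the "wild pokemons near you" line is only
        // appended once until a different kind of entry resets the "near" flag.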
if(entry.equals("wild pokemons near you")){
if(near) {
String logText = log.getText().toString();
log.setText(logText + "\n" + entry);
}
near=false;
}else{
String logText = log.getText().toString();
log.setText(logText+"\n"+entry);
near=true;
}
((ScrollView)findViewById(R.id.scrollView)).fullScroll(ScrollView.FOCUS_DOWN);
}
public void onGO(){
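        // Ask Play Services whether the current location settings satisfy our high-accuracy
        // request; the answer comes back through onResult() below.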
LocationSettingsRequest.Builder builder = new LocationSettingsRequest.Builder();
builder.addLocationRequest(mLocationRequest);
LocationSettingsRequest nLocationSettingsRequest = builder.build();
PendingResult<LocationSettingsResult> result =
LocationServices.SettingsApi.checkLocationSettings(
mApiClient,
nLocationSettingsRequest);
result.setResultCallback(this);
}
@Override
public void onMapReady(GoogleMap googleMap) {
append("Welcome trainer");
gmap = googleMap;
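        // Marker taps: the trainer marker opens the team screen, a pokestop within range hands
        // out items, and a wild pokemon marker launches a battle with the current team leader.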
gmap.setOnMarkerClickListener(new GoogleMap.OnMarkerClickListener() {
@Override
public boolean onMarkerClick(final Marker marker) {
Log.d("MARKERS","touched id"+marker.getTag()+" title "+marker.getTitle());
if(marker.getTitle()!=null){
if(marker.getTitle().equals("Trainer")) {
Intent i = new Intent(MapsActivity.this,TeamActivity.class);
startActivity(i);
}else if (marker.getTitle().equals("Pokestop")){
if(calcDist(marker.getPosition(),mLocation,STOPDISTANCE)){
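                            // Player is close enough: roll random item counts and offer them in a dialog.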
LayoutInflater factory = LayoutInflater.from(MapsActivity.this);
View view = factory.inflate(R.layout.stoprewards,null);
Random r = new Random();
final int pkb = r.nextInt(5)+1;
final int pot = r.nextInt(5)+1;
final int spot = r.nextInt(2)+1;
((TextView)view.findViewById(R.id.pokeballs)).setText("Pokeballs x"+pkb);
((TextView)view.findViewById(R.id.potions)).setText("Potions x"+pot);
((TextView)view.findViewById(R.id.superpotions)).setText("Spuerpotions x"+spot);
new AlertDialog.Builder(MapsActivity.this)
.setTitle("PokeStop")
.setView(view)
.setIcon(R.drawable.pkcenter)
.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int whichButton) {
Trainer tr = new Select().from(Trainer.class).querySingle();
tr.pokeball+=pkb;
tr.potion+=pot;
tr.superpotion+=spot;
tr.save();
Toast.makeText(MapsActivity.this, "Items acquired", Toast.LENGTH_SHORT).show();
stops.remove(marker);
marker.remove();
}})
.setNegativeButton(android.R.string.no, null)
.create().show();
if(stops.isEmpty()){
setpkStops(mLocation.latitude,mLocation.longitude);
}
return true;
}else{
Toast.makeText(MapsActivity.this,"Pokestop too far",Toast.LENGTH_LONG).show();
return false;
}
}else {
int mid = (int)marker.getTag();
Log.d("Marker","touched mk"+mid);
Intent i = new Intent(MapsActivity.this, BattleActivity.class);
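                        // Send out the team leader if it can still fight; otherwise promote the
                        // first healthy pokemon, and bail out if the whole team has fainted.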
TeamPokemon starter = new Select().from(TeamPokemon.class).where(TeamPokemon_Table.main.is(true)).and(TeamPokemon_Table.currenthp.greaterThan(0)).querySingle();
if (starter == null) {
starter = new Select().from(TeamPokemon.class).where(TeamPokemon_Table.currenthp.greaterThan(0)).querySingle();
if (starter != null) {
starter.main = true;
starter.save();
} else {
return false;
}
}
Pokemon wild = new Select().from(Pokemon.class).where(Pokemon_Table.name.is(marker.getTitle())).querySingle();
Log.d("starter", starter.basePokemon.name);
i.putExtra("pokemon", starter);
i.putExtra("base", starter.basePokemon);
i.putExtra("wild", wild);
if(mid!=-2) {
WildMarker wm = wildMarkers.get(mid);
if (wm.mk != null) {
wm.mk.setVisible(false);
wm.mk.remove();
}
wm.visible = false;
wm.mk = null;
wm.id = -2;
                        }
                        // Whichever branch ran above, hide and drop the tapped marker; the wild
                        // pokemon is now in battle and must not stay on the map.
                        marker.setVisible(false);
                        marker.remove();
                        Log.d("wmarker","marker mk"+mid+"released");
i.putExtra("markerid",mid);
startActivityForResult(i, 1);
return true;
}
}else{
marker.setVisible(false);
int mid = (int)marker.getTag();
if(mid!=-2) {
wildMarkers.get(mid).mk=null;
}
}
return false;
}
});
if (ActivityCompat.checkSelfPermission(MapsActivity.this, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED
&& ActivityCompat.checkSelfPermission(MapsActivity.this, Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(MapsActivity.this,
new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION},
MY_PERMISSIONS_REQUEST_LOCATION);
return;
}else{
gmap.setMyLocationEnabled(true);
gmap.setOnMyLocationButtonClickListener(new GoogleMap.OnMyLocationButtonClickListener() {
@Override
public boolean onMyLocationButtonClick() {
if (ActivityCompat.checkSelfPermission(MapsActivity.this, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED
&& ActivityCompat.checkSelfPermission(MapsActivity.this, Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(MapsActivity.this,
new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION},
MY_PERMISSIONS_REQUEST_LOCATION);
return false;
}
Location loc = LocationServices.FusedLocationApi.getLastLocation(mApiClient);
LatLng myLocation = new LatLng(loc.getLatitude(),loc.getLongitude());
updateUI(myLocation,moveCam);
return false;
}
});
try{
LocationServices.FusedLocationApi.requestLocationUpdates(mApiClient,mLocationRequest,this);
Location loc = LocationServices.FusedLocationApi.getLastLocation(mApiClient);
LatLng myLocation = new LatLng(loc.getLatitude(), loc.getLongitude());
new HttpAsyncTask(loc.getLatitude(),loc.getLongitude()).execute();
updateUI(myLocation,moveCam);
}catch (Exception ex){
Log.d("GMAPS","no location, api restricted");
}
}
}
public void setpkStops(double lat, double lng){
for(int i=0;i<8;i++){
            double ltd, longtd;
            // Scatter each stop within roughly +/- 0.0075 degrees of the given position.
            ltd = lat + (Math.random() * 0.015) - 0.0075;
            longtd = lng + (Math.random() * 0.015) - 0.0075;
try{
Marker stop = gmap.addMarker(new MarkerOptions()
.position(new LatLng(ltd,longtd))
.title("Pokestop")
.icon(BitmapDescriptorFactory.fromResource(R.drawable.stop))
);
stops.add(stop);
}catch(Exception e){
Log.d("Sht Happened",e.getLocalizedMessage());
}
}
}
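    /**
     * Blocking HTTP GET against the game's backend for the given coordinates; returns the raw
     * response line, or null on failure. Must be called off the main UI thread.
     */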
public static String getStops(double lat, double lng){
String response = "";
try{
URL url = new URL("http://192.168.127.12/function3.php?lat="+lat+"&lng="+lng);
URLConnection uc = url.openConnection();
BufferedReader in = new BufferedReader(new InputStreamReader(uc.getInputStream()));
String result;
while ((result = in.readLine())!=null){
Log.d("HTTPGET","result = "+result);
response = result;
}
in.close();
return response;
}catch (Exception ex){
Log.d("HTTPGET",ex.getLocalizedMessage());
return null;
}
}
public void updateUI(LatLng location, boolean move) {
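        // Runs on every location fix: clamp the zoom range, move (or create) the trainer marker,
        // seed pokestops on the first fix, remember the position, and refresh wild pokemon markers.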
gmap.setMaxZoomPreference(19.5f);
gmap.setMinZoomPreference(16.5f);
if(move){
moveCam = false;
gmap.moveCamera(CameraUpdateFactory.newLatLngZoom(location, 18));
}
if(trainer!=null){
trainer.setPosition(location);
}else{
setpkStops(location.latitude,location.longitude);
trainer = gmap.addMarker(new MarkerOptions()
.icon(BitmapDescriptorFactory.fromResource(R.mipmap.ic_pkmn_loc))
.position(location)
.title("Trainer"));
}
mLocation = location;
setWildPokemons();
}
public void setWildPokemons(){
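        // For every spawn in the list, lazily create a hidden map marker tagged with its index;
        // spawns flagged visible get their species name and sprite from the local database and are shown.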
for(WildMarker wMarker : wildMarkers) {
if(wMarker.mk==null && wMarker.id!=-2) {
//wMarker.visible=true;
wMarker.mk = gmap.addMarker(new MarkerOptions()
.position(new LatLng(wMarker.loc[0],wMarker.loc[1]))
.visible(false));
wMarker.mk.setTag(wMarker.id);
if(wMarker.mk.getTitle()!=null){
wMarker.mk.setVisible(true);
Log.d("MARKERS-S","new marker visible id="+wMarker.id);
}
Log.d("MARKERS-S","new marker id="+wMarker.id);
}
if (wMarker.visible && wMarker.mk!=null && wMarker.id!=-2) {
wMarker.mk.setVisible(true);
Pokemon pk = new Select().from(Pokemon.class).where(Pokemon_Table.id.is(wMarker.pkid)).querySingle();
if(pk!=null) {
wMarker.mk.setPosition(new LatLng(wMarker.loc[0], wMarker.loc[1]));
wMarker.mk.setTitle(pk.name);
new MarkerIconAsyncTask(wMarker.mk).execute(pk.imgFront);
}
Log.d("MARKERS-S","marker visible id="+wMarker.id);
}else if(wMarker.mk!=null && !wMarker.visible){
wMarker.mk.setVisible(false);
wMarker.visible=false;
}else{
wMarker.visible=false;
}
}
}
public void refreshMarkers(){
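        // Note: the "false &&" below keeps this loop disabled, so the method is currently a no-op;
        // when enabled it would tear down visible wild markers so they get rebuilt on the next update.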
for(WildMarker wMarker : wildMarkers) {
if (false && wMarker.visible && wMarker.mk!=null && wMarker.mk.getTitle()!=null) {
wMarker.mk.setVisible(true);
wMarker.mk.remove();
wMarker.id=-2;
wMarker.mk=null;
}
}
}
@Override
protected void onStart() {
super.onStart();
if (mApiClient != null) {
mApiClient.connect();
Toast.makeText(this, "connected", Toast.LENGTH_SHORT).show();
}
}
@Override
protected void onResume() {
super.onResume();
mApiClient.connect();
}
@Override
protected void onPause() {
mApiClient.disconnect();
super.onPause();
}
@Override
public void onConnected(@Nullable Bundle bundle) {}
@Override
public void onConnectionSuspended(int i) {}
@Override
public void onConnectionFailed(@NonNull ConnectionResult connectionResult) {}
@Override
public void onLocationChanged(Location location) {
LatLng loc = new LatLng(location.getLatitude(),location.getLongitude());
updateUI(loc,moveCam);
new DistanceAsyncTask(loc).execute();
}
@Override
public void onResult(@NonNull LocationSettingsResult locationSettingsResult) {
final Status status = locationSettingsResult.getStatus();
Log.d("GPS", "onResult" + status.getStatusCode());
switch (status.getStatusCode()) {
case LocationSettingsStatusCodes.SUCCESS:
if (mApiClient.isConnected()) {
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION},
MY_PERMISSIONS_REQUEST_LOCATION);
return;
}
gmap.setMyLocationEnabled(true);
gmap.setOnMyLocationButtonClickListener(new GoogleMap.OnMyLocationButtonClickListener() {
@Override
public boolean onMyLocationButtonClick() {
if (ActivityCompat.checkSelfPermission(MapsActivity.this, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED
&& ActivityCompat.checkSelfPermission(MapsActivity.this, Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(MapsActivity.this,
new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION},
MY_PERMISSIONS_REQUEST_LOCATION);
return false;
}
Location loc = LocationServices.FusedLocationApi.getLastLocation(mApiClient);
LatLng myLocation = new LatLng(loc.getLatitude(),loc.getLongitude());
updateUI(myLocation,moveCam);
int i =0;
for (WildMarker wm : wildMarkers){
if(wm.mk!=null && wm.mk.getTitle()!=null) {
i++;
wm.mk.setVisible(true);
}
}
Log.d("MARKERS-Click","current wild pkmons "+i);
return false;
}
});
LocationServices.FusedLocationApi.requestLocationUpdates(mApiClient, mLocationRequest, this);
Location loc = LocationServices.FusedLocationApi.getLastLocation(mApiClient);
new HttpAsyncTask(loc.getLatitude(),loc.getLongitude()).execute();
LatLng myLocation = new LatLng(loc.getLatitude(),loc.getLongitude());
updateUI(myLocation,moveCam);
}
break;
case LocationSettingsStatusCodes.RESOLUTION_REQUIRED:
try {
status.startResolutionForResult(MapsActivity.this, REQUEST_CHECK_SETTINGS);
} catch (IntentSender.SendIntentException e) {
e.printStackTrace();
}
break;
case LocationSettingsStatusCodes.SETTINGS_CHANGE_UNAVAILABLE:
Toast.makeText(this, "LocationSettingsStatusCodes.SETTINGS_CHANGE_UNAVAILABLE", Toast.LENGTH_LONG).show();
break;
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if(resultCode == RESULT_OK){
if(requestCode == 1){
Pokemon newPk = (Pokemon) data.getSerializableExtra("captured");
int i = data.getIntExtra("markerid",-2);
if(i!=-2){
WildMarker wm= wildMarkers.get(i);
if(wm.mk!=null){
Log.d("MARKERS-onResults", "removing marker "+wm.mk.getTitle());
wm.mk.setVisible(false);
wm.mk.remove();
}
wm.mk=null;
wm.id=-2;
wm.visible=false;
}
if(newPk!=null){
long size = new Select().from(TeamPokemon.class).count();
Log.d("SIZE","before: "+size);
int newHP = data.getIntExtra("hp",20);
int newAtk = data.getIntExtra("atk",20);
int newDef = data.getIntExtra("def",20);
Pokemon wild = (Pokemon) data.getSerializableExtra("bwild");
TeamPokemon newTeam = new TeamPokemon(wild,newHP,newAtk,newDef,false);
newTeam.save();
Log.d("SIZE","after: "+size);
append(wild.name+" Captured!");
}
if(!data.getBooleanExtra("lose",true)){
TeamPokemon tpk = new Select().from(TeamPokemon.class).where(TeamPokemon_Table.main.is(true)).querySingle();
Random r = new Random();
int hpbonus = 2+r.nextInt(4);
int atkbonus = 2+r.nextInt(4);
int defbonus = 2+r.nextInt(4);
if(tpk.basePokemon.hp_max > (tpk.hp + hpbonus)) {
tpk.hp += hpbonus;
}else{
                        tpk.hp = tpk.basePokemon.hp_max; // clamp to the species cap, mirroring the atk/def branches below
}
tpk.currenthp += hpbonus;
if(tpk.currenthp>tpk.hp){
tpk.currenthp=tpk.hp;
}
if(tpk.basePokemon.atk_max > (tpk.atk + atkbonus)) {
tpk.atk += atkbonus;
}else{
tpk.atk = tpk.basePokemon.atk_max;
}
if(tpk.basePokemon.def_max > (tpk.def + defbonus)) {
tpk.def += defbonus;
}else{
tpk.def = tpk.basePokemon.def_max;
}
tpk.save();
checkEvo(tpk);
}
//updateVisibility();
}else{
//code
}
int wildcount =0;
for (WildMarker wm : wildMarkers){
if(wm.mk!=null && wm.mk.getTitle()!=null) {
wildcount++;
wm.mk.setVisible(true);
}else{
if(wm.mk!=null) {
wm.mk.remove();
wm.visible=false;
wm.id=-2;
}
}
}
Log.d("MARKERS-afterbatle","current wild pkmons "+wildcount);
if(wildcount==0){
requestNewLocation();
updateVisibility();
setWildPokemons();
}
}
}
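    /**
     * Evolves the main team Pokemon once its attack and defence reach the base
     * species' maximum values and an evolution id (ev_id != -1) exists, then
     * shows a dialog announcing the evolution.
     */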
public void checkEvo(TeamPokemon tpk){
if (tpk.atk == tpk.basePokemon.atk_max && tpk.def == tpk.basePokemon.def_max && tpk.basePokemon.ev_id != -1){
Pokemon evo = new Select().from(Pokemon.class).where(Pokemon_Table.id.is(tpk.basePokemon.ev_id)).querySingle();
String prename = tpk.basePokemon.name;
tpk.basePokemon=evo;
tpk.save();
LayoutInflater factory = LayoutInflater.from(this);
View view = factory.inflate(R.layout.evolution,null);
Glide.with(this).load(evo.imgFront).into((ImageView) view.findViewById(R.id.evopic));
((TextView)view.findViewById(R.id.message)).setText(prename+" evolved into "+evo.name);
AlertDialog.Builder adb = new AlertDialog.Builder(this);
adb.setTitle("Congratulations");
adb.setView(view);
adb.setPositiveButton("Continue", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
}
});
adb.create().show();
}
}
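    /**
     * Rough proximity test: treats latitude/longitude as a flat plane, scales the
     * Euclidean distance by 10000 and compares it against BOUNDARY. Good enough
     * for the very short ranges used here, but not a real geodesic distance.
     */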
public boolean calcDist(LatLng loc1, LatLng loc2,final double BOUNDARY){
double dx = Math.pow(loc1.latitude - loc2.latitude, 2);
double dy = Math.pow(loc1.longitude - loc2.longitude, 2);
double distance = Math.sqrt(dx+dy)*10000;
return distance<=BOUNDARY;
}
@Override
public void onBackPressed(){
if (doubleBackToExitPressedOnce){
super.onBackPressed();
return;
}
this.doubleBackToExitPressedOnce=true;
Toast.makeText(this,"Please click BACK again to exit",Toast.LENGTH_SHORT).show();
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
doubleBackToExitPressedOnce = false;
}
},2000);
}
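    /**
     * Background task that flags every wild Pokemon within REFRESHDISTANCE
     * (in the same scaled plane units as calcDist) as visible and hides the rest;
     * onPostExecute either notifies the player or requests a fresh batch of spawns.
     */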
private class DistanceAsyncTask extends AsyncTask<Void,Void,Boolean>{
private static final double REFRESHDISTANCE = 7;
private LatLng myLocation;
public DistanceAsyncTask(LatLng loc){
this.myLocation = loc;
}
@Override
protected Boolean doInBackground(Void... params) {
boolean near = false;
double lat = myLocation.latitude;
double lon = myLocation.longitude;
int count=0;
for (WildMarker wildpkmn : wildMarkers){
double distance = Math.sqrt(Math.pow(lat - wildpkmn.loc[0], 2) + Math.pow(lon - wildpkmn.loc[1], 2));
//Log.d("DISTANCE","distance "+10000*distance);
if(distance*10000<=REFRESHDISTANCE){
count++;
//Log.d("DISTANCE","Marker with id "+wildpkmn.pkid+" visible, distance "+10000*distance+" visible "+count+" out of "+wildMarkers.size());
wildpkmn.visible = true;
near = true;
}else if(distance*10000>REFRESHDISTANCE){
wildpkmn.visible = false;
near = false;
}
}
Log.d("DISTANCE","visible "+count+" out of "+wildMarkers.size());
return near;
}
@Override
protected void onPostExecute(Boolean near) {
super.onPostExecute(near);
if(near){
append("wild pokemons near you");
}else{
Log.d("REQUESTLOC","requesting more wild pkmons (near = "+near+")");
requestNewLocation();
}
}
}
private void requestNewLocation() {
LatLng loc = mLocation;
Log.d("Request", "new wild pokemons");
new HttpAsyncTask(loc.latitude, loc.longitude).execute();
}
public void updateVisibility(){
Log.d("Request", "updates");
new DistanceAsyncTask(mLocation).execute();
refreshMarkers();
updateUI(mLocation,false);
}
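    /**
     * Fetches spawn coordinates from the server via getStops(), clears the current
     * wildMarkers list and refills it with randomly chosen Pokemon ids (1-15) at
     * the returned positions, showing a progress dialog while it runs.
     */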
private class HttpAsyncTask extends AsyncTask<Void, Void, Void> {
private final double lng;
private final double lat;
public HttpAsyncTask(double lat, double lng){
this.lat = lat;
this.lng = lng;
}
@Override
protected void onPreExecute() {
super.onPreExecute();
try {
pDialog = new ProgressDialog(MapsActivity.this);
pDialog.setMessage("Please Wait...");
pDialog.setCancelable(false);
pDialog.show();
}catch (Exception ex){
}
}
@Override
protected Void doInBackground(Void... Voids) {
String obj = MapsActivity.getStops(lat,lng);
/*for(WildMarker wm : wildMarkers){
if(wm.mk!=null) {
//wm.mk.setVisible(false);
wm.mk.remove();
}
wm.visible=false;
wm.mk=null;
}*/
wildMarkers.removeAll(wildMarkers);
if(obj != null){
try {
JSONArray jsonArray = new JSONArray(obj);
Random r = new Random();
for (int i = 0 ; i<jsonArray.length();i++) {
JSONObject stop = jsonArray.getJSONObject(i);
String lt = stop.getString("lt");
String lng = stop.getString("lng");
double[] loc = {Double.parseDouble(lt), Double.parseDouble(lng)};
wildMarkers.add(new WildMarker(r.nextInt(15)+1,loc,false,wildMarkers.size()));
}
Log.d("MARKERS-HTTP","filled wildmarkers "+wildMarkers.size());
} catch (JSONException e) {
e.printStackTrace();
}
}else{
Log.d("HTTPGET","null response");
}
return null;
}
@Override
protected void onPostExecute(Void result) {
super.onPostExecute(result);
if(pDialog.isShowing()){
pDialog.dismiss();
}
//setWildPokemons();
}
}
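    /**
     * Downloads a Pokemon sprite from the given URL on a background thread and,
     * once decoded, installs the bitmap as the marker's icon.
     */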
private class MarkerIconAsyncTask extends AsyncTask<String,Void,Bitmap>{
public Marker mk;
public MarkerIconAsyncTask(Marker marker){
this.mk = marker;
}
@Override
protected Bitmap doInBackground(String... urls) {
String imageURL = urls[0];
Bitmap image = null;
try {
InputStream in = new URL(imageURL).openStream();
image = BitmapFactory.decodeStream(in);
} catch (Exception e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
return image;
}
@Override
protected void onPostExecute(Bitmap bitmap) {
super.onPostExecute(bitmap);
Log.d("ICONS","bitmap setted on marker");
mk.setIcon(BitmapDescriptorFactory.fromBitmap(bitmap));
}
}
} | 6b026890286be6bd05095c98b680474e861c35f2 | [
"Java"
] | 6 | Java | bizarrecoding/PokemonGOU | e8e0e8548abf19e40f87529cee9ab8187cec4a21 | ad8f4d374ed1f7d986287492d0c81082df25fec1 | |
refs/heads/master | <repo_name>Jack5079/broken-seed<file_sep>/worker.js
/* eslint-env worker */
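// Loads the (very large) seeds.txt once, parses each line as "<seed> <N><axis>"
// (a repeat interval plus an axis letter, judging from the parsing below) and
// posts one randomly chosen entry back to the page whenever a message arrives.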
self.onmessage = async () => {
const seedstxt = await fetch("./seeds.txt", {
cache: "force-cache", // 101 MB JESUS
}).then((res) => res.text());
const seeds = seedstxt.split("\n").filter(Boolean).map((line) => {
const parts = line.split(" ");
return {
seed: parts[0],
repeats: {
every: Number(parts[1].substring(0, parts[1].length - 1)),
axis: parts[1].replace(/\d/g, ""),
},
};
  });
  self.postMessage(seeds[Math.floor(Math.random() * seeds.length)]);
};
| 3bae3c02c00338ab677944659707c388df265111 | [
"JavaScript"
] | 1 | JavaScript | Jack5079/broken-seed | f41721540065419d948169b503e5a8e583b62be9 | 2e9f426bfdc7901b0f11ab6e51ffca4c305542ab | |
refs/heads/master | <file_sep>#! /bin/bash
REPO=embano1
NAME=vcsim
TAG=0.23.0
docker build -t ${REPO}/${NAME}:${TAG} --build-arg RELEASE=${TAG} .
<file_sep># govcsim
Build and Dockerfile for https://github.com/vmware/govmomi/tree/master/vcsim
# Usage
- Clone or `go get` https://github.com/vmware/govmomi/tree/master/vcsim
- Place Dockerfile and build.sh in the vcsim subfolder
- Change the Docker image tag (the `-t` <TAGNAME>, built from `REPO`, `NAME` and `TAG`) in `build.sh` to suit your needs
- Create Docker image with `sh build.sh`
- Run with `docker run --rm -it -p 8989:8989 <TAGNAME>`
- Test endpoint with `curl -k https://localhost:8989/about`
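- For example, with the values currently in `build.sh` (`REPO=embano1`, `NAME=vcsim`, `TAG=0.23.0`) the run command would be `docker run --rm -it -p 8989:8989 embano1/vcsim:0.23.0`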
| d47c38adee4bf123511bf34adf6c99cb114d1604 | [
"Markdown",
"Shell"
] | 2 | Shell | shadowkrusha/govcsim | a076e66ef9bf726b954fc0547c7e5798bfee76dc | ce052a9fef65e1121a13926cbddffaea30c43a33 | |
refs/heads/master | <repo_name>gdgly/JK508HAND<file_sep>/User/APP/history.c
/**
******************************************************************************
  * @file    history.c
* @author fire
* @version V1.0
* @date 2018-04-29
  * @brief   History-curve screen of the handheld multi-channel temperature tester
******************************************************************************
* @attention
*
  * Platform : Embedfire STM32 F429 development board
  * Forum    : http://www.firebbs.cn
  * Taobao   : http://firestm32.taobao.com
*
******************************************************************************
*/
#include "./led/bsp_led.h"
#include "./lcd/bsp_lcd.h"
#include "./key/bsp_key.h"
#include "./beep/bsp_beep.h"
#include "./tim/bsp_basic_tim.h"
#include "./flash/bsp_spi_flash.h"
#include "jk508.h"
#include "./RTC/bsp_rtc.h"
u8 Data_buf[16][16];
float hisdata[16][500];
u8 histime[10][7];
float YLIMIT[3];
u16 his_config[10];
u8 his_time[10][7];
// Storage for the IDs read back from the SPI flash
extern __IO uint32_t DeviceID;
extern __IO uint32_t FlashID;
extern union
{
unsigned char FLAG_VAL7;
struct
{
		unsigned char FN:2;   // font size
		unsigned char SP:2;   // sampling speed
		unsigned char BD:3;   // baud rate
		unsigned char LG:1;   // language
}BIT_FLAG;
}FLAG7;
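/*
 * Draws the "history curve" screen: title and menu chrome, Y-axis labels derived
 * from YHLIMIT/YLLIMIT, the grid, one poly-line per enabled channel taken from
 * hisdata[channel][0..499] (scaled into the 350-pixel plot area via enrate),
 * the stored record time stamps from histime[] and the per-channel colour legend.
 */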
void page_his(void)
{
	u8 i;
	u16 j;
	char timetemp[100];
	char buf[10];

	range = (YHLIMIT - YLLIMIT) / 7;
	enrate = 350/(float)(YHLIMIT - YLLIMIT);
	/* Use the foreground layer by default after initialisation */
	LCD_SetLayer(LCD_FOREGROUND_LAYER); 
	/* Opaque by default; the parameter is the opacity, range 0-0xff (0 = fully transparent, 0xff = opaque) */
	LCD_SetTransparency(0xFF);
	LCD_Clear(LCD_COLOR_BACK);
	/* After LCD_SetLayer(LCD_FOREGROUND_LAYER) the LCD operations below all refresh the
	   foreground layer, unless LCD_SetLayer is called again to select the background layer */
LCD_SetTextColor(LCD_COLOR_HLT);
LCD_SetBackColor(LCD_COLOR_BACK);
LCD_DisplayStringLine(0,10, "< >");
LCD_DisplayStringLine(5,26, "历史曲线");
DrawMenu();
Drawhishmenu();
LCD_SetTextColor(LCD_COLOR_RED);
LCD_SetBackColor(LCD_COLOR_BACK);
for(i = 0;i < 7;i ++)
{
sprintf(buf,"%d",(int)(YHLIMIT - range * i));
DISP_CNL_S(40 + 50*i,10/*90*/,(uint8_t* )buf);
}
DrawGridLine();
// for(j = 0; j < 500; j++)
// {
// LCD_SetColors(LCD_COLOR_RED,LCD_COLOR_BACK);
// LCD_DrawUniLine(111 + j,(int)(400-((Data_buf[j] - YLLIMIT)*enrate)),112+j,(400-((Data_buf[j+1] - YLLIMIT)*enrate)));
// }
for(j = 0; j < 498; j++)
{
if(CH1_SW == ch_on && hisdata[0][j] < YHLIMIT && hisdata[0][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_RED,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[0][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[0][j+1] - YLLIMIT)*enrate)));
}
if(CH2_SW == ch_on && hisdata[1][j] < YHLIMIT && hisdata[1][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_GREEN,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[1][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[1][j+1] - YLLIMIT)*enrate)));
}
if(CH3_SW == ch_on && hisdata[2][j] < YHLIMIT && hisdata[2][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_MAGENTA,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[2][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[2][j+1] - YLLIMIT)*enrate)));
}
if(CH4_SW == ch_on && hisdata[3][j] < YHLIMIT && hisdata[3][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_CYAN,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[3][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[3][j+1] - YLLIMIT)*enrate)));
}
if(CH5_SW == ch_on && hisdata[4][j] < YHLIMIT && hisdata[4][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_YELLOW,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[4][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[4][j+1] - YLLIMIT)*enrate)));
}
if(CH6_SW == ch_on && hisdata[5][j] < YHLIMIT && hisdata[5][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_HLT,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[5][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[5][j+1] - YLLIMIT)*enrate)));
}
if(CH7_SW == ch_on && hisdata[6][j] < YHLIMIT && hisdata[6][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_BT,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[6][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[6][j+1] - YLLIMIT)*enrate)));
}
if(CH8_SW == ch_on && hisdata[7][j] < YHLIMIT && hisdata[7][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_BLUE,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[7][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[7][j+1] - YLLIMIT)*enrate)));
}
if(CH9_SW == ch_on && hisdata[8][j] < YHLIMIT && hisdata[8][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_WHITE,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[8][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[8][j+1] - YLLIMIT)*enrate)));
}
		if(CH10_SW == ch_on && hisdata[9][j] < YHLIMIT && hisdata[9][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_GR1,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[9][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[9][j+1] - YLLIMIT)*enrate)));
}
if(CH11_SW == ch_on && hisdata[10][j] < YHLIMIT && hisdata[10][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_GR2,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[10][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[10][j+1] - YLLIMIT)*enrate)));
}
if(CH12_SW == ch_on && hisdata[11][j] < YHLIMIT && hisdata[11][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_GR3,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[11][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[11][j+1] - YLLIMIT)*enrate)));
}
if(CH13_SW == ch_on && hisdata[12][j] < YHLIMIT && hisdata[12][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_GR4,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[12][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[12][j+1] - YLLIMIT)*enrate)));
}
if(CH14_SW == ch_on && hisdata[13][j] < YHLIMIT && hisdata[13][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_GR5,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[13][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[13][j+1] - YLLIMIT)*enrate)));
}
if(CH15_SW == ch_on && hisdata[14][j] < YHLIMIT && hisdata[14][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_GR6,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[14][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[14][j+1] - YLLIMIT)*enrate)));
}
if(CH16_SW == ch_on && hisdata[15][j] < YHLIMIT && hisdata[15][j] > YLLIMIT)
{
LCD_SetColors(LCD_COLOR_GR7,LCD_COLOR_BACK);
LCD_DrawUniLine(111-80 + j,(int)(400-((hisdata[15][j] - YLLIMIT)*enrate)),112 - 80 + j,(400-((hisdata[15][j+1] - YLLIMIT)*enrate)));
}
}
LCD_SetTextColor(LCD_COLOR_WHITE);
LCD_SetBackColor(LCD_COLOR_BACK);
sprintf(timetemp,"%d%0.2d-%0.2d-%0.2d",
histime[0][0],
histime[0][1],
histime[0][2],
histime[0][3]);
LCD_DisplayStringLine(10,200,(uint8_t *)timetemp);
sprintf(timetemp,"%0.2d:%0.2d:%0.2d",
histime[0][4],
histime[0][5],
histime[0][6]);
DISP_CNL_S(402,80-80,(uint8_t *)timetemp);
sprintf(timetemp,"%0.2d:%0.2d:%0.2d",
histime[1][4],
histime[1][5],
histime[1][6]);
DISP_CNL_S(420,130-80,(uint8_t *)timetemp);
sprintf(timetemp,"%0.2d:%0.2d:%0.2d",
histime[2][4],
histime[2][5],
histime[2][6]);
LCD_SetTextColor(LCD_COLOR_WHITE);
DISP_CNL_S(402,180-80,(uint8_t *)timetemp);
sprintf(timetemp,"%0.2d:%0.2d:%0.2d",
histime[3][4],
histime[3][5],
histime[3][6]);
DISP_CNL_S(420,230-80,(uint8_t *)timetemp);
sprintf(timetemp,"%0.2d:%0.2d:%0.2d",
histime[4][4],
histime[4][5],
histime[4][6]);
DISP_CNL_S(402,280-80,(uint8_t *)timetemp);
sprintf(timetemp,"%0.2d:%0.2d:%0.2d",
histime[5][4],
histime[5][5],
histime[5][6]);
DISP_CNL_S(420,330-80,(uint8_t *)timetemp);
sprintf(timetemp,"%0.2d:%0.2d:%0.2d",
histime[6][4],
histime[6][5],
histime[6][6]);
DISP_CNL_S(402,380-80,(uint8_t *)timetemp);
sprintf(timetemp,"%0.2d:%0.2d:%0.2d",
histime[7][4],
histime[7][5],
histime[7][6]);
DISP_CNL_S(420,430-80,(uint8_t *)timetemp);
sprintf(timetemp,"%0.2d:%0.2d:%0.2d",
histime[8][4],
histime[8][5],
histime[8][6]);
DISP_CNL_S(402,480-80,(uint8_t *)timetemp);
sprintf(timetemp,"%0.2d:%0.2d:%0.2d",
histime[9][4],
histime[9][5],
histime[9][6]);
DISP_CNL_S(420,530-80,(uint8_t *)timetemp);
LCD_SetColors(LCD_COLOR_RED,LCD_COLOR_RED);
LCD_DrawLine(540,100-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(90-50,10+540,"001");
LCD_SetColors(LCD_COLOR_GREEN,LCD_COLOR_GREEN);
LCD_DrawLine(540,120-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(110-50,10+540,"002");
LCD_SetColors(LCD_COLOR_MAGENTA,LCD_COLOR_GREEN);
LCD_DrawLine(540,140-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(130-50,10+540,"003");
LCD_SetColors(LCD_COLOR_CYAN,LCD_COLOR_GREEN);
LCD_DrawLine(540,160-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(150-50,10+540,"004");
LCD_SetColors(LCD_COLOR_YELLOW,LCD_COLOR_GREEN);
LCD_DrawLine(540,180-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(170-50,10+540,"005");
LCD_SetColors(LCD_COLOR_HLT,LCD_COLOR_GREEN);
LCD_DrawLine(540,200-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(190-50,10+540,"006");
LCD_SetColors(LCD_COLOR_BT,LCD_COLOR_GREEN);
LCD_DrawLine(540,220-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(210-50,10+540,"007");
LCD_SetColors(LCD_COLOR_BLUE,LCD_COLOR_GREEN);
LCD_DrawLine(540,240-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(230-50,10+540,"008");
LCD_SetColors(LCD_COLOR_WHITE,LCD_COLOR_GREEN);
LCD_DrawLine(540,260-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(250-50,10+540,"009");
LCD_SetColors(LCD_COLOR_GR1,LCD_COLOR_GREEN);
LCD_DrawLine(540,280-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(270-50,10+540,"010");
LCD_SetColors(LCD_COLOR_GR2,LCD_COLOR_GREEN);
LCD_DrawLine(540,300-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(290-50,10+540,"011");
LCD_SetColors(LCD_COLOR_GR3,LCD_COLOR_GREEN);
LCD_DrawLine(540,320-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(310-50,10+540,"012");
LCD_SetColors(LCD_COLOR_GR4,LCD_COLOR_GREEN);
LCD_DrawLine(540,340-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(330-50,10+540,"013");
LCD_SetColors(LCD_COLOR_GR5,LCD_COLOR_GREEN);
LCD_DrawLine(540,360-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(350-50,10+540,"014");
LCD_SetColors(LCD_COLOR_GR6,LCD_COLOR_GREEN);
LCD_DrawLine(540,380-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(370-50,10+540,"015");
LCD_SetColors(LCD_COLOR_GR7,LCD_COLOR_GREEN);
LCD_DrawLine(540,400-50,10,LCD_DIR_HORIZONTAL);
LCD_SetBackColor(LCD_COLOR_BACK);
DISP_CNL_S(390-50,10+540,"016");
page_flag = history;
}
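/*
 * History records live in the external SPI flash: record "rec" is written as a
 * block of sizeof(hisdata) bytes starting at page offset 16*rec. The sector
 * erase is currently commented out, so the target area is presumably erased
 * elsewhere before a record is saved (assumption based on the commented line).
 */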
void Save_history(u16 rec)
{
// SPI_FLASH_SectorErase(rec*4096);
SPI_FLASH_BufferWrite((void*)hisdata,SPI_FLASH_PageSize*16*rec, sizeof(hisdata));
}
void Read_history(void)
{
SPI_FLASH_BufferRead((void *)hisdata,SPI_FLASH_PageSize*16, sizeof(hisdata));
}
void Save_Sflag(void)
{
SPI_FLASH_SectorErase(1*4096);
SPI_FLASH_BufferWrite((void*)his_config,SPI_FLASH_PageSize*16, sizeof(his_config));
}
void Read_Sflag(void)
{
SPI_FLASH_BufferRead((void *)his_config,SPI_FLASH_PageSize*16, sizeof(his_config));
}
<file_sep>/User/main.c
/**
******************************************************************************
* @file main.c
* @author fire
* @version V1.0
* @date 2015-xx-xx
  * @brief   LTDC LCD Chinese-character display (arbitrary size)
******************************************************************************
* @attention
*
  * Platform : Embedfire STM32 F429 development board
  * Forum    : http://www.firebbs.cn
  * Taobao   : http://firestm32.taobao.com
*
******************************************************************************
*/
#include "stm32f4xx.h"
#include "./led/bsp_led.h"
#include "./usart/bsp_debug_usart.h"
#include "./sdram/bsp_sdram.h"
#include "./lcd/bsp_lcd.h"
#include "./key/bsp_key.h"
#include "./beep/bsp_beep.h"
#include "./tim/bsp_basic_tim.h"
#include "./flash/bsp_spi_flash.h"
#include "./systick/bsp_SysTick.h"
#include "./RTC/bsp_rtc.h"
#include "jk508.h"
#include "usbd_hid_core.h"
#include "usbd_usr.h"
#include "usbd_desc.h"
#include "./internalFlash/bsp_internalFlash.h"
#include "./ch376/ch376.h"
#include "touchscreen.h"
#include "./FILESYS/FILESYS.h"
#include <string.h>
/** @defgroup APP_HID_Private_Variables
* @{
*/
void TempDisplay(void);
u8 usbbuf[0x40];
u8 usbsendbuf[0x40];
u8 savedata[80];
u8 uinfo[64];
//u8 usbreadtime = 0;
u8 eqmtstatus;
u16 crcwatch;
uint16_t readcrc;
u8 *crec;
u8 *csend;
u16 datasize;
u8 usbstatus = UNKNOWN;
u16 watch;
//u8 p1,p2,p3,p4,p5,p6,p7,p8;
#ifdef USB_OTG_HS_INTERNAL_DMA_ENABLED
#if defined ( __ICCARM__ ) /*!< IAR Compiler */
#pragma data_alignment=4
#endif
#endif /* USB_OTG_HS_INTERNAL_DMA_ENABLED */
__ALIGN_BEGIN USB_OTG_CORE_HANDLE USB_OTG_dev __ALIGN_END;
union
{
unsigned char FLAG_VAL1;
struct
{
unsigned char SW1:1;
unsigned char SW2:1;
unsigned char SW3:1;
unsigned char SW4:1;
unsigned char SW5:1;
unsigned char SW6:1;
unsigned char SW7:1;
unsigned char SW8:1;
}BIT_FLAG;
}FLAG1;
union
{
unsigned char FLAG_VAL2;
struct
{
unsigned char SW9:1;
unsigned char SW10:1;
unsigned char SW11:1;
unsigned char SW12:1;
unsigned char SW13:1;
unsigned char SW14:1;
unsigned char SW15:1;
unsigned char SW16:1;
}BIT_FLAG;
}FLAG2;
union
{
unsigned char FLAG_VAL3;
struct
{
unsigned char SW17:1;
unsigned char SW18:1;
unsigned char SW19:1;
unsigned char SW20:1;
unsigned char SW21:1;
unsigned char SW22:1;
unsigned char SW23:1;
unsigned char SW24:1;
}BIT_FLAG;
}FLAG3;
union
{
unsigned char FLAG_VAL4;
struct
{
unsigned char SW25:1;
unsigned char SW26:1;
unsigned char SW27:1;
unsigned char SW28:1;
unsigned char SW29:1;
unsigned char SW30:1;
unsigned char SW31:1;
unsigned char SW32:1;
}BIT_FLAG;
}FLAG4;
union
{
unsigned char FLAG_VAL5;
struct
{
unsigned char SW33:1;
unsigned char SW34:1;
unsigned char SW35:1;
unsigned char SW36:1;
unsigned char SW37:1;
unsigned char SW38:1;
unsigned char SW39:1;
unsigned char SW40:1;
}BIT_FLAG;
}FLAG5;
union
{
unsigned char FLAG_VAL6;
struct
{
		unsigned char TC:4;   // thermocouple type
		unsigned char FT:1;   // sorting / compare mode
		unsigned char BP:1;   // beeper
		unsigned char UT:2;   // temperature unit
}BIT_FLAG;
}FLAG6;
union
{
unsigned char FLAG_VAL7;
struct
{
		unsigned char FN:2;   // font size
		unsigned char SP:2;   // sampling speed
		unsigned char BD:3;   // baud rate
		unsigned char LG:1;   // language
}BIT_FLAG;
}FLAG7;
union
{
unsigned char FLAG_VAL8;
struct
{
		unsigned char BR:3;   // backlight brightness
		unsigned char SP:3;   // sampling speed
}BIT_FLAG;
}FLAG8;
typedef __packed struct {
u8 uCmd;
u8 Size;
u32 u32Arg1;
u32 u32Arg2;
u32 u32Signature;
u32 u32Checksum;
}CMD_T;
// Storage for the IDs read back from the SPI flash
__IO uint32_t DeviceID = 0;
__IO uint32_t FlashID = 0;
void Delay(__IO u32 nCount);
void LCD_Test(void);
void Printf_Charater(void);
int i;
u16 count = 0;
float ch_temp[40];
/**
* @brief 主函数
* @param 无
* @retval 无
*/
int main(void)
{
u8 test[9] = {0X01,0X03,0X02,0X58,0X00,0X01,0X02,0X00,0X05};
__IO uint32_t i = 0;
// u8 res;
/*!< At this stage the microcontroller clock setting is already configured,
this is done through SystemInit() function which is called from startup
file (startup_stm32fxxx_xx.s) before to branch to application main.
To reconfigure the default setting of SystemInit() function, refer to
system_stm32fxxx.c file
*/
	/* Initialise the SDRAM module */
SDRAM_Init();
  /* Initialise the LCD */
LCD_Init();
LCD_LayerInit();
LTDC_Cmd(ENABLE);
LCD_SetLayer(LCD_FOREGROUND_LAYER);
LCD_Clear(LCD_COLOR_BLACK);
power_on();
SysTick_Init();
Delay(500);
  /* LED port initialisation */
// LED_GPIO_Config();
	/* Initialise the 16M serial flash W25Q128 */
SPI_FLASH_Init();
	/* Initialise the debug UART */
Debug_USART_Config();
	/* Read the flash device ID */
DeviceID = SPI_FLASH_ReadDeviceID();
Touch_GPIO_Config();
Delay(200);
	/* Initialise the keys */
Key_GPIO_Config();
//	/* Initialise the SysTick timer */
	/* Initialise the beeper port */
	Beep_GPIO_Config();
	// Enable chip read protection
//	Flash_EnableReadProtection();
//	/* Configure the general-purpose timer to interrupt every 20 ms */
TIMx_Configuration();
	/* Initialise the CH376 pin configuration */
CH376_GPIO_Init();
// Delay_ms(100);
Init_CH376();
// if(res == USB_INT_SUCCESS)
// {
// udisk = udisk_scan();
// }
	/* RTC configuration: select the clock source and set the RTC_CLK prescaler */
RTC_CLK_Config();
if (RTC_ReadBackupRegister(RTC_BKP_DRX) != RTC_BKP_DATA)
{
    /* Set the time and date */
RTC_TimeAndDate_Set();
}
else
{
    /* Check whether this was a power-on reset */
if (RCC_GetFlagStatus(RCC_FLAG_PORRST) != RESET)
{
printf("\r\n 发生电源复位....\r\n");
}
    /* Check whether this was an external (pin) reset */
else if (RCC_GetFlagStatus(RCC_FLAG_PINRST) != RESET)
{
printf("\r\n 发生外部复位....\r\n");
}
printf("\r\n 不需要重新配置RTC....\r\n");
		/* Enable the PWR clock */
RCC_APB1PeriphClockCmd(RCC_APB1Periph_PWR, ENABLE);
		/* PWR_CR: set DBP=1 to enable access to the RTC, the RTC backup registers and the backup SRAM */
PWR_BackupAccessCmd(ENABLE);
		/* Wait for the RTC APB registers to synchronise */
RTC_WaitForSynchro();
}
USBD_Init(&USB_OTG_dev,USB_OTG_HS_CORE_ID,
&USR_desc,&USBD_HID_cb,&USR_cb);
	/* Read the SPI flash ID */
FlashID = SPI_FLASH_ReadID();
if (FlashID == sFLASH_ID)
{
Delay(500);
Read_flag();
Read_Sflag();
}
watch = CRC16(test,9);
// page_home();
// watch = sizeof(TempHLimits);
// SECTOR_REC = 2;
while(1)
{
// watch = GPIO_ReadInputDataBit(TOUCH_YPLUS_GPIO_PORT,TOUCH_YPLUS_GPIO_PIN);
		/* Show the time and date */
		RTC_TimeAndDate_Show();
		/* Key scan */
		Key_Function();
// CH1TEMP = (RecBuff[21] * 256 + RecBuff[22])/10.0;
DrawBattery(battery);
TempDisplay();
// DCD_EP_PrepareRx(&USB_OTG_dev,HID_OUT_EP,usbbuf,64);//接收PC数据
// if(UsbHidReceiveComplete) //接收到数据
// {
// UsbHidReceiveComplete=0;
// UsbDataHandle();
// }
// Draw_graph();
// LCD_Test();
}
}
/* Function used to test various LCD features */
void LCD_Test(void)
{
// static uint32_t testCNT=0;
// char dispBuff[100];
// char buf1[5];
// char buf2[5];
// char buf3[5];
// char buf4[5];
// char buf5[5];
// char buf6[5];
// char buf7[5];
// char buf8[5];
// p1 = GPIO_ReadInputDataBit(P1_GPIO_PORT,P1_PIN);
// p2 = GPIO_ReadInputDataBit(P2_GPIO_PORT,P2_PIN);
// p3 = GPIO_ReadInputDataBit(P3_GPIO_PORT,P3_PIN);
// p4 = GPIO_ReadInputDataBit(P4_GPIO_PORT,P4_PIN);
// p5 = GPIO_ReadInputDataBit(P5_GPIO_PORT,P5_PIN);
// p6 = GPIO_ReadInputDataBit(P6_GPIO_PORT,P6_PIN);
// p7 = GPIO_ReadInputDataBit(LTDC_BL_GPIO_PORT,LTDC_BL_GPIO_PIN);
//
// u8 p1o = GPIO_ReadOutputDataBit(P1_GPIO_PORT,P1_PIN);
// u8 p2o = GPIO_ReadOutputDataBit(P2_GPIO_PORT,P2_PIN);
// u8 p3o = GPIO_ReadOutputDataBit(P3_GPIO_PORT,P3_PIN);
// u8 p4o = GPIO_ReadOutputDataBit(P4_GPIO_PORT,P4_PIN);
// u8 p5o = GPIO_ReadOutputDataBit(P3_GPIO_PORT,P5_PIN);
//
// /*使用不透明前景层*/
//// LCD_SetLayer(LCD_FOREGROUND_LAYER);
//// LCD_SetTransparency(0xff);
//
//// LCD_Clear(LCD_COLOR_BLACK); /* 清屏,显示全黑 */
//// /*设置字体颜色及字体的背景颜色(此处的背景不是指LCD的背景层!注意区分)*/
// LCD_SetColors(LCD_COLOR_WHITE,LCD_COLOR_BLACK);
//
	/* Select the font. When mixing Chinese and English, prefer the 16*24 font for English;
	 * the Chinese font is 24*24. For other fonts, generate your own font bitmaps. */
	/* This function only affects the English font */
// LCD_SetFont(&Font16x24);
// if(GPIO_ReadInputDataBit(P6_GPIO_PORT,P6_PIN))
// sprintf(buf1,"%d",sizeof(u64));
// sp rintf(buf2,"%d",GPIO_ReadInputDataBit(P2_GPIO_PORT,P2_PIN));
// sprintf(buf3,"%d",GPIO_ReadInputDataBit(P3_GPIO_PORT,P3_PIN));
// sprintf(buf4,"%d",GPIO_ReadInputDataBit(P4_GPIO_PORT,P4_PIN));
// sprintf(buf5,"%d",GPIO_ReadInputDataBit(P5_GPIO_PORT,P5_PIN));
// sprintf(buf6,"%d",GPIO_ReadInputDataBit(P6_GPIO_PORT,P6_PIN));
// sprintf(buf8,"%d",GPIO_ReadInputDataBit(LTDC_BL_GPIO_PORT,LTDC_BL_GPIO_PIN));
// sprintf(buf7,"%d",count);
// LCD_DisplayStringLine(50,200,(uint8_t* )buf1);
// LCD_DisplayStringLine(70,200,(uint8_t* )buf2);
// LCD_DisplayStringLine(90,200,(uint8_t* )buf3);
// LCD_DisplayStringLine(110,200,(uint8_t* )buf4);
// LCD_DisplayStringLine(130,200,(uint8_t* )buf5);
// LCD_DisplayStringLine(150,200,(uint8_t* )buf6);
// LCD_DisplayStringLine(170,200,(uint8_t* )buf7);
// LCD_DisplayStringLine(190,200,(uint8_t* )buf8);
}
/*GENERAL CODES*/
void Delay(__IO uint32_t nCount)	 // simple busy-wait delay function
{
for(; nCount != 0; nCount--);
}
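/*
 * Modbus-style CRC-16 over Len bytes: both CRC bytes are seeded with 0xFF and the
 * loop implements the reflected 0xA001 polynomial split across the two bytes.
 * Returns (high byte << 8) | low byte, the checksum format used by the HID frames
 * handled in UsbDataHandle().
 */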
uint16_t CRC16(uint8_t *puchMsg, uint8_t Len)
{
uint8_t t, m,n,p;
	uint8_t uchCRCHi=0xFF; /* high CRC byte initialised */ 
	uint8_t uchCRCLo =0xFF; /* low CRC byte initialised */ 
for(t=0;t<Len;t++)
{
uchCRCLo=uchCRCLo^puchMsg[t];
for(n=0;n<8;n++)
{
m=uchCRCLo&1;p=uchCRCHi&1;uchCRCHi>>=1;
uchCRCLo>>=1;
if(p)
{
uchCRCLo|=0x80;
}
if(m)
{
uchCRCHi=uchCRCHi^0xa0;
uchCRCLo=uchCRCLo^1;
}
}
}
return (uchCRCHi<<8|uchCRCLo);
}
// Power-on backlight brightness (note: the last branch repeats BRTS == L1, so it can never run)
void InitBrt(void)
{
if(BRTS == L0)
{
brightness = 10;
}else if(BRTS == L1){
brightness = 20;
}else if(BRTS == L2){
brightness = 40;
}else if(BRTS == L3){
brightness = 60;
}else if(BRTS == L1){
brightness = 80;
}
TIM_PWMOUTPUT_Config(brightness);
}
// Temperature display
void TempDisplay(void)
{
char buf[10];
static u8 eqmtstatus;
u8 i;
if(page_flag == display)
{
// LCD_SetColors(LCD_COLOR_YELLOW,LCD_COLOR_BACK);
// sprintf(buf,"%03d",charge);
// LCD_DisplayStringLine_48(47,200,(uint8_t*)buf);
// sprintf(buf,"%03d",battery);
// LCD_DisplayStringLine_48(47,260,(uint8_t*)buf);
if(eqmtstatus < 60)
{
if(LANG == chs)
{
LCD_SetColors(LCD_COLOR_RED,LCD_COLOR_BACK);
LCD_DisplayStringLine(5,180,"数据采集中");
}else if(LANG == eng){
LCD_SetColors(LCD_COLOR_RED,LCD_COLOR_BACK);
DISP_INS(10,130,"Collecting data");
}
eqmtstatus++;
}else if(eqmtstatus >= 60 && eqmtstatus < 120){
if(LANG == chs)
{
LCD_SetColors(LCD_COLOR_BACK,LCD_COLOR_BACK);
LCD_DrawFullRect(180,5,130,32);
}else if(LANG == eng){
LCD_SetColors(LCD_COLOR_BACK,LCD_COLOR_BACK);
LCD_DrawFullRect(130,10,235,25);
}
eqmtstatus++;
}else if(eqmtstatus >= 120){
eqmtstatus = 0;
}
if(FONT == big)
{
if(ch_page == page1)
{
// LCD_SetColors(LCD_COLOR_WHITE,LCD_COLOR_BACK);
Check_limits(1);
sprintf(buf,"%.1f",CH1TEMP - COR1);
if(CH1TEMP < 100)
{
strcat(buf," ");
}else if(CH1TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(95,150,(uint8_t*)buf,CH1_SW);
Check_limits(2);
sprintf(buf,"%.1f",CH2TEMP - COR2);
if(CH2TEMP < 100)
{
strcat(buf," ");
}else if(CH2TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(165,150,(uint8_t*)buf,CH2_SW);
Check_limits(3);
sprintf(buf,"%.1f",CH3TEMP - COR3);
if(CH3TEMP < 100)
{
strcat(buf," ");
}else if(CH3TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(235,150,(uint8_t*)buf,CH3_SW);
Check_limits(4);
sprintf(buf,"%.1f",CH4TEMP - COR4);
if(CH4TEMP < 100)
{
strcat(buf," ");
}else if(CH4TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(305,150,(uint8_t*)buf,CH4_SW);
Check_limits(5);
sprintf(buf,"%.1f",CH5TEMP - COR5);
if(CH5TEMP < 100)
{
strcat(buf," ");
}else if(CH5TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(95,470,(uint8_t*)buf,CH5_SW);
Check_limits(6);
sprintf(buf,"%.1f",CH6TEMP - COR6);
if(CH6TEMP < 100)
{
strcat(buf," ");
}else if(CH6TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(165,470,(uint8_t*)buf,CH6_SW);
Check_limits(7);
sprintf(buf,"%.1f",CH7TEMP - COR7);
if(CH7TEMP < 100)
{
strcat(buf," ");
}else if(CH7TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(235,470,(uint8_t*)buf,CH7_SW);
Check_limits(8);
			sprintf(buf,"%.1f",CH8TEMP - COR8);
if(CH8TEMP < 100)
{
strcat(buf," ");
}else if(CH8TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(305,470,(uint8_t*)buf,CH8_SW);
}else if(ch_page == page2){
// LCD_SetColors(LCD_COLOR_WHITE,LCD_COLOR_BACK);
Check_limits(9);
sprintf(buf,"%.1f",CH9TEMP - COR9);
if(CH9TEMP < 100)
{
strcat(buf," ");
}else if(CH9TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(95,150,(uint8_t*)buf,CH9_SW);
Check_limits(10);
sprintf(buf,"%.1f",CH10TEMP - COR10);
if(CH10TEMP < 100)
{
strcat(buf," ");
}else if(CH10TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(165,150,(uint8_t*)buf,CH10_SW);
Check_limits(11);
sprintf(buf,"%.1f",CH11TEMP - COR11);
if(CH11TEMP < 100)
{
strcat(buf," ");
}else if(CH11TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(235,150,(uint8_t*)buf,CH11_SW);
Check_limits(12);
sprintf(buf,"%.1f",CH12TEMP - COR12);
if(CH12TEMP < 100)
{
strcat(buf," ");
}else if(CH12TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(305,150,(uint8_t*)buf,CH12_SW);
Check_limits(13);
sprintf(buf,"%.1f",CH13TEMP - COR13);
if(CH13TEMP < 100)
{
strcat(buf," ");
}else if(CH13TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(95,470,(uint8_t*)buf,CH13_SW);
Check_limits(14);
			sprintf(buf,"%.1f",CH14TEMP - COR14);
if(CH14TEMP < 100)
{
strcat(buf," ");
}else if(CH14TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(165,470,(uint8_t*)buf,CH14_SW);
Check_limits(15);
sprintf(buf,"%.1f",CH15TEMP - COR15);
if(CH15TEMP < 100)
{
strcat(buf," ");
}else if(CH15TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(235,470,(uint8_t*)buf,CH15_SW);
Check_limits(16);
sprintf(buf,"%.1f",CH16TEMP - COR16);
if(CH16TEMP < 100)
{
strcat(buf," ");
}else if(CH16TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_L(305,470,(uint8_t*)buf,CH16_SW);
}else if(ch_page == page3){
// LCD_SetColors(LCD_COLOR_WHITE,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH17TEMP);
DISP_TEMP_L(95,150,(uint8_t*)buf,CH17_SW);
sprintf(buf,"%.1f",CH18TEMP);
DISP_TEMP_L(165,150,(uint8_t*)buf,CH18_SW);
sprintf(buf,"%.1f",CH19TEMP);
DISP_TEMP_L(235,150,(uint8_t*)buf,CH19_SW);
sprintf(buf,"%.1f",CH20TEMP);
DISP_TEMP_L(305,150,(uint8_t*)buf,CH20_SW);
sprintf(buf,"%.1f",CH21TEMP);
DISP_TEMP_L(95,470,(uint8_t*)buf,CH21_SW);
sprintf(buf,"%.1f",CH22TEMP);
DISP_TEMP_L(165,470,(uint8_t*)buf,CH22_SW);
sprintf(buf,"%.1f",CH23TEMP);
DISP_TEMP_L(235,470,(uint8_t*)buf,CH23_SW);
sprintf(buf,"%.1f",CH24TEMP);
DISP_TEMP_L(305,470,(uint8_t*)buf,CH24_SW);
}else if(ch_page == page4){
// LCD_SetColors(LCD_COLOR_WHITE,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH25TEMP);
DISP_TEMP_L(95,150,(uint8_t*)buf,CH25_SW);
sprintf(buf,"%.1f",CH26TEMP);
DISP_TEMP_L(165,150,(uint8_t*)buf,CH26_SW);
sprintf(buf,"%.1f",CH27TEMP);
DISP_TEMP_L(235,150,(uint8_t*)buf,CH27_SW);
sprintf(buf,"%.1f",CH28TEMP);
DISP_TEMP_L(305,150,(uint8_t*)buf,CH28_SW);
sprintf(buf,"%.1f",CH29TEMP);
DISP_TEMP_L(95,470,(uint8_t*)buf,CH29_SW);
sprintf(buf,"%.1f",CH30TEMP);
DISP_TEMP_L(165,470,(uint8_t*)buf,CH30_SW);
sprintf(buf,"%.1f",CH31TEMP);
DISP_TEMP_L(235,470,(uint8_t*)buf,CH31_SW);
sprintf(buf,"%.1f",CH32TEMP);
DISP_TEMP_L(305,470,(uint8_t*)buf,CH32_SW);
}else if(ch_page == page5){
// LCD_SetColors(LCD_COLOR_WHITE,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH33TEMP);
DISP_TEMP_L(95,150,(uint8_t*)buf,CH33_SW);
sprintf(buf,"%.1f",CH34TEMP);
DISP_TEMP_L(165,150,(uint8_t*)buf,CH34_SW);
sprintf(buf,"%.1f",CH35TEMP);
DISP_TEMP_L(235,150,(uint8_t*)buf,CH35_SW);
sprintf(buf,"%.1f",CH36TEMP);
DISP_TEMP_L(305,150,(uint8_t*)buf,CH36_SW);
sprintf(buf,"%.1f",CH37TEMP);
DISP_TEMP_L(95,470,(uint8_t*)buf,CH37_SW);
sprintf(buf,"%.1f",CH38TEMP);
DISP_TEMP_L(165,470,(uint8_t*)buf,CH38_SW);
sprintf(buf,"%.1f",CH39TEMP);
DISP_TEMP_L(235,470,(uint8_t*)buf,CH39_SW);
sprintf(buf,"%.1f",CH40TEMP);
DISP_TEMP_L(305,470,(uint8_t*)buf,CH40_SW);
}
}else if(FONT == middle){
if(reflag == 1)
{
LCD_SetColors(LCD_COLOR_BACK,LCD_COLOR_BACK);
LCD_DrawFullRect(90,100,130,305);
LCD_DrawFullRect(290,100,130,305);
LCD_DrawFullRect(490,100,130,305);
}
if(ch_page == page1)
{
// LCD_SetColors(LCD_COLOR_WHITE,LCD_COLOR_BACK);
Check_limits(1);
sprintf(buf,"%.1f",CH1TEMP - COR1);
if(CH1TEMP < 100)
{
strcat(buf," ");
}else if(CH1TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(100,90,(uint8_t*)buf,CH1_SW);
Check_limits(2);
sprintf(buf,"%.1f",CH2TEMP - COR2);
if(CH2TEMP < 100)
{
strcat(buf," ");
}else if(CH2TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(140,90,(uint8_t*)buf,CH2_SW);
Check_limits(3);
sprintf(buf,"%.1f",CH3TEMP - COR3);
if(CH3TEMP < 100)
{
strcat(buf," ");
}else if(CH3TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(180,90,(uint8_t*)buf,CH3_SW);
Check_limits(4);
sprintf(buf,"%.1f",CH4TEMP - COR4);
if(CH4TEMP < 100)
{
strcat(buf," ");
}else if(CH4TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(220,90,(uint8_t*)buf,CH4_SW);
Check_limits(5);
sprintf(buf,"%.1f",CH5TEMP - COR5);
if(CH5TEMP < 100)
{
strcat(buf," ");
}else if(CH5TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(260,90,(uint8_t*)buf,CH5_SW);
Check_limits(6);
sprintf(buf,"%.1f",CH6TEMP - COR6);
if(CH6TEMP < 100)
{
strcat(buf," ");
}else if(CH6TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(300,90,(uint8_t*)buf,CH6_SW);
Check_limits(7);
sprintf(buf,"%.1f",CH7TEMP - COR7);
if(CH7TEMP < 100)
{
strcat(buf," ");
}else if(CH7TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(340,90,(uint8_t*)buf,CH7_SW);
Check_limits(8);
sprintf(buf,"%.1f",CH8TEMP - COR8);
if(CH8TEMP < 100)
{
strcat(buf," ");
}else if(CH8TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(380,90,(uint8_t*)buf,CH8_SW);
Check_limits(9);
sprintf(buf,"%.1f",CH9TEMP - COR9);
if(CH9TEMP < 100)
{
strcat(buf," ");
}else if(CH9TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(100,290,(uint8_t*)buf,CH9_SW);
Check_limits(10);
sprintf(buf,"%.1f",CH10TEMP - COR10);
if(CH10TEMP < 100)
{
strcat(buf," ");
}else if(CH10TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(140,290,(uint8_t*)buf,CH10_SW);
Check_limits(11);
sprintf(buf,"%.1f",CH11TEMP - COR11);
if(CH11TEMP < 100)
{
strcat(buf," ");
}else if(CH11TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(180,290,(uint8_t*)buf,CH11_SW);
Check_limits(12);
sprintf(buf,"%.1f",CH12TEMP - COR12);
if(CH12TEMP < 100)
{
strcat(buf," ");
}else if(CH12TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(220,290,(uint8_t*)buf,CH12_SW);
Check_limits(13);
sprintf(buf,"%.1f",CH13TEMP - COR13);
if(CH13TEMP < 100)
{
strcat(buf," ");
}else if(CH13TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(260,290,(uint8_t*)buf,CH13_SW);
Check_limits(14);
sprintf(buf,"%.1f",CH14TEMP - COR14);
if(CH14TEMP < 100)
{
strcat(buf," ");
}else if(CH14TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(300,290,(uint8_t*)buf,CH14_SW);
Check_limits(15);
sprintf(buf,"%.1f",CH15TEMP - COR15);
if(CH15TEMP < 100)
{
strcat(buf," ");
}else if(CH15TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(340,290,(uint8_t*)buf,CH15_SW);
Check_limits(16);
sprintf(buf,"%.1f",CH16TEMP - COR16);
if(CH16TEMP < 100)
{
strcat(buf," ");
}else if(CH16TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_M(380,290,(uint8_t*)buf,CH16_SW);
sprintf(buf,"%.1f",CH17TEMP);
DISP_TEMP_M(100,490,(uint8_t*)buf,CH17_SW);
sprintf(buf,"%.1f",CH18TEMP);
DISP_TEMP_M(140,490,(uint8_t*)buf,CH18_SW);
sprintf(buf,"%.1f",CH19TEMP);
DISP_TEMP_M(180,490,(uint8_t*)buf,CH19_SW);
sprintf(buf,"%.1f",CH20TEMP);
DISP_TEMP_M(220,490,(uint8_t*)buf,CH20_SW);
sprintf(buf,"%.1f",CH21TEMP);
DISP_TEMP_M(260,490,(uint8_t*)buf,CH21_SW);
sprintf(buf,"%.1f",CH22TEMP);
DISP_TEMP_M(300,490,(uint8_t*)buf,CH22_SW);
sprintf(buf,"%.1f",CH23TEMP);
DISP_TEMP_M(340,490,(uint8_t*)buf,CH23_SW);
sprintf(buf,"%.1f",CH24TEMP);
DISP_TEMP_M(380,490,(uint8_t*)buf,CH24_SW);
}else if(ch_page == page2){
// LCD_SetColors(LCD_COLOR_WHITE,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH25TEMP);
DISP_TEMP_M(100,90,(uint8_t*)buf,CH25_SW);
sprintf(buf,"%.1f",CH26TEMP);
DISP_TEMP_M(140,90,(uint8_t*)buf,CH26_SW);
sprintf(buf,"%.1f",CH27TEMP);
DISP_TEMP_M(180,90,(uint8_t*)buf,CH27_SW);
sprintf(buf,"%.1f",CH28TEMP);
DISP_TEMP_M(220,90,(uint8_t*)buf,CH28_SW);
sprintf(buf,"%.1f",CH29TEMP);
DISP_TEMP_M(260,90,(uint8_t*)buf,CH29_SW);
sprintf(buf,"%.1f",CH30TEMP);
DISP_TEMP_M(300,90,(uint8_t*)buf,CH30_SW);
sprintf(buf,"%.1f",CH31TEMP);
DISP_TEMP_M(340,90,(uint8_t*)buf,CH31_SW);
sprintf(buf,"%.1f",CH32TEMP);
DISP_TEMP_M(380,90,(uint8_t*)buf,CH32_SW);
sprintf(buf,"%.1f",CH33TEMP);
DISP_TEMP_M(100,290,(uint8_t*)buf,CH33_SW);
sprintf(buf,"%.1f",CH34TEMP);
DISP_TEMP_M(140,290,(uint8_t*)buf,CH34_SW);
sprintf(buf,"%.1f",CH35TEMP);
DISP_TEMP_M(180,290,(uint8_t*)buf,CH35_SW);
			sprintf(buf,"%.1f",CH36TEMP);
DISP_TEMP_M(220,290,(uint8_t*)buf,CH36_SW);
sprintf(buf,"%.1f",CH37TEMP);
DISP_TEMP_M(260,290,(uint8_t*)buf,CH37_SW);
sprintf(buf,"%.1f",CH38TEMP);
DISP_TEMP_M(300,290,(uint8_t*)buf,CH38_SW);
sprintf(buf,"%.1f",CH39TEMP);
DISP_TEMP_M(340,290,(uint8_t*)buf,CH39_SW);
sprintf(buf,"%.1f",CH40TEMP);
DISP_TEMP_M(380,290,(uint8_t*)buf,CH40_SW);
}
}else if(FONT == small){
// LCD_SetColors(LCD_COLOR_WHITE,LCD_COLOR_BACK);
Check_limits(1);
sprintf(buf,"%.1f",CH1TEMP - COR1);
if(CH1TEMP < 100)
{
strcat(buf," ");
}else if(CH1TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(100,60,(uint8_t*)buf,CH1_SW);
Check_limits(2);
sprintf(buf,"%.1f",CH2TEMP - COR2);
if(CH2TEMP < 100)
{
strcat(buf," ");
}else if(CH2TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(140,60,(uint8_t*)buf,CH2_SW);
Check_limits(3);
sprintf(buf,"%.1f",CH3TEMP - COR3);
if(CH3TEMP < 100)
{
strcat(buf," ");
}else if(CH3TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(180,60,(uint8_t*)buf,CH3_SW);
Check_limits(4);
sprintf(buf,"%.1f",CH4TEMP - COR4);
if(CH4TEMP < 100)
{
strcat(buf," ");
}else if(CH4TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(220,60,(uint8_t*)buf,CH4_SW);
Check_limits(5);
sprintf(buf,"%.1f",CH5TEMP - COR5);
if(CH5TEMP < 100)
{
strcat(buf," ");
}else if(CH5TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(260,60,(uint8_t*)buf,CH5_SW);
Check_limits(6);
sprintf(buf,"%.1f",CH6TEMP - COR6);
if(CH6TEMP < 100)
{
strcat(buf," ");
}else if(CH6TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(300,60,(uint8_t*)buf,CH6_SW);
Check_limits(7);
sprintf(buf,"%.1f",CH7TEMP - COR7);
if(CH7TEMP < 100)
{
strcat(buf," ");
}else if(CH7TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(340,60,(uint8_t*)buf,CH7_SW);
Check_limits(8);
sprintf(buf,"%.1f",CH8TEMP - COR8);
if(CH8TEMP < 100)
{
strcat(buf," ");
}else if(CH8TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(380,60,(uint8_t*)buf,CH8_SW);
Check_limits(9);
sprintf(buf,"%.1f",CH9TEMP - COR9);
if(CH9TEMP < 100)
{
strcat(buf," ");
}else if(CH9TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(100,184,(uint8_t*)buf,CH9_SW);
Check_limits(10);
sprintf(buf,"%.1f",CH10TEMP - COR10);
if(CH10TEMP < 100)
{
strcat(buf," ");
}else if(CH10TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(140,184,(uint8_t*)buf,CH10_SW);
Check_limits(11);
sprintf(buf,"%.1f",CH11TEMP - COR11);
if(CH11TEMP < 100)
{
strcat(buf," ");
}else if(CH11TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(180,184,(uint8_t*)buf,CH11_SW);
Check_limits(12);
sprintf(buf,"%.1f",CH12TEMP - COR12);
if(CH12TEMP < 100)
{
strcat(buf," ");
}else if(CH12TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(220,184,(uint8_t*)buf,CH12_SW);
Check_limits(13);
sprintf(buf,"%.1f",CH13TEMP - COR13);
if(CH13TEMP < 100)
{
strcat(buf," ");
}else if(CH13TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(260,184,(uint8_t*)buf,CH13_SW);
Check_limits(14);
sprintf(buf,"%.1f",CH14TEMP - COR14);
if(CH14TEMP < 100)
{
strcat(buf," ");
}else if(CH14TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(300,184,(uint8_t*)buf,CH14_SW);
Check_limits(15);
sprintf(buf,"%.1f",CH15TEMP - COR15);
if(CH15TEMP < 100)
{
strcat(buf," ");
}else if(CH15TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(340,184,(uint8_t*)buf,CH15_SW);
Check_limits(16);
sprintf(buf,"%.1f",CH16TEMP - COR16);
if(CH16TEMP < 100)
{
strcat(buf," ");
}else if(CH16TEMP < 1000){
strcat(buf," ");
}
DISP_TEMP_S(380,184,(uint8_t*)buf,CH16_SW);
sprintf(buf,"%.1f",CH17TEMP);
DISP_TEMP_S(100,308,(uint8_t*)buf,CH17_SW);
sprintf(buf,"%.1f",CH18TEMP);
DISP_TEMP_S(140,308,(uint8_t*)buf,CH18_SW);
sprintf(buf,"%.1f",CH19TEMP);
DISP_TEMP_S(180,308,(uint8_t*)buf,CH19_SW);
sprintf(buf,"%.1f",CH20TEMP);
DISP_TEMP_S(220,308,(uint8_t*)buf,CH20_SW);
sprintf(buf,"%.1f",CH21TEMP);
DISP_TEMP_S(260,308,(uint8_t*)buf,CH21_SW);
sprintf(buf,"%.1f",CH22TEMP);
DISP_TEMP_S(300,308,(uint8_t*)buf,CH22_SW);
sprintf(buf,"%.1f",CH23TEMP);
DISP_TEMP_S(340,308,(uint8_t*)buf,CH23_SW);
sprintf(buf,"%.1f",CH24TEMP);
DISP_TEMP_S(380,308,(uint8_t*)buf,CH24_SW);
sprintf(buf,"%.1f",CH25TEMP);
DISP_TEMP_S(100,432,(uint8_t*)buf,CH25_SW);
sprintf(buf,"%.1f",CH26TEMP);
DISP_TEMP_S(140,432,(uint8_t*)buf,CH26_SW);
sprintf(buf,"%.1f",CH27TEMP);
DISP_TEMP_S(180,432,(uint8_t*)buf,CH27_SW);
sprintf(buf,"%.1f",CH28TEMP);
DISP_TEMP_S(220,432,(uint8_t*)buf,CH28_SW);
sprintf(buf,"%.1f",CH29TEMP);
DISP_TEMP_S(260,432,(uint8_t*)buf,CH29_SW);
sprintf(buf,"%.1f",CH30TEMP);
DISP_TEMP_S(300,432,(uint8_t*)buf,CH30_SW);
sprintf(buf,"%.1f",CH31TEMP);
DISP_TEMP_S(340,432,(uint8_t*)buf,CH31_SW);
sprintf(buf,"%.1f",CH32TEMP);
DISP_TEMP_S(380,432,(uint8_t*)buf,CH32_SW);
sprintf(buf,"%.1f",CH33TEMP);
DISP_TEMP_S(100,556,(uint8_t*)buf,CH33_SW);
sprintf(buf,"%.1f",CH34TEMP);
DISP_TEMP_S(140,556,(uint8_t*)buf,CH34_SW);
sprintf(buf,"%.1f",CH35TEMP);
DISP_TEMP_S(180,556,(uint8_t*)buf,CH35_SW);
		sprintf(buf,"%.1f",CH36TEMP);
DISP_TEMP_S(220,556,(uint8_t*)buf,CH36_SW);
sprintf(buf,"%.1f",CH37TEMP);
DISP_TEMP_S(260,556,(uint8_t*)buf,CH37_SW);
sprintf(buf,"%.1f",CH38TEMP);
DISP_TEMP_S(300,556,(uint8_t*)buf,CH38_SW);
sprintf(buf,"%.1f",CH39TEMP);
DISP_TEMP_S(340,556,(uint8_t*)buf,CH39_SW);
sprintf(buf,"%.1f",CH40TEMP);
DISP_TEMP_S(380,556,(uint8_t*)buf,CH40_SW);
}
}else if(page_flag == graph){
LCD_SetColors(LCD_COLOR_RED,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH1TEMP - COR1);
DISP_CNL_S(40,585,(uint8_t*)buf);
LCD_SetColors(LCD_COLOR_GREEN,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH2TEMP - COR2);
DISP_CNL_S(60,585,(uint8_t*)buf);
LCD_SetColors(LCD_COLOR_MAGENTA,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH3TEMP - COR3);
DISP_CNL_S(80,585,(uint8_t*)buf);
LCD_SetColors(LCD_COLOR_CYAN,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH4TEMP - COR4);
DISP_CNL_S(100,585,(uint8_t*)buf);
LCD_SetColors(LCD_COLOR_YELLOW,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH5TEMP - COR5);
DISP_CNL_S(120,585,(uint8_t*)buf);
LCD_SetColors(LCD_COLOR_HLT,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH6TEMP - COR6);
DISP_CNL_S(140,585,(uint8_t*)buf);
LCD_SetColors(LCD_COLOR_BT,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH7TEMP - COR7);
DISP_CNL_S(160,585,(uint8_t*)buf);
LCD_SetColors(LCD_COLOR_BLUE,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH8TEMP - COR8);
DISP_CNL_S(180,585,(uint8_t*)buf);
LCD_SetColors(LCD_COLOR_WHITE,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH9TEMP - COR9);
DISP_CNL_S(200,585,(uint8_t*)buf);
LCD_SetColors(LCD_COLOR_GR1,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH10TEMP - COR10);
DISP_CNL_S(220,585,(uint8_t*)buf);
LCD_SetColors(LCD_COLOR_GR2,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH11TEMP - COR11);
DISP_CNL_S(240,585,(uint8_t*)buf);
LCD_SetColors(LCD_COLOR_GR3,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH12TEMP - COR12);
DISP_CNL_S(260,585,(uint8_t*)buf);
LCD_SetColors(LCD_COLOR_GR4,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH13TEMP - COR13);
DISP_CNL_S(280,585,(uint8_t*)buf);
LCD_SetColors(LCD_COLOR_GR5,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH14TEMP - COR14);
DISP_CNL_S(300,585,(uint8_t*)buf);
LCD_SetColors(LCD_COLOR_GR6,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH15TEMP - COR15);
DISP_CNL_S(320,585,(uint8_t*)buf);
LCD_SetColors(LCD_COLOR_GR7,LCD_COLOR_BACK);
sprintf(buf,"%.1f",CH16TEMP - COR16);
DISP_CNL_S(340,585,(uint8_t*)buf);
}else if(page_flag == calibrate){
if(cor_page == page1)
{
cal_disp(1,CH1TEMP - COR1);
cal_disp(2,CH2TEMP - COR2);
cal_disp(3,CH3TEMP - COR3);
cal_disp(4,CH4TEMP - COR4);
cal_disp(5,CH5TEMP - COR5);
cal_disp(6,CH6TEMP - COR6);
cal_disp(7,CH7TEMP - COR7);
cal_disp(8,CH8TEMP - COR8);
}else if(cor_page == page2){
cal_disp(1,CH9TEMP - COR9);
cal_disp(2,CH10TEMP - COR10);
cal_disp(3,CH11TEMP - COR11);
cal_disp(4,CH12TEMP - COR12);
cal_disp(5,CH13TEMP - COR13);
cal_disp(6,CH14TEMP - COR14);
cal_disp(7,CH15TEMP - COR15);
cal_disp(8,CH16TEMP - COR16);
}else if(cor_page == page3){
cal_disp(1,CH17TEMP - COR17);
cal_disp(2,CH18TEMP - COR18);
cal_disp(3,CH19TEMP - COR19);
cal_disp(4,CH20TEMP - COR20);
cal_disp(5,CH21TEMP - COR21);
cal_disp(6,CH22TEMP - COR22);
cal_disp(7,CH23TEMP - COR23);
cal_disp(8,CH24TEMP - COR24);
}else if(cor_page == page4){
cal_disp(1,CH25TEMP - COR25);
cal_disp(2,CH26TEMP - COR26);
cal_disp(3,CH27TEMP - COR27);
cal_disp(4,CH28TEMP - COR28);
cal_disp(5,CH29TEMP - COR29);
cal_disp(6,CH30TEMP - COR30);
cal_disp(7,CH31TEMP - COR31);
cal_disp(8,CH32TEMP - COR32);
}
else if(cor_page == page5){
cal_disp(1,CH33TEMP - COR33);
cal_disp(2,CH34TEMP - COR34);
cal_disp(3,CH35TEMP - COR35);
cal_disp(4,CH36TEMP - COR36);
cal_disp(5,CH37TEMP - COR37);
cal_disp(6,CH38TEMP - COR38);
cal_disp(7,CH39TEMP - COR39);
cal_disp(8,CH40TEMP - COR40);
}
}
}
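/*
 * Persistent settings layout in the external SPI flash (sector 0):
 *   page 0 - savedata[] (general flags), page 1 - TempHLimits, page 2 - TempLLimits,
 *   page 3 - YLIMIT (graph Y-axis range), page 4 - Correction (per-channel offsets).
 * Read_flag() restores the same blocks in the same order.
 */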
void Save_flag(void)
{
SPI_FLASH_SectorErase(0);
SPI_FLASH_BufferWrite((void*)savedata,SPI_FLASH_PageSize*0, sizeof(savedata));
SPI_FLASH_BufferWrite((void*)TempHLimits,SPI_FLASH_PageSize*1, sizeof(TempHLimits));
SPI_FLASH_BufferWrite((void*)TempLLimits,SPI_FLASH_PageSize*2, sizeof(TempLLimits));
SPI_FLASH_BufferWrite((void*)YLIMIT,SPI_FLASH_PageSize*3, sizeof(YLIMIT));
SPI_FLASH_BufferWrite((void*)Correction,SPI_FLASH_PageSize*4, sizeof(Correction));
// Save_Sflag();
}
void Read_flag(void)
{
SPI_FLASH_BufferRead((void *)savedata,SPI_FLASH_PageSize*0, sizeof(savedata));
SPI_FLASH_BufferRead((void *)TempHLimits,SPI_FLASH_PageSize*1, sizeof(TempHLimits));
SPI_FLASH_BufferRead((void *)TempLLimits,SPI_FLASH_PageSize*2, sizeof(TempLLimits));
SPI_FLASH_BufferRead((void *)YLIMIT,SPI_FLASH_PageSize*3, sizeof(YLIMIT));
SPI_FLASH_BufferRead((void*)Correction,SPI_FLASH_PageSize*4, sizeof(Correction));
// Read_history();
}
/****************************************************************
* Function: Flash_EnableReadProtection
* Description: Enable the read protection of user flash area.
* Input:
* Output:
* Return: 1: Read Protection successfully enable
* 2: Error: Flash read unprotection failed
*****************************************************************/
uint32_t Flash_EnableReadProtection(void)
{
/* Returns the FLASH Read Protection level. */
if( FLASH_OB_GetRDP() == RESET )
{
/* Unlock the Option Bytes */
FLASH_OB_Unlock();
/* Sets the read protection level. */
FLASH_OB_RDPConfig(OB_RDP_Level_1);
/* Start the Option Bytes programming process. */
if (FLASH_OB_Launch() != FLASH_COMPLETE)
{
/* Disable the Flash option control register access (recommended to protect
the option Bytes against possible unwanted operations) */
FLASH_OB_Lock();
/* Error: Flash read unprotection failed */
return (2);
}
/* Disable the Flash option control register access (recommended to protect
the option Bytes against possible unwanted operations) */
FLASH_OB_Lock();
/* Read Protection successfully enable */
return (1);
}
/* Read Protection successfully enable */
return (1);
}
/****************************************************************
* Function: Flash_DisableReadProtection
* Description: Disable the read protection of user flash area.
* Input:
* Output:
* Return: 1: Read Protection successfully disable
* 2: Error: Flash read unprotection failed
*****************************************************************/
uint32_t Flash_DisableReadProtection(void)
{
/* Returns the FLASH Read Protection level. */
if( FLASH_OB_GetRDP() != RESET )
{
/* Unlock the Option Bytes */
FLASH_OB_Unlock();
/* Sets the read protection level. */
FLASH_OB_RDPConfig(OB_RDP_Level_0);
/* Start the Option Bytes programming process. */
if (FLASH_OB_Launch() != FLASH_COMPLETE)
{
/* Disable the Flash option control register access (recommended to protect
the option Bytes against possible unwanted operations) */
FLASH_OB_Lock();
/* Error: Flash read unprotection failed */
return (2);
}
/* Disable the Flash option control register access (recommended to protect
the option Bytes against possible unwanted operations) */
FLASH_OB_Lock();
/* Read Protection successfully disable */
return (1);
}
/* Read Protection successfully disable */
return (1);
}
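/*
 * Handles one 64-byte HID report carrying a Modbus-RTU-like frame:
 * usbbuf[0] = slave address (0x01), usbbuf[1] = function (0x03 = read),
 * usbbuf[2..3] = start register, usbbuf[4..5] = register count, then a CRC16.
 * After the CRC check the start register selects the reply: live temperatures
 * (with corrections applied), RTC time, sensor type, instrument status,
 * channel limits or the display unit; the reply echoes the request header,
 * appends the data and its own CRC16, and is sent with USBD_HID_SendReport().
 */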
void UsbDataHandle(void)
{
u8 i;
u8 j;
uint16_t sendcrc;
u8 creclen;
u8 csendlen;
	u16 voltage;   // voltage
	u16 current;
	u32 power;     // power
	u16 frequancy;
	u16 PF;        // power factor
if(usbbuf[0] == 0x01)
{
		if(usbbuf[1] == 0x03)// read data
{
free(crec);
free(csend);
readcrc = usbbuf[6] << 8|usbbuf[7];
creclen = 6;
crec = (u8 *)malloc(sizeof(u8) * creclen);
			memset(crec, 0, creclen);// zero-initialise every element
for(i = 0;i < 6;i++)
{
crec[i] = usbbuf[i];
}
// crec[0] = 01;
// crec[1] = 03;
// crec[2] = usbbuf[2];
// crec[3] = usbbuf[3];
// crec[4] = usbbuf[4];
// crec[5] = usbbuf[5];
crcwatch = CRC16(crec,creclen);
			if(CRC16(crec,creclen) == readcrc)// CRC check
{
				if(usbbuf[2] == 0 && usbbuf[4] == 0 && ((usbbuf[5] < 129)))// read real-time data
{
csendlen = 7 + (usbbuf[5])*2;
csend = (u8*)malloc(sizeof(u8) * csendlen);
					memset(csend, 0, csendlen);// zero-initialise every element
					// length of the reply covered by the CRC
// usbsendbuf[0] = 0x00;
usbsendbuf[0] = 0x01;
usbsendbuf[1] = 0x03;
usbsendbuf[2] = usbbuf[2];
usbsendbuf[3] = usbbuf[3];
usbsendbuf[4] = usbbuf[4];
usbsendbuf[5] = usbbuf[5];
usbsendbuf[6] = usbbuf[5]*2;
if(usbbuf[5] + usbbuf[3] <= 16)
{
for(i = 0; i < usbbuf[5]; i++)
{
if(RecBuff[5+usbbuf[3]+i*2] == 0X4E && RecBuff[6+usbbuf[3]+i*2] == 0X1F)
{
usbsendbuf[7+i*2] = 0xE0;
usbsendbuf[8+i*2] = 0xE0;
}else{
usbsendbuf[7+i*2] = (u8)((((u16)RecBuff[5+usbbuf[3]+i*2] << 8) + RecBuff[6+usbbuf[3]+i*2] - (int)(Correction[usbbuf[3] + i] * 10)) >> 8);
usbsendbuf[8+i*2] = (u8)(((u16)RecBuff[5+usbbuf[3]+i*2] << 8) + RecBuff[6+usbbuf[3]+i*2] - (int)(Correction[usbbuf[3] + i] * 10));
}
// usbsendbuf[7+i*2] = 0;
// usbsendbuf[8+i*2] = 0;
}
for(i = 0;i < csendlen; i++)
{
csend[i] = usbsendbuf[i];
}
sendcrc = CRC16(csend,csendlen);
usbsendbuf[7+(usbbuf[5] - usbbuf[3])*2] = (u8)(sendcrc >> 8);
usbsendbuf[8+(usbbuf[5] - usbbuf[3])*2] = (u8)(sendcrc);
						USBD_HID_SendReport(&USB_OTG_dev,usbsendbuf,64);// echo the data back
// for(i = 9 + (usbbuf[5] - usbbuf[3])*2; i < 64 ; i++)
// {
// usbsendbuf[i] = 0;
// }
}else{
for(i = 0; i < usbbuf[5]; i++)
{
usbsendbuf[7+i*2] = 0xE0;
usbsendbuf[8+i*2] = 0xE0;
// usbsendbuf[7+i*2] = 0;
// usbsendbuf[8+i*2] = 0;
}
for(i = 0;i < csendlen; i++)
{
csend[i] = usbsendbuf[i];
}
sendcrc = CRC16(csend,csendlen);
usbsendbuf[7+(usbbuf[5])*2] = (u8)(sendcrc >> 8);
usbsendbuf[8+(usbbuf[5])*2] = (u8)(sendcrc);
USBD_HID_SendReport(&USB_OTG_dev,usbsendbuf,64);//数据回显
}
// for(i = 0;i < 8;i++)
// {
// csendlen = 23;
// csend = (u8*)malloc(sizeof(u8) * csendlen);
// memset(csend, 0, csendlen);//初始化,每个元素都为零
// usbsendbuf[0] = 0x01;
// usbsendbuf[1] = 0x03;
// usbsendbuf[2] = 0x00;
// usbsendbuf[3] = 16*(i+1);
// usbsendbuf[4] = 0x00;
// usbsendbuf[5] = 16*(i+1)+16;
// usbsendbuf[6] = 32;
// for(j=0;j<16;j++)
// {
// usbsendbuf[7+j] = 0xee;
//
// }
// for(j = 0;j < csendlen; j++)
// {
// csend[j] = usbsendbuf[j];
// }
// sendcrc = CRC16(csend,csendlen);
// usbsendbuf[23] = (u8)(sendcrc >> 8);
// usbsendbuf[24] = (u8)(sendcrc);
// USBD_HID_SendReport(&USB_OTG_dev,usbsendbuf,64);//数据回显
// }
}else if(usbbuf[2] == 0xC0 && usbbuf[3] == 0x00){//read the current date/time
csendlen = 15;
csend = (u8*)malloc(sizeof(u8) * csendlen);
memset(csend, 0, csendlen);//clear the buffer
//number of response bytes covered by the CRC
// usbreadtime = 1;
// usbsendbuf[0] = 0x00;
usbsendbuf[0] = 0x01;
usbsendbuf[1] = 0x03;
usbsendbuf[2] = usbbuf[2];
usbsendbuf[3] = usbbuf[3];
usbsendbuf[4] = usbbuf[4];
usbsendbuf[5] = usbbuf[5];
usbsendbuf[6] = 0x08;
usbsendbuf[7] = usbreadtime[0];
usbsendbuf[8] = usbreadtime[1];
usbsendbuf[9] = usbreadtime[2];
usbsendbuf[10] = usbreadtime[3];
usbsendbuf[11] = usbreadtime[4];
usbsendbuf[12] = usbreadtime[5];
usbsendbuf[13] = usbreadtime[6];
usbsendbuf[14] = 0;
for(i = 0;i < csendlen; i++)
{
csend[i] = usbsendbuf[i];
}
sendcrc = CRC16(csend,csendlen);
usbsendbuf[15] = (u8)(sendcrc >> 8);
usbsendbuf[16] = (u8)(sendcrc);
for(i = 17; i < 64 ; i++)
{
usbsendbuf[i] = 0;
}
Delay(200);
USBD_HID_SendReport(&USB_OTG_dev,usbsendbuf,64);//send the response back over USB HID
}else if(usbbuf[2] == 0xC0 && usbbuf[3] == 0x20){//read the sensor (thermocouple) type
csendlen = 9;
csend = (u8*)malloc(sizeof(u8) * csendlen);
memset(csend, 0, csendlen);//clear the buffer
//number of response bytes covered by the CRC
// usbsendbuf[0] = 0x00;
usbsendbuf[0] = 0x01;
usbsendbuf[1] = 0x03;
usbsendbuf[2] = usbbuf[2];
usbsendbuf[3] = usbbuf[3];
usbsendbuf[4] = usbbuf[4];
usbsendbuf[5] = usbbuf[5];
usbsendbuf[6] = 0x02;
usbsendbuf[7] = 0x00;
usbsendbuf[8] = TCTYPE;
for(i = 0;i < csendlen; i++)
{
csend[i] = usbsendbuf[i];
}
sendcrc = CRC16(csend,csendlen);
usbsendbuf[9] = (u8)(sendcrc >> 8);
usbsendbuf[10] = (u8)(sendcrc);
for(i = 11; i < 64 ; i++)
{
usbsendbuf[i] = 0;
}
USBD_HID_SendReport(&USB_OTG_dev,usbsendbuf,64);//send the response back over USB HID
}else if(usbbuf[2] == 0xC0 && usbbuf[3] == 0x10){//read the instrument status
csendlen = 9;
csend = (u8*)malloc(sizeof(u8) * csendlen);
memset(csend, 0, csendlen);//clear the buffer
//number of response bytes covered by the CRC
// usbsendbuf[0] = 0x00;
usbsendbuf[0] = 0x01;
usbsendbuf[1] = 0x03;
usbsendbuf[2] = usbbuf[2];
usbsendbuf[3] = usbbuf[3];
usbsendbuf[4] = usbbuf[4];
usbsendbuf[5] = usbbuf[5];
usbsendbuf[6] = 0x02;
usbsendbuf[7] = 0x00;
usbsendbuf[8] = eqmtstatus;
for(i = 0;i < csendlen; i++)
{
csend[i] = usbsendbuf[i];
}
sendcrc = CRC16(csend,csendlen);
usbsendbuf[9] = (u8)(sendcrc >> 8);
usbsendbuf[10] = (u8)(sendcrc);
for(i = 11; i < 64 ; i++)
{
usbsendbuf[i] = 0;
}
USBD_HID_SendReport(&USB_OTG_dev,usbsendbuf,64);//send the response back over USB HID
}else if(usbbuf[2] == 0x01 && usbbuf[3] == 0x01){//read the upper/lower temperature limits
csendlen = 11;
csend = (u8*)malloc(sizeof(u8) * csendlen);
memset(csend, 0, csendlen);//clear the buffer
//number of response bytes covered by the CRC
// usbsendbuf[0] = 0x00;
usbsendbuf[0] = 0x01;
usbsendbuf[1] = 0x03;
usbsendbuf[2] = usbbuf[2];
usbsendbuf[3] = usbbuf[3];
usbsendbuf[4] = usbbuf[4];
usbsendbuf[5] = usbbuf[5];
usbsendbuf[6] = 0x04;
usbsendbuf[7] = (u8)((int)(TempHLimits[(usbbuf[3]-1)/2]) >> 8);
usbsendbuf[8] = (u8)((int)(TempHLimits[(usbbuf[3]-1)/2]));
usbsendbuf[9] = (u8)((int)(TempLLimits[(usbbuf[3]-1)/2]) >> 8);
usbsendbuf[10] = (u8)((int)(TempLLimits[(usbbuf[3]-1)/2]));
for(i = 0;i < csendlen; i++)
{
csend[i] = usbsendbuf[i];
}
sendcrc = CRC16(csend,csendlen);
usbsendbuf[11] = (u8)(sendcrc >> 8);
usbsendbuf[12] = (u8)(sendcrc);
for(i = 13; i < 64 ; i++)
{
usbsendbuf[i] = 0;
}
USBD_HID_SendReport(&USB_OTG_dev,usbsendbuf,64);//send the response back over USB HID
}else if(usbbuf[2] == 0x02 && usbbuf[3] == 0x58){//read the temperature unit
free(csend);
csendlen = 9;
csend = (u8*)malloc(sizeof(u8) * csendlen);
memset(csend, 0, csendlen);//clear the buffer
//number of response bytes covered by the CRC
// usbsendbuf[0] = 0x00;
usbsendbuf[0] = 0x01;
usbsendbuf[1] = 0x03;
usbsendbuf[2] = usbbuf[2];
usbsendbuf[3] = usbbuf[3];
usbsendbuf[4] = usbbuf[4];
usbsendbuf[5] = usbbuf[5];
usbsendbuf[6] = 0x02;
usbsendbuf[7] = 0x00;
usbsendbuf[8] = UNIT;
for(i = 0;i < csendlen; i++)
{
csend[i] = usbsendbuf[i];
}
sendcrc = CRC16(csend,csendlen);
usbsendbuf[9] = (u8)(sendcrc >> 8);
usbsendbuf[10] = (u8)(sendcrc);
for(i = 11; i < 64 ; i++)
{
usbsendbuf[i] = 0;
}
USBD_HID_SendReport(&USB_OTG_dev,usbsendbuf,64);//send the response back over USB HID
}else if(usbbuf[2] == 0xEE && usbbuf[3] == 0xEE){//connection check (handshake)
csendlen = 15;
csend = (u8*)malloc(sizeof(u8) * csendlen);
memset(csend, 0, csendlen);//clear the buffer
//number of response bytes covered by the CRC
// usbsendbuf[0] = 0x00;
usbsendbuf[0] = 0x01;
usbsendbuf[1] = 0x03;
usbsendbuf[2] = usbbuf[2];
usbsendbuf[3] = usbbuf[3];
usbsendbuf[4] = usbbuf[4];
usbsendbuf[5] = usbbuf[5];
usbsendbuf[6] = 0x4A;
usbsendbuf[7] = 0x4E;
usbsendbuf[8] = 0x4B;
usbsendbuf[9] = 0x4F;
usbsendbuf[10] = 0x35;
usbsendbuf[11] = 0x30;
usbsendbuf[12] = 0x38;
usbsendbuf[13] = 0x7C;
usbsendbuf[14] = 0xA3;
for(i = 0;i < csendlen; i++)
{
csend[i] = usbsendbuf[i];
}
sendcrc = CRC16(csend,csendlen);
usbsendbuf[15] = (u8)(sendcrc >> 8);
usbsendbuf[16] = (u8)(sendcrc);
for(i = 17; i < 64 ; i++)
{
usbsendbuf[i] = 0;
}
USBD_HID_SendReport(&USB_OTG_dev,usbsendbuf,64);//send the response back over USB HID
}else if(usbbuf[2] == 0x80 && usbbuf[3] == 0x10){//read the electrical parameters
csendlen = 19;
csend = (u8*)malloc(sizeof(u8) * csendlen);
memset(csend, 0, csendlen);//clear the buffer
//number of response bytes covered by the CRC
// usbsendbuf[0] = 0x00;
usbsendbuf[0] = 0x01;
usbsendbuf[1] = 0x03;
usbsendbuf[2] = usbbuf[2];
usbsendbuf[3] = usbbuf[3];
usbsendbuf[4] = usbbuf[4];
usbsendbuf[5] = usbbuf[5];
usbsendbuf[6] = 0x0C;
usbsendbuf[7] = (u8)(voltage >> 8);
usbsendbuf[8] = (u8)voltage;
usbsendbuf[9] = (u8)(current >> 8);
usbsendbuf[10] = (u8)current;
usbsendbuf[11] = (u8)(power >> 24);
usbsendbuf[12] = (u8)(power >> 16);
usbsendbuf[13] = (u8)(power >> 8);
usbsendbuf[14] = (u8)power;
usbsendbuf[15] = (u8)(frequancy >> 8);
usbsendbuf[16] = (u8)frequancy;
usbsendbuf[17] = (u8)(PF >> 8);
usbsendbuf[18] = (u8)PF;
for(i = 0;i < csendlen; i++)
{
csend[i] = usbsendbuf[i];
}
sendcrc = CRC16(csend,csendlen);
usbsendbuf[19] = (u8)(sendcrc >> 8);
usbsendbuf[20] = (u8)(sendcrc);
for(i = 21; i < 64 ; i++)
{
usbsendbuf[i] = 0;
}
USBD_HID_SendReport(&USB_OTG_dev,usbsendbuf,64);//send the response back over USB HID
}
}
}else if(usbbuf[1] == 0x10){//function 0x10: write data
free(crec);
free(csend);
if(usbbuf[2] == 0xC0 && usbbuf[3] == 0x00)//set the RTC date/time
{
readcrc = usbbuf[15] << 8|usbbuf[16];
creclen = 15;
crec = (u8 *)malloc(sizeof(u8) * creclen);
memset(crec, 0, creclen);//clear the buffer
// crec[0] = 0x01;
// crec[1] = 0x10;
// crec[2] = usbbuf[2];
// crec[3] = usbbuf[3];
// crec[4] = usbbuf[4];
// crec[5] = usbbuf[5];
// crec[6] = usbbuf[6];
// crec[7] = usbbuf[7];
// crec[8] = usbbuf[8];
// crec[9] = usbbuf[9];
// crec[10] = usbbuf[10];
// crec[11] = usbbuf[11];
// crec[12] = usbbuf[12];
// crec[13] = usbbuf[13];
// crec[14] = usbbuf[14];
for(i = 0; i < creclen;i ++)
{
crec[i] = usbbuf[i];
}
crcwatch = CRC16(crec,creclen);
if(CRC16(crec,creclen) == readcrc)
{
csendlen = 6;
csend = (u8*)malloc(sizeof(u8) * csendlen);
memset(csend, 0, csendlen);//clear the buffer
usbsendbuf[0] = 0x01;
usbsendbuf[1] = 0x10;
usbsendbuf[2] = 0xC0;
usbsendbuf[3] = 0x00;
usbsendbuf[4] = 0x00;
usbsendbuf[5] = 0x00;
RTC_DateTypeDef RTC_DateStructure;
RTC_TimeTypeDef RTC_TimeStructure;
YEAR = usbbuf[8];
MONTH = usbbuf[9];
DATE = usbbuf[10];
HOURS =usbbuf[11];
MINUTES = usbbuf[12];
SECONDS = usbbuf[13];
RTC_DateStructure.RTC_Date = DATE;
RTC_DateStructure.RTC_Month = MONTH;
RTC_DateStructure.RTC_Year = YEAR;
RTC_SetDate(RTC_Format_BINorBCD, &RTC_DateStructure);
RTC_WriteBackupRegister(RTC_BKP_DRX, RTC_BKP_DATA);
RTC_TimeStructure.RTC_H12 = RTC_H12_AMorPM;
RTC_TimeStructure.RTC_Hours = HOURS;
RTC_TimeStructure.RTC_Minutes = MINUTES;
RTC_TimeStructure.RTC_Seconds = SECONDS;
RTC_SetTime(RTC_Format_BINorBCD, &RTC_TimeStructure);
RTC_WriteBackupRegister(RTC_BKP_DRX, RTC_BKP_DATA);
for(i = 0;i < csendlen; i++)
{
csend[i] = usbsendbuf[i];
}
sendcrc = CRC16(csend,csendlen);
usbsendbuf[6] = (u8)(sendcrc >> 8);
usbsendbuf[7] = (u8)(sendcrc);
for(i = 8; i < 64 ; i++)
{
usbsendbuf[i] = 0;
}
USBD_HID_SendReport(&USB_OTG_dev,usbsendbuf,64);//send the response back over USB HID
}
}else if(usbbuf[2] == 0x01){//set the upper/lower temperature limits
readcrc = usbbuf[11] << 8|usbbuf[12];
creclen = 11;
crec = (u8 *)malloc(sizeof(u8) * creclen);
memset(crec, 0, creclen);//clear the buffer
// crec[0] = 0x01;
// crec[1] = 0x10;
// crec[2] = usbbuf[2];
// crec[3] = usbbuf[3];
// crec[4] = usbbuf[4];
// crec[5] = usbbuf[5];
// crec[6] = usbbuf[6];
// crec[7] = usbbuf[7];
// crec[8] = usbbuf[8];
// crec[9] = usbbuf[9];
// crec[10] = usbbuf[10];
for(i = 0; i < creclen;i ++)
{
crec[i] = usbbuf[i];
}
crcwatch = CRC16(crec,creclen);
if(CRC16(crec,creclen) == readcrc)
{
csendlen = 6;
csend = (u8*)malloc(sizeof(u8) * csendlen);
memset(csend, 0, csendlen);//clear the buffer
usbsendbuf[0] = 0x01;
usbsendbuf[1] = 0x10;
usbsendbuf[2] = 0x01;
usbsendbuf[3] = 0x01;
usbsendbuf[4] = 0x00;
usbsendbuf[5] = 0x02;
TempHLimits[(usbbuf[3]-1)/2] = (float)(usbbuf[7] << 8 | usbbuf[8]);
TempLLimits[(usbbuf[3]-1)/2] = (float)(usbbuf[9] << 8 | usbbuf[10]);
Save_flag();
for(i = 0;i < csendlen; i++)
{
csend[i] = usbsendbuf[i];
}
sendcrc = CRC16(csend,csendlen);
usbsendbuf[6] = (u8)(sendcrc >> 8);
usbsendbuf[7] = (u8)(sendcrc);
for(i = 8; i < 64 ; i++)
{
usbsendbuf[i] = 0;
}
USBD_HID_SendReport(&USB_OTG_dev,usbsendbuf,64);//send the response back over USB HID
}
}else if(usbbuf[2] == 0x02 && usbbuf[3] == 0x58){//set the temperature unit
readcrc = usbbuf[9] << 8|usbbuf[10];
creclen = 9;
crec = (u8 *)malloc(sizeof(u8) * creclen);
memset(crec, 0, creclen);//clear the buffer
// crec[0] = 0x01;
// crec[1] = 0x10;
// crec[2] = usbbuf[2];
// crec[3] = usbbuf[3];
// crec[4] = usbbuf[4];
// crec[5] = usbbuf[5];
// crec[6] = usbbuf[6];
// crec[7] = usbbuf[7];
// crec[8] = usbbuf[8];
for(i = 0; i < creclen;i ++)
{
crec[i] = usbbuf[i];
}
crcwatch = CRC16(crec,creclen);
if(CRC16(crec,creclen) == readcrc)
{
csendlen = 6;
csend = (u8*)malloc(sizeof(u8) * csendlen);
memset(csend, 0, csendlen);//clear the buffer
usbsendbuf[0] = 0x01;
usbsendbuf[1] = 0x10;
usbsendbuf[2] = 0x02;
usbsendbuf[3] = 0x58;
usbsendbuf[4] = 0x00;
usbsendbuf[5] = 0x01;
UNIT = usbbuf[8];
for(i = 0;i < csendlen; i++)
{
csend[i] = usbsendbuf[i];
}
sendcrc = CRC16(csend,csendlen);
usbsendbuf[6] = (u8)(sendcrc >> 8);
usbsendbuf[7] = (u8)(sendcrc);
for(i = 8; i < 64 ; i++)
{
usbsendbuf[i] = 0;
}
USBD_HID_SendReport(&USB_OTG_dev,usbsendbuf,64);//send the response back over USB HID
}
}
}
}
}
//USB flash drive detection
u8 udisk_scan(void)
{
static u8 res;
// u8 i;
// char str[64];
if(usbstatus != UNCONNECTED)
{
res = CH376DiskConnect( );
if( res != USB_INT_SUCCESS )/* check whether a USB drive is attached and wait for insertion; for an SD card the MCU could poll the card-detect pin of the socket directly */
{
DrawUdisk1();
usbstatus = UNCONNECTED;
return NO_CONNECTION;
}
}
// Delay(200);
if(usbstatus != CONNECTED)
{
res = CH376DiskMount( );
if(res == USB_INT_SUCCESS)
{
DrawUdisk2();
usbstatus = CONNECTED;
return UDISK_READY;
}
}
return UDISK_NOTREADY;
}
//upper/lower limit monitoring
void Check_limits(u8 chn)
{
if((ch_temp[chn-1] - Correction[chn-1] < TempLLimits[chn-1] || ch_temp[chn-1] - Correction[chn-1] > TempHLimits[chn-1]) && FILTER == ft_on)
{
LCD_SetColors(LCD_COLOR_RED,LCD_COLOR_BACK);
}else{
LCD_SetColors(LCD_COLOR_WHITE,LCD_COLOR_BACK);
}
}
/*********************************************END OF FILE**********************/
<file_sep>/User/APP/power_on.c
/**
******************************************************************************
* @file    power_on.c
* @author  k
* @version V1.0
* @date    2018-04-29
* @brief   Power-on splash screen of the handheld multi-channel temperature meter
******************************************************************************
* @attention
*
* Platform : Fire (秉火) STM32 F429 development board
* Forum    : http://www.firebbs.cn
* Taobao   : http://firestm32.taobao.com
*
******************************************************************************
*/
#include "./led/bsp_led.h"
#include "./lcd/bsp_lcd.h"
#include "./key/bsp_key.h"
#include "./beep/bsp_beep.h"
#include "./tim/bsp_basic_tim.h"
#include "jk508.h"
void power_on(void)
{
// u8 i;
// char buf[10];
/* the foreground layer is selected by default after initialisation */
LCD_SetLayer(LCD_FOREGROUND_LAYER);
/* make the layer opaque; the parameter is the opacity, 0x00 = fully transparent, 0xFF = opaque */
LCD_SetTransparency(0xFF);
LCD_Clear(LCD_COLOR_BLACK);
/* after LCD_SetLayer(LCD_FOREGROUND_LAYER) all following LCD operations draw on the foreground
layer, until LCD_SetLayer is called again to select the background layer */
// for(i=0;i<16;i++)
// {
// LCD_SetColors(LCD_COLOR_WHITE,LCD_COLOR_BLACK);
// DISP_INS(5+i*20,5,"Initializing Channel");
// sprintf(buf,"%03d",i+1);
// DISP_INS(5+i*20,336,(uint8_t*)buf);
// Delay(500);
// }
LCD_SetColors(LCD_COLOR_GR7,LCD_COLOR_BLACK);
DISP_SYS(0,0,"JKOS");
LCD_SetColors(LCD_COLOR_WHITE,LCD_COLOR_BLACK);
DISP_INS(50,0,"REV 1.0");
DISP_INS(200,0,"JK508 Multi-channel Temp.Meter");
DISP_INS(230,0,"REV 1.0");
page_flag = poweron;
}
<file_sep>/User/touchscreen.h
#ifndef __TOUCH_SCREEN_H
#define __TOUCH_SCREEN_H
#include "stm32f4xx.h"
#define TOUCH_XPLUS_GPIO_PORT GPIOA
#define TOUCH_XPLUS_GPIO_CLK RCC_AHB1Periph_GPIOA
#define TOUCH_XPLUS_GPIO_PIN GPIO_Pin_1
#define TOUCH_XPLUS_PINSOURCE GPIO_PinSource1
#define TOUCH_XPLUS_CHANNEL ADC_Channel_1
#define TOUCH_XMINUS_GPIO_PORT GPIOA
#define TOUCH_XMINUS_GPIO_CLK RCC_AHB1Periph_GPIOA
#define TOUCH_XMINUS_GPIO_PIN GPIO_Pin_1
#define TOUCH_XMINUS_PINSOURCE GPIO_PinSource1
#define TOUCH_XMINUS_CHANNEL ADC_Channel_1
#define TOUCH_YPLUS_GPIO_PORT GPIOA
#define TOUCH_YPLUS_GPIO_CLK RCC_AHB1Periph_GPIOA
#define TOUCH_YPLUS_GPIO_PIN GPIO_Pin_0
#define TOUCH_YPLUS_PINSOURCE GPIO_PinSource0
#define TOUCH_YPLUS_CHANNEL ADC_Channel_0
#define TOUCH_YMINUS_GPIO_PORT GPIOA
#define TOUCH_YMINUS_GPIO_CLK RCC_AHB1Periph_GPIOA
#define TOUCH_YMINUS_GPIO_PIN GPIO_Pin_1
#define TOUCH_YMINUS_PINSOURCE GPIO_PinSource1
#define TOUCH_YMINUS_CHANNEL ADC_Channel_1
void Touch_GPIO_Config(void);
#endif /* __TOUCH_SCREEN_H */
<file_sep>/User/tim/bsp_basic_tim.c
/**
******************************************************************************
* @file bsp_basic_tim.c
* @author STMicroelectronics
* @version V1.0
* @date 2015-xx-xx
* @brief   Basic timer periodic-interrupt example
******************************************************************************
* @attention
*
* Platform : Fire (秉火) STM32 F429 development board
* Forum    : http://www.firebbs.cn
* Taobao   : http://firestm32.taobao.com
*
******************************************************************************
*/
#include "./tim/bsp_basic_tim.h"
#include "./usart/bsp_debug_usart.h"
#include "./key/bsp_key.h"
#include "./lcd/bsp_lcd.h"
#include "./ch376/ch376.h"
#include "usbd_hid_core.h"
#include "usbd_usr.h"
#include "usbd_desc.h"
#include "jk508.h"
extern u8 key_value;
extern u16 count;
extern u8 count_flag;
u8 tempreq[8] = {0x01,0x03,0x00,0x00,0x00,0x10,0x44,0x06};
u8 reqcode;
u8 brightness;
extern __ALIGN_BEGIN USB_OTG_CORE_HANDLE USB_OTG_dev __ALIGN_END;
/**
* @brief  Interrupt priority configuration for basic timer TIMx, x[6,7]
* @param  none
* @retval none
*/
static void TIMx_NVIC_Configuration(void)
{
NVIC_InitTypeDef NVIC_InitStructure;
// use priority group 0
NVIC_PriorityGroupConfig(NVIC_PriorityGroup_0);
// interrupt source
NVIC_InitStructure.NVIC_IRQChannel = BASIC_TIM_IRQn;
// preemption priority
NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 0;
// sub-priority
NVIC_InitStructure.NVIC_IRQChannelSubPriority = 3;
NVIC_InitStructure.NVIC_IRQChannelCmd = ENABLE;
NVIC_Init(&NVIC_InitStructure);
}
/*
* Note: TIM_TimeBaseInitTypeDef has five members, but the TIM6/TIM7 registers only implement
* TIM_Prescaler and TIM_Period, so for TIM6/TIM7 only those two members need to be initialised;
* the other three members exist only on the general-purpose and advanced-control timers.
*-----------------------------------------------------------------------------
* TIM_Prescaler         present on all timers
* TIM_CounterMode       not on TIMx, x[6,7] (basic timers), present on the others
* TIM_Period            present on all timers
* TIM_ClockDivision     not on TIMx, x[6,7] (basic timers), present on the others
* TIM_RepetitionCounter only on TIMx, x[1,8] (advanced-control timers)
*-----------------------------------------------------------------------------
*/
static void TIM_Mode_Config(void)
{
TIM_TimeBaseInitTypeDef TIM_TimeBaseStructure;
// enable the TIMx clock, x[6,7]
RCC_APB1PeriphClockCmd(BASIC_TIM_CLK, ENABLE);
/* an update event/interrupt is generated every TIM_Period + 1 counts */
// the counter runs from 0 to 399, i.e. 400 ticks per update period (40 ms at the 10 kHz tick)
TIM_TimeBaseStructure.TIM_Period = 400-1;
// timer clock source TIMxCLK = 2 * PCLK1
// PCLK1 = HCLK / 4
// => TIMxCLK = HCLK / 2 = SystemCoreClock / 2 = 90 MHz
// timer tick frequency = TIMxCLK / (TIM_Prescaler + 1) = 10000 Hz
TIM_TimeBaseStructure.TIM_Prescaler = 9000-1;
// initialise the basic timer (TIM6)
TIM_TimeBaseInit(BASIC_TIM, &TIM_TimeBaseStructure);
// clear the update interrupt flag
TIM_ClearFlag(BASIC_TIM, TIM_FLAG_Update);
// enable the update interrupt
TIM_ITConfig(BASIC_TIM,TIM_IT_Update,ENABLE);
// enable the timer
TIM_Cmd(BASIC_TIM, ENABLE);
}
/**
* @brief  Configure the GPIO used by the timer PWM output (LCD backlight)
* @param  none
* @retval none
*/
static void TIMx_GPIO_Config(void)
{
/* GPIO_InitTypeDef structure for the pin configuration */
GPIO_InitTypeDef GPIO_InitStructure;
/* enable the GPIO peripheral clock */
RCC_AHB1PeriphClockCmd (LTDC_BL_GPIO_CLK, ENABLE);
/* connect the pin to the timer alternate function */
GPIO_PinAFConfig(LTDC_BL_GPIO_PORT,GENERAL_OCPWM_PINSOURCE,GENERAL_OCPWM_AF);
/* configure the timer channel pin */
GPIO_InitStructure.GPIO_Pin = LTDC_BL_GPIO_PIN;
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_AF;
GPIO_InitStructure.GPIO_OType = GPIO_OType_PP;
GPIO_InitStructure.GPIO_PuPd = GPIO_PuPd_NOPULL;//GPIO_PuPd_UP;
GPIO_InitStructure.GPIO_Speed = GPIO_Speed_100MHz;
GPIO_Init(LTDC_BL_GPIO_PORT, &GPIO_InitStructure);
}
/*
* Configures GENERAL_TIM (TIM5) channel 3 as a PWM output for backlight dimming;
* 'duty' selects the duty cycle out of a 100-tick PWM period.
*/
void TIM_PWMOUTPUT_Config(u8 duty)
{
TIM_TimeBaseInitTypeDef TIM_TimeBaseStructure;
TIM_OCInitTypeDef TIM_OCInitStructure;
// enable the TIMx clock, x[2,3,4,5,12,13,14]
RCC_APB1PeriphClockCmd(GENERAL_TIM_CLK, ENABLE);
/* an update event/interrupt is generated every TIM_Period + 1 counts */
// the counter runs from 0 to 99, i.e. 100 ticks per PWM period
TIM_TimeBaseStructure.TIM_Period = 100-1;
// general-purpose timer clock TIMxCLK = HCLK / 2 = 90 MHz
// counter frequency = TIMxCLK / (TIM_Prescaler + 1) = 10 kHz, giving a 100 Hz PWM
TIM_TimeBaseStructure.TIM_Prescaler = 9000-1;
// clock division
TIM_TimeBaseStructure.TIM_ClockDivision=TIM_CKD_DIV1;
// counting mode
TIM_TimeBaseStructure.TIM_CounterMode=TIM_CounterMode_Up;
// initialise the timer TIMx, x[2,3,4,5,12,13,14]
TIM_TimeBaseInit(GENERAL_TIM, &TIM_TimeBaseStructure);
/* PWM mode configuration */
/* PWM1 mode configuration: channel 3 */
TIM_OCInitStructure.TIM_OCMode = TIM_OCMode_PWM1; //PWM mode 1
TIM_OCInitStructure.TIM_OutputState = TIM_OutputState_Enable;
TIM_OCInitStructure.TIM_Pulse = duty-1;
TIM_OCInitStructure.TIM_OCPolarity = TIM_OCPolarity_High; //output is high while the counter is below the compare value
TIM_OC3Init(GENERAL_TIM, &TIM_OCInitStructure); //configure channel 3
/* enable preload on channel 3 */
TIM_OC3PreloadConfig(GENERAL_TIM, TIM_OCPreload_Enable);
// enable the timer
TIM_Cmd(GENERAL_TIM, ENABLE);
}
/**
* @brief  Initialise the basic timer; an update interrupt fires every 40 ms
* @param  none
* @retval none
*/
void TIMx_Configuration(void)
{
TIMx_NVIC_Configuration();
TIM_Mode_Config();
TIMx_GPIO_Config();
TIM_PWMOUTPUT_Config(brightness);
}
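/* Basic-timer update interrupt. With TIM_Period = 400 and TIM_Prescaler = 9000 (90 MHz timer clock)
 * the handler runs every 40 ms: it scans the keypad, services USB HID traffic, sends the temperature
 * request over the debug USART every 20 ticks (~0.8 s), polls the USB drive, and dims the backlight
 * after a period of key inactivity (7500 ticks = 5 minutes). */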
void BASIC_TIM_IRQHandler (void)
{
static u8 sendcount;
static u16 dim_time;
static u8 dimflag;
u8 i;
if(TIM_GetITStatus( BASIC_TIM, TIM_IT_Update) != RESET )
{
Key_Scan();//scan the keypad
DCD_EP_PrepareRx(&USB_OTG_dev,HID_OUT_EP,usbbuf,64);//arm reception of the next HID report from the PC
if(UsbHidReceiveComplete) //a report has been received
{
UsbHidReceiveComplete=0;
UsbDataHandle();
}
if(sendcount == 20)
{
for(i=0;i<8;i++)
{
Usart_SendByte(DEBUG_USART,tempreq[i]);//send the temperature request frame
}
if(page_flag != poweron)
{
udisk_scan();
}
sendcount = 0;
}
if(key_value == 0xFF && dimflag == 0)
{
if(DIM == DOFF)
{
}else if(DIM == D5){
if(dim_time < 7500)
{
dim_time++;
}else if(dim_time == 7500)
{
TIM_PWMOUTPUT_Config(10);
dimflag = 1;
}
}else if(DIM == D10){
if(dim_time < 15000)
{
dim_time++;
}else if(dim_time == 15000)
{
TIM_PWMOUTPUT_Config(10);
dimflag = 1;
}
}else if(DIM == D15){
if(dim_time < 22500)
{
dim_time++;
}else if(dim_time == 22500)
{
TIM_PWMOUTPUT_Config(10);
dimflag = 1;
}
}else if(DIM == D30){
if(dim_time < 45000)
{
dim_time++;
}else if(dim_time == 45000)
{
TIM_PWMOUTPUT_Config(10);
dimflag = 1;
}
}
}else if(key_value != 0xff){
if(dim_time == (DIM * 7500))
{
TIM_PWMOUTPUT_Config(brightness);
dimflag = 0;
}
dim_time = 0;
}
sendcount ++;
TIM_ClearITPendingBit(BASIC_TIM,TIM_IT_Update);
}
}
///**
// * @brief 初始化高级控制定时器定时,1ms产生一次中断
// * @param 无
// * @retval 无
// */
//void TIMx_Configuration(void)
//{
// TIMx_GPIO_Config();
//
// TIM_PWMOUTPUT_Config();
//}
/*********************************************END OF FILE**********************/
<file_sep>/User/tim/bsp_basic_tim.h
#ifndef __BASIC_TIM_H
#define __BASIC_TIM_H
#include "stm32f4xx.h"
#define BASIC_TIM TIM6
#define BASIC_TIM_CLK RCC_APB1Periph_TIM6
#define BASIC_TIM_IRQn TIM6_DAC_IRQn
#define BASIC_TIM_IRQHandler TIM6_DAC_IRQHandler
#define GENERAL_TIM TIM5
#define GENERAL_TIM_CLK RCC_APB1Periph_TIM5
#define GENERAL_TIM_IRQn TIM5_IRQn
#define GENERAL_TIM_IRQHandler TIM5_IRQHandler
void TIMx_Configuration(void);
void TIM_PWMOUTPUT_Config(u8 duty);
#endif /* __BASIC_TIM_H */
<file_sep>/User/touchscreen.c
/**
******************************************************************************
* @file    touchscreen.c
* @author  fire
* @version V1.0
* @date    2015-xx-xx
* @brief   Touch screen GPIO driver
******************************************************************************
* @attention
*
* Platform : Fire (秉火) STM32 F429 development board
* Forum    : http://www.firebbs.cn
* Taobao   : http://firestm32.taobao.com
*
******************************************************************************
*/
#include "stm32f4xx.h"
#include "./RTC/bsp_rtc.h"
#include "./usart/bsp_debug_usart.h"
#include "./lcd/bsp_lcd.h"
#include "jk508.h"
#include "touchscreen.h"
void Touch_GPIO_Config(void)
{
/* GPIO_InitTypeDef structure for the pin configuration */
GPIO_InitTypeDef GPIO_InitStructure;
/* enable the GPIO peripheral clocks */
RCC_AHB1PeriphClockCmd (TOUCH_XPLUS_GPIO_CLK|TOUCH_XMINUS_GPIO_CLK|TOUCH_YPLUS_GPIO_CLK|
TOUCH_YMINUS_GPIO_CLK,ENABLE);
GPIO_InitStructure.GPIO_Pin = TOUCH_YPLUS_GPIO_PIN;/* select the pin */
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_IN;/* input mode */
GPIO_InitStructure.GPIO_OType = GPIO_OType_OD;
GPIO_InitStructure.GPIO_PuPd = GPIO_PuPd_UP; /* pull-up */
GPIO_InitStructure.GPIO_Speed = GPIO_Speed_25MHz;/* pin speed 25 MHz */
GPIO_Init(TOUCH_YPLUS_GPIO_PORT, &GPIO_InitStructure);
GPIO_InitStructure.GPIO_Pin = TOUCH_XMINUS_GPIO_PIN;/* select the pin */
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_OUT;/* output mode */
GPIO_InitStructure.GPIO_OType = GPIO_OType_PP;/* push-pull output */
GPIO_InitStructure.GPIO_PuPd = GPIO_PuPd_UP; /* pull-up */
GPIO_InitStructure.GPIO_Speed = GPIO_Speed_25MHz;/* pin speed 25 MHz */
GPIO_Init(TOUCH_XMINUS_GPIO_PORT, &GPIO_InitStructure);
GPIO_ResetBits(TOUCH_XMINUS_GPIO_PORT,TOUCH_XMINUS_GPIO_PIN);
}
| 3bbf7c23c1f66de9ef1a0dddcb18aa99e6a04713 | [
"C"
] | 7 | C | gdgly/JK508HAND | e740f0843df860bd93de3e4b8cb59084c88deda1 | eb230fbd0561632601c6bdbaa10a11135593a17c | |
refs/heads/master | <file_sep># Female Secrets
## Preamble
We all know how important it is to open up and share our secrets and personal experiences with a close friend (or friends); however, there is some fear of doing so over social networks, since we never know who might end up reading our most intimate secrets because of a slip by ourselves or by the person(s) we chose to share them with. This application was created for that reason: it lets you encrypt and decrypt messages using the Caesar cipher, so that you and your friend(s) can exchange messages by encrypting and decrypting them ;-)
## Prototypes


Feedback:
Utilizar un lenguaje adecuado al tema de la página.
<file_sep>const contrasena = document.getElementById("contrasena");
const btnIngresar = document.getElementById("btnIngresar");
const respuestaCorrecta = "LABORATORIA"
let contador = 0;
const div1 = document.getElementById('div1');
const div2 = document.getElementById('div2');
const div3 = document.getElementById('div3');
let inputTexto = document.getElementById('inputTexto');
let btnCifrar = document.getElementById('btnCifrar');
let btnDescifrar = document.getElementById('btnDescifrar');
let outputResultado = document.getElementById('outputResultado');
let offset = document.getElementById('offset');
btnIngresar.addEventListener('click', () => {
if (contrasena.value == respuestaCorrecta)
{
div1.classList.add('ocultar');
div2.classList.remove('ocultar');
}
else {
document.getElementById('mensjPasswordIncorrecta').innerHTML="Contraseña incorrecta, inténtalo nuevamente";
contador ++;
if(contador===3){
div1.classList.add('ocultar');
div2.classList.add('ocultar');
div3.classList.remove('ocultar');
}
}
});
btnCifrar.addEventListener("click", () =>{
let string = inputTexto.value;
let offsetC = offset.value;
outputResultado.value = cipher.encode(offsetC, string);
});
btnDescifrar.addEventListener("click", () =>{
let string = inputTexto.value;
let offsetC = offset.value;
outputResultado.value = cipher.decode(offsetC, string);
});
<file_sep>window.cipher = {
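// Caesar cipher over uppercase A-Z (ASCII 65-90) only; any other character is passed through unchanged.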
encode: (offsetC, string) => {
let mensajeCifrado = "";
for (let i = 0; i < string.length; i++) {
let asciiNumber = string.charCodeAt(i);
if (asciiNumber >= 65 && asciiNumber <= 90) {
mensajeCifrado += String.fromCharCode((asciiNumber - 65 + parseInt(offsetC)) % 26 + 65);
}
else {
mensajeCifrado+= string[i];
}
}
return mensajeCifrado;
},
decode: (offsetC, string) => {
let caractercifrado='';
let mensajeDescifrado = '';
for (let i = 0; i < string.length; i++) {
let asciiNumber = string.charCodeAt(i);
if (asciiNumber >= 65 && asciiNumber <= 90) {
caractercifrado = (asciiNumber - 65 - parseInt(offsetC)) % 26 + 65;
if (caractercifrado<65) {
caractercifrado = caractercifrado+26;
}
mensajeDescifrado += String.fromCharCode(caractercifrado);
}
else {
mensajeDescifrado+=string[i];
}
}
return mensajeDescifrado;
}
};
| 6faf13ecadb36c0112dcc5481341c7ecf4fb3a7b | [
"Markdown",
"JavaScript"
] | 3 | Markdown | Indirachsm/LIM010-Cipher | 8272efb255ceb5acea161e4bbce17676f91a621b | 0e8f1b37cfb99c15313ee8d8febadf5da36a98af | |
refs/heads/master | <file_sep>#include<stdio.h>
int main(){
float nota;
printf("Ingrese la calificación ");
scanf("%f",¬a);
if (nota>10.5) {
puts("el alumno ha aprobado"); //solo imprime dentro de un condicional
}
else{
puts("el alumno ha reprobado");
}
}
<file_sep>#include<stdio.h>
int main() {
int num,rever;
printf("Ingrese el número a reversar");
scanf("%i",&num);
while(num != 0){
rever = num % 10;
printf("%i",rever);
num = num / 10;
}
return 0;
}
<file_sep># git-curso
My first git project
Test project with git
<file_sep>/* Ask the user for two numbers and add, subtract, multiply and divide them */
#include<stdio.h>
int main(){
int n1,n2, suma=0, resta=0, mult=0, div=0;
float grad;
printf ("digite 2 numeros: ");
scanf ("%i %i",&n1,&n2);
suma = n1 + n2;
resta = n1 - n2;
mult = n1 * n2;
div = n1 / n2;
printf("Los resultados son %i %i %i %i \n",suma,resta,mult,div);
printf("La suma es %i \n",suma);
printf("La resta es %i \n",resta);
printf("La multiplicacion %i \n",mult);
printf("La division es %i \n",div);
//Convert from Celsius to Fahrenheit
printf("Ingrese los grados C para pasarlos a F ");
scanf ("%f",&grad);
grad= (grad*9/5)+32;
printf("Los grados F son %.2f",grad);
return 0;
}
<file_sep>#include<stdio.h>
int main(){
int n, suma=0, i, cont=0;
printf("Por favor ingrese n");
scanf("%i",&n);
for(i=1;i<=n;i++){
if(i%2==0){
suma=suma+i;
cont++;
}
}
printf("La suma de pares es %i, y la cantidad de pares es %i",suma,cont);
return 0;
}
<file_sep>#include<stdio.h>  // standard input/output library
#include<string.h> // string handling library
#include<math.h> // math library
#include<stdlib.h>
/*
I remember it
I can't believe it
*/
#define PI 3.1416 //macro to define a constant, or an alias (e.g. to shorten a for loop)
int y=5; //global variable
int main (){ //main function
int x=10; //local variable (inside the function)
float suma=0;
suma=PI + x;
printf("la suma es: %.2f",suma); //prints the variable with only two decimals
return 0; //tells C that the process finished correctly
}
<file_sep>/* The for loop:
Syntax:
for(initialization; condition; increment) {
statements;
...
} */
//Print the first 10 numbers
#include<stdio.h>
int main() {
int i;
for(i=1;i<=10;i++){
printf("\t %i",i);
}
return 0;
}
<file_sep>#include<stdio.h>
int main(){
char a='e'; //size 1 byte
short b=-15;//size 2 bytes, range -32768...32767
int c=1024; //size 2 bytes (on 16-bit compilers), range -32768...32767
unsigned int d=128; // 2 bytes, range 0...65535
long e=123456;//4 bytes, range -2147483648...2147483647
float f=15.678; // 4 bytes
double m=123123.123123; //size 8 bytes
printf("El elemento es: %c \n",a);
printf("El elemento es: %i \n",b);
printf("El elemento es: %i\n",c);
printf("El elemento es: %u\n",d);
printf("El elemento es: %li\n",e);
printf("El elemento es: %.3f\n",f);
printf("El elemento es: %.lf\n",m);//the '.' with no precision digits drops all the decimals
return 0;
}
<file_sep>/*The while statement
Syntax:
while(condition) {
statements;
} */
//Print the first 10 numbers on screen
#include<stdio.h>
int main(){
int i;
i=1;
while(i<=10){
printf("%i. \n",i);
i ++; //sumarle la unidad para restar i --;
}
return 0;
}
<file_sep>#include<stdio.h>
#include<math.h>
int main(){
int n1,n2;
float raiz;
printf("Ingrese los dos números: ");
scanf("%i %i",&n1,&n2);
if (n1>n2){
printf(" El numero mayor es %i",n1);
}
else if(n1<n2){
printf(" El numero mayor es %i",n2);
}
else {
printf("Los números son iguales");
}
//Ejercicio raiz
if (n1<0){
printf("\n La raiz es imaginaria");
}
else {
raiz= sqrt(n1);
printf("\n La raiz de %i es: %.2f",n1,raiz);
}
return 0;
}
<file_sep>/* Repetition: the do...while loop
Syntax:
do{
Statements...
}while (condition)   (the body runs at least once)*/
//Print the first 10 numbers
#include<stdio.h>
int main(){
int i=1;
char opc;
do{
printf("%i \n",i);
i++;
}while(i<=10);
do{
fflush(stdin); //clear the input buffer (non-standard, but common on Windows compilers)
printf("Hola \n");
printf("Digite 's' para saludar nuevamente: ");//the 's' key
scanf("%c",&opc);
}while(opc=='s' || opc=='S'); // || is the logical OR operator
return 0;
}
<file_sep>/* A shop offers a 15% discount on the purchase total and
a customer wants to know how much to pay in the end */
#include<stdio.h>
int main(){
float precio_inicio,precio_desc;
int horas, pago_hora, pago_total;
printf("Ingrese el total de la compra en $: ");
scanf("%f",&precio_inicio);
precio_desc=precio_inicio*0.85;
printf("El valor a pagar es %.2f \n",precio_desc);
//salary given the hours worked and the hourly rate
printf("Ingrese las horas trabajadas: ");
scanf("%i",&horas);
printf("Ingrese el pago por hora: ");
scanf("%i",&pago_hora);
pago_total=horas*pago_hora;
printf("el salario total es %i",pago_total);
return 0;
}
<file_sep>#include<stdio.h>
#include<string.h> // required for strcmp
int main(){
char nombre [30], signo[20];
printf("Digite su nombre: ");
gets(nombre);
printf("Digite su signo: ");
gets(signo);
if (strcmp(signo,"aries")==0){
printf("\n %s es signo aries", nombre);
}
else{
printf("No es signo aries");
}
return 0;
}
<file_sep>// Compute the hypotenuse of a right triangle
#include<stdio.h>
#include<math.h>
#define PI 3.1416
int main(){
float hipotenusa, cateto1, cateto2, radio;
//hipotenusa
printf("Digite los dos catetos: ");
scanf("%f %f",&cateto1,&cateto2);
hipotenusa = sqrt(pow(cateto1,2) + pow(cateto2,2));
printf("La hipotenusa del triangulo %.2f \n", hipotenusa);
printf("Defina el radio de la circunferencia: ");
scanf("%f",&radio);
radio = radio* 2 * PI;
printf("La longitud de la circunferencia es %.2f",radio);
return 0;
}
<file_sep>#include<stdio.h>
int main (){
int a,b,c;
a=b=c=10; // = is the assignment operator
a = a + 10;
a += 10; //equivalent to a = a + 10
a -= 5; //a = a - 5
a *= 2; //a = a * 2
a /= 2; // a = a / 2
printf("El valor de a,b,c es %i %i %i",a,b,c); //print several variables in the same printf
return 0;
}
<file_sep>#include<stdio.h>
#include<math.h>
int main(){
//area of a trapezoid
int base_M, base_m, area, altura,n1,n2,n3;
float media;
printf("digite la base mayor: ");
scanf("%i",&base_M);
printf("digite la base menor: ");
scanf("%i",&base_m);
printf("digite la base altura: ");
scanf("%i",&altura);
area=((base_M + base_m)*altura)/2;
printf("el area es %i",area);
//arithmetic mean of 3 numbers
printf("digite los 3 numeros para sacar la media :");
scanf("%i %i %i",&n1,&n2,&n3);
media=(n1+n2+n3)/3.0; //divide by 3.0 so the integer division does not truncate the mean
printf("la media es %.2f",media);
return 0;
}
<file_sep>//entradas y salidas
#include<stdio.h>
int main(){
int a=10;
float b=15.5;
char c='e';
printf("digite el valor de la variable a:");
scanf("%i",&a); //lee los datos de entrada
printf("El valor es %i\n",a);
printf("%i %f %c",a,b,c); //imprimir varias variables a la vez
return 0;
}
<file_sep>// Determine whether a number is prime or not (note: this check also reports 1 as prime)
#include<stdio.h>
int main(){
int i, numero, cont=0;
printf("Digite un numero: ");
scanf("%i",&numero);
for (i=1;i<=numero;i++){
if(numero%i==0){
cont++;
}
}
if (cont>2){
printf("\n El número es compuesto");
}
else {
printf("\n El número es primo");
}
return 0;
}
<file_sep>#include<stdio.h>
#include<stdlib.h>
int main(){
char tecla;
printf("Programa borrado de pantalla");
printf("\n --------------------------------------\n");
printf("\n --------------------------------------\n");
printf("Digite el número 1 :");
scanf("%c", &tecla);
if (tecla=='1'){
system("cls"); //limpiado de pantalla
printf("Ha funcionado cls");
}
else {
fflush(stdin); //clear the input buffer (a memory area where input data is stored temporarily, FIFO); non-standard, but common on Windows compilers
printf("\n No ha funcionado cls");
printf("\n Por favor digite 1 : ");
scanf("%c",&tecla);
if (tecla=='1'){
system("cls");
printf("Ha funcionado cls");
}
else {
printf("\n No ha funcionado cls");
}
}
return 0;
}
<file_sep>/* The switch statement
switch (selector) { //selector must be an integer or char variable, it cannot be double or float
case label1: statements1; break;
case label2: statements2; break;
case label3: statements3; break;
default: statements; //runs when none of the cases match
}*/
#include<stdio.h>
int main () {
char vocal;
printf("Digite una vocal: ");
scanf("%c",&vocal);
switch(vocal){
case 'a': printf("\n Vocal a");break;
case 'e': printf("\n Vocal e");break;
case 'i': printf("\n Vocal i");break;
case 'o': printf("\n Vocal o");break;
case 'u': printf("\n Vocal u");break;
default: printf("Se equivoco, no es una vocal");
}
return 0;
}
<file_sep>#include<stdio.h>
int main(){
int i, num;
long x=0,y=1,z=1;
printf("Digite la cantidad de elementos de la serie: ");
scanf("%i",&num);
for(i=1;i<=num;i++){
printf("%i, ",z);
z= x + y;
x= y;
y= z;
}
return 0;
}
<file_sep>//Exercise: print a tree of asterisks
#include<stdio.h>
int main() {
int i, n,r;
printf("Ingrese la cantidad de filas: ");
scanf("%i",&n);
for (i=1;i<=n;i++) {
for (r=0;r<(n-i);r++){
printf(" ");
}
for (r=1;r<=(2*i-1);r++){
printf("*");
}
printf("\n");
}
return 0;
}
<file_sep>#include<stdio.h>
int main(){
int cont=1, n, suma=0;
printf("ingrese n: ");
scanf("%i",&n);
while(cont<=n){
if(cont%2==0){
suma -= cont;
cont++;
}
else {
suma += cont;
cont ++;
}
}
printf("\n la suma es: %i",suma);
return 0;
}
| 4180e6362950112f825993df998eeb604ae65da8 | [
"Markdown",
"C"
] | 23 | C | danilop-png/git-curso | 480fb616a63b4c55a68580536409e11d146d799a | ecb6b39006316d625eda7305a8aba43c62bcd98a | |
refs/heads/master | <file_sep>class Example extends React.Component {
render() {
//create variables for welcome message
const greeting = 'Howdy';
const name = 'Molly';
const message = 'please check your order:';
const welcome = `${greeting} ${name} ${message}`;
//vars to hold details about sign
const sign = 'Montagure House';
const tiles = sign.length;
const subTotal = tiles * 5;
const shipping = 7;
const grandTotal = subTotal + shipping;
return (
<div>
<h1>Elderflower</h1>
<div id='content'>
<div id='greeting' className='message'>
{welcome}
</div>
<table>
<tbody>
<tr>
<td>Custom sign: </td>
<td id='userSign'>{sign}</td>
</tr>
<tr>
<td>Total tiles: </td>
<td id='tiles'>{tiles}</td>
</tr>
<tr>
<td>Subtotal: </td>
<td id='subTotal'>${subTotal}</td>
</tr>
<tr>
<td>Shipping: </td>
<td id='shipping'>${shipping}</td>
</tr>
<tr>
<td>Grand total: </td>
<td id='grandTotal'>${grandTotal}</td>
</tr>
</tbody>
</table>
<a href='#' className='action'>
Pay Now
</a>
</div>
</div>
);
}
}
| de7272720d15ecbbe52d9e9292caa210be917bf7 | [
"JavaScript"
] | 1 | JavaScript | fallencloud/static_react_storefront_02 | 089f528c033b049eb1d7efc1daaffd462f04ae89 | 5e6aeab19c4ffb6d653ae630e47f9145b58ea92b | |
refs/heads/master | <file_sep>class BookLocators:
NAME_LOCATOR ='article.product_pod h3 a'
LINK_LOCATOR ='article.product_pod h3 a'
PRICE_LOCATOR ='div.product_price p.price_color'
RATING_LOCATOR ='article.product_pod p.star-rating'
| 8f3961211274dd0e957b41c3f635ada5e0599663 | [
"Python"
] | 1 | Python | itamitut/Python_UDEMY_BooksScraping | 7dd3d1417776b72672fb05eb21fb3e8fd2232dab | d0e574dc913042d455d13fdb718d13b1bce9ddc9 | |
refs/heads/master | <repo_name>zmechanic/GccApplication4<file_sep>/common.h
#ifndef COMMON_H_
#define COMMON_H_
#include <stdlib.h>
#include <stdint.h>
const uint8_t MSB_BIT_MASK = 0b10000000;
const uint8_t LSB_BIT_MASK = 0b00000001;
const uint8_t COMMON_BUFFER_SIZE_IN_BYTES = 50;
const uint8_t UPLINK_BUFFER_SIZE_IN_BYTES = 35;
const uint8_t DEVICE_DATA_BUFFER_SIZE_IN_BYTES = 64;
const uint8_t DEVICE_WORK_BUFFER_SIZE_IN_BYTES = 16;
const uint8_t COMMON_BUFFER_ONE_BYTE_RESPONSE_BIT_FLAG = 0b10000000;
const uint8_t COMMON_BUFFER_ONE_BYTE_RESPONSE_CONTENT_BIT_MASK = 0b01111111;
const uint8_t COMMON_BUFFER_RESPONSE_CALCULATE_CRC_BIT_FLAG = 0b01000000;
const uint8_t COMMON_BUFFER_RESPONSE_PACKET_LENGTH_BIT_MASK = 0b00111111;
#endif /* COMMON_H_ */<file_sep>/wdt.h
#ifndef WDT_H_
#define WDT_H_
static __inline__
__attribute__ ((__always_inline__))
void wdt_enable_int_only (const uint8_t value)
{
__asm__ __volatile__ (
"in __tmp_reg__,__SREG__" "\n\t"
"cli" "\n\t"
"wdr" "\n\t"
"sts %0, %1" "\n\t"
"out __SREG__,__tmp_reg__" "\n\t"
"sts %0, %2" "\n \t"
: /* no outputs */
: "n" (_SFR_MEM_ADDR(_WD_CONTROL_REG)),
"r" ((uint8_t)(_BV(_WD_CHANGE_BIT) | _BV(WDE))),
"r" ((uint8_t) ((value & 0x08 ? _WD_PS3_MASK : 0x00) |
_BV(WDIE) | (value & 0x07)) )
: "r0"
);
}
static __inline__
__attribute__ ((__always_inline__))
void wdt_enable_int_and_reset (const uint8_t value)
{
__asm__ __volatile__ (
"in __tmp_reg__,__SREG__" "\n\t"
"cli" "\n\t"
"wdr" "\n\t"
"sts %0, %1" "\n\t"
"out __SREG__,__tmp_reg__" "\n\t"
"sts %0, %2" "\n \t"
: /* no outputs */
: "n" (_SFR_MEM_ADDR(_WD_CONTROL_REG)),
"r" ((uint8_t)(_BV(_WD_CHANGE_BIT) | _BV(WDE))),
"r" ((uint8_t) ((value & 0x08 ? _WD_PS3_MASK : 0x00) |
_BV(WDE) | _BV(WDIE) | (value & 0x07)) )
: "r0"
);
}
#endif /* WDT_H_ */<file_sep>/adc.h
#ifndef __ADC_H__
#define __ADC_H__
void ADC_init();
uint16_t ADC_read(uint8_t channel);
#endif //__ADC_H__
<file_sep>/hub_delays.h
#ifndef __HUB_DELAYS_H__
#define __HUB_DELAYS_H__
class HubDelays
{
private:
static const uint8_t delay_PacketStartClckHi = 15;
static const uint8_t delay_PacketStartDataHi = 15;
static const uint8_t delay_PacketStartDataLo = 15;
static const uint8_t delay_PacketEndClckHi = 15;
static const uint8_t delay_PacketEndDataHi = 15;
static const uint8_t delay_SendBitClckLo = 15;
static const uint8_t delay_SendBitData = 0xff;
static const uint8_t delay_SendBitClckHi = 15;
static const uint8_t delay_SendBit9ClckLo = 15;
static const uint8_t delay_SendBit9Data = 0xff;
static const uint8_t delay_SendBit9ClckHi = 15;
static const uint8_t delay_SendAbandonDataLo = 15;
static const uint8_t delay_SendAbandonClckHi = 15;
static const uint8_t delay_SendLastByteTerminate = 0x1e;
static const uint8_t delay_RecvBitClckLo = 15;
static const uint8_t delay_RecvBitData = 15;
static const uint8_t delay_RecvBitClckHi = 15;
static const uint8_t delay_RecvBit9ClckLo = 15;
static const uint8_t delay_RecvBit9Data = 15;
static const uint8_t delay_RecvBit9ClckHi = 15;
public:
static const uint8_t PacketStartClckHi = 0;
static const uint8_t PacketStartDataHi = 1;
static const uint8_t PacketStartDataLo = 2;
static const uint8_t PacketEndClckHi = 3;
static const uint8_t PacketEndDataHi = 4;
static const uint8_t SendBitClckLo = 5;
static const uint8_t SendBitData = 6;
static const uint8_t SendBitClckHi = 7;
static const uint8_t SendBit9ClckLo = 8;
static const uint8_t SendBit9Data = 9;
static const uint8_t SendBit9ClckHi = 10;
static const uint8_t SendAbandonDataLo = 11;
static const uint8_t SendAbandonClckHi = 12;
static const uint8_t SendLastByteTerminate = 13;
static const uint8_t RecvBitClckLo = 14;
static const uint8_t RecvBitData = 15;
static const uint8_t RecvBitClckHi = 16;
static const uint8_t RecvBit9ClckLo = 17;
static const uint8_t RecvBit9Data = 18;
static const uint8_t RecvBit9ClckHi = 19;
#ifdef SYNCHRON
private:
static const uint8_t ItemsCount = RecvBit9ClckHi + 1;
public:
static void InitializeDelaysWithDefaults(uint8_t * delays)
{
for (uint8_t i = 0; i < ItemsCount; i++)
{
delays[i] = 0;
}
}
#else
static void InitializeDelaysWithDefaults(uint8_t * delays)
{
delays[PacketStartClckHi] = delay_PacketStartClckHi;
delays[PacketStartDataHi] = delay_PacketStartDataHi;
delays[PacketStartDataLo] = delay_PacketStartDataLo;
delays[PacketEndClckHi] = delay_PacketEndClckHi;
delays[PacketEndDataHi] = delay_PacketEndDataHi;
delays[SendBitClckLo] = delay_SendBitClckLo;
delays[SendBitData] = delay_SendBitData;
delays[SendBitClckHi] = delay_SendBitClckHi;
delays[SendBit9ClckLo] = delay_SendBit9ClckLo;
delays[SendBit9Data] = delay_SendBit9Data;
delays[SendBit9ClckHi] = delay_SendBit9ClckHi;
delays[SendAbandonDataLo] = delay_SendAbandonDataLo;
delays[SendAbandonClckHi] = delay_SendAbandonClckHi;
delays[SendLastByteTerminate] = delay_SendLastByteTerminate;
delays[RecvBitClckLo] = delay_RecvBitClckLo;
delays[RecvBitData] = delay_RecvBitData;
delays[RecvBitClckHi] = delay_RecvBitClckHi;
delays[RecvBit9ClckLo] = delay_RecvBit9ClckLo;
delays[RecvBit9Data] = delay_RecvBit9Data;
delays[RecvBit9ClckHi] = delay_RecvBit9ClckHi;
}
#endif
};
#endif //__HUB_DELAYS_H__
<file_sep>/commands.h
#ifndef COMMANDS_H_
#define COMMANDS_H_
#include <stdlib.h>
#include <stdint.h>
class MagicCommonCommands
{
public:
static const uint8_t UplinkDataPump = 'U';
};
class CommonCommandTypes
{
public:
static const uint8_t FireAndForget = 0xAA;
static const uint8_t NeedResponse = 0xAB;
};
class Commands
{
public:
// protocol support commands
static const uint32_t CRST = (uint32_t)'C' << 24 | (uint32_t)'R' << 16 | (uint32_t)'S' << 8 | 'T'; // resets controller
static const uint32_t CREQ = (uint32_t)'C' << 24 | (uint32_t)'R' << 16 | (uint32_t)'E' << 8 | 'Q'; // requests controller to respond to patterned slave ID match
static const uint32_t CNEW = (uint32_t)'C' << 24 | (uint32_t)'N' << 16 | (uint32_t)'E' << 8 | 'W'; // reports whether controller is new and not been detected yet via REQA
static const uint32_t CSLP = (uint32_t)'C' << 24 | (uint32_t)'S' << 16 | (uint32_t)'L' << 8 | 'P'; // suspends controller
static const uint32_t CWUP = (uint32_t)'C' << 24 | (uint32_t)'W' << 16 | (uint32_t)'U' << 8 | 'P'; // wakes up controller
static const uint32_t CGID = (uint32_t)'C' << 24 | (uint32_t)'G' << 16 | (uint32_t)'I' << 8 | 'D'; // controller returns 32-bit module ID + 16-bit firmware version
static const uint32_t CSLB = (uint32_t)'C' << 24 | (uint32_t)'S' << 16 | (uint32_t)'L' << 8 | 'B'; // sets lock bits of controller
static const uint32_t CGLB = (uint32_t)'C' << 24 | (uint32_t)'G' << 16 | (uint32_t)'L' << 8 | 'B'; // gets lock bits of controller
static const uint32_t CERR = (uint32_t)'C' << 24 | (uint32_t)'E' << 16 | (uint32_t)'R' << 8 | 'R'; // queries controller for errors
// uplink stack commands
static const uint32_t UWRI = (uint32_t)'U' << 24 | (uint32_t)'W' << 16 | (uint32_t)'R' << 8 | 'I'; // writes to uplink input buffer
static const uint32_t UWRO = (uint32_t)'U' << 24 | (uint32_t)'W' << 16 | (uint32_t)'R' << 8 | 'O'; // writes to uplink output buffer
static const uint32_t UCPY = (uint32_t)'U' << 24 | (uint32_t)'C' << 16 | (uint32_t)'P' << 8 | 'Y'; // copies content of uplink input buffer to uplink output buffer
static const uint32_t UMOV = (uint32_t)'U' << 24 | (uint32_t)'M' << 16 | (uint32_t)'O' << 8 | 'V'; // moves content of uplink input buffer to uplink output buffer and clears input buffer
static const uint32_t URDI = (uint32_t)'U' << 24 | (uint32_t)'R' << 16 | (uint32_t)'D' << 8 | 'I'; // reads ("peek") content of uplink input buffer
static const uint32_t URDO = (uint32_t)'U' << 24 | (uint32_t)'R' << 16 | (uint32_t)'D' << 8 | 'O'; // reads ("peek") content of uplink output buffer
static const uint32_t UCCI = (uint32_t)'U' << 24 | (uint32_t)'C' << 16 | (uint32_t)'C' << 8 | 'I'; // checks uplink input buffer for content
static const uint32_t UCCO = (uint32_t)'U' << 24 | (uint32_t)'C' << 16 | (uint32_t)'C' << 8 | 'O'; // checks uplink output buffer for content
// target device (-duino) commands that are handled by module controller
static const uint32_t DRST = (uint32_t)'D' << 24 | (uint32_t)'R' << 16 | (uint32_t)'S' << 8 | 'T'; // resets -duino
static const uint32_t DSLP = (uint32_t)'D' << 24 | (uint32_t)'S' << 16 | (uint32_t)'L' << 8 | 'P'; // suspends -duino, so it no longer responds to uplink data
static const uint32_t DWUP = (uint32_t)'D' << 24 | (uint32_t)'W' << 16 | (uint32_t)'U' << 8 | 'P'; // wakes up -duino
static const uint32_t DMSG = (uint32_t)'D' << 24 | (uint32_t)'M' << 16 | (uint32_t)'S' << 8 | 'G'; // forwards message payload to -duino (this is the way to send commands that will be handled by device itself)
};
class DeviceCommands
{
public:
static const uint8_t DeviceCommandIdentifier = 0xff;
// target device (-duino) commands that are forwarded to device as-is
static const uint32_t DGID = (uint32_t)'D' << 24 | (uint32_t)'G' << 16 | (uint32_t)'I' << 8 | 'D'; // send request to -duino to return 64-bit module ID
static const uint32_t DLCK = (uint32_t)'D' << 24 | (uint32_t)'L' << 16 | (uint32_t)'C' << 8 | 'K'; // send request to -duino to apply locks as needed
static const uint32_t DRBE = (uint32_t)'D' << 24 | (uint32_t)'R' << 16 | (uint32_t)'B' << 8 | 'E'; // reads block from -duino EEPROM
};
class UplinkCommandTypes
{
public:
static const uint8_t PacketTypeMask = 0b11000000; //11xxxxxx
static const uint8_t PacketSizeMask = 0b00111111; //xx111111
static const uint8_t DeviceEmptyPacket = 0b00000000; //00 pattern is chosen to distinguish valid packet and non-responding previous module, which will always be 11
static const uint8_t DeviceDataPacket = 0b01000000; //01 pattern is chosen to distinguish valid packet and non-responding previous module, which will always be 11
static const uint8_t DeviceMessagePacket = 0b10000000; //10 pattern is chosen to distinguish valid packet and non-responding previous module, which will always be 11
static const uint8_t EmptyPacket = 0b00110110; //00110110 pattern to signify empty packet
};
#endif /* COMMANDS_H_ */<file_sep>/device.cpp
#include <string.h>
#include "common.h"
#include "commands.h"
#include "device.h"
static uint8_t InpBuffer[DEVICE_WORK_BUFFER_SIZE_IN_BYTES];
static uint8_t OutBuffer[DEVICE_WORK_BUFFER_SIZE_IN_BYTES];
static uint8_t _internalInpBuffer[DEVICE_DATA_BUFFER_SIZE_IN_BYTES];
static uint8_t _internalOutBuffer[DEVICE_DATA_BUFFER_SIZE_IN_BYTES];
uint8_t ProcessMessage(uint8_t length);
uint8_t ProcessServiceMessage(uint8_t length);
void DEV_Reset()
{
}
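// Processes a stream of length-prefixed messages held in srcBuffer.
// Each message is laid out as [length][payload of 'length' bytes]; a payload whose first byte is
// DeviceCommands::DeviceCommandIdentifier (0xff) is treated as a service message.
// The handlers return a byte whose upper two bits select a MessageResponseCodes value (deciding
// whether the original message is dropped, kept, or forwarded before/after the response) and whose
// lower six bits give the number of response bytes placed in OutBuffer.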
uint8_t DEV_ProcessMessage(uint8_t * srcBuffer, uint8_t length, uint8_t * dstBuffer)
{
memcpy(_internalInpBuffer, srcBuffer, length);
uint8_t readIndex = 0;
uint8_t writeIndex = 0;
while (readIndex <= length)
{
uint8_t inpMessageLength = 0;
if (readIndex < length)
{
inpMessageLength = _internalInpBuffer[readIndex] + 1;
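// note: inpMessageLength is not bounds-checked against DEVICE_WORK_BUFFER_SIZE_IN_BYTES (16),
// so a length byte greater than 15 would overflow InpBuffer in the copy below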
for (uint8_t i = 0; i < inpMessageLength; i++)
{
InpBuffer[i] = _internalInpBuffer[readIndex++];
}
}
else
{
// this is to abandon the loop next time
readIndex++;
}
uint8_t outMessageLengthAndCode = inpMessageLength > 0 && InpBuffer[1] == DeviceCommands::DeviceCommandIdentifier
? ProcessServiceMessage(inpMessageLength)
: ProcessMessage(inpMessageLength);
uint8_t outMessageCode = outMessageLengthAndCode & 192;  // upper two bits: MessageResponseCodes value
uint8_t outMessageLength = outMessageLengthAndCode & 63; // lower six bits: length of the response in OutBuffer
// copy original message before response message, if needed
switch (outMessageCode)
{
case MessageResponseCodes::InsertAfterMessage:
case MessageResponseCodes::IgnoreMessage:
for (uint8_t i = 0; i < inpMessageLength; i++)
{
_internalOutBuffer[writeIndex++] = InpBuffer[i];
}
break;
}
// transfer response message from device to output buffer
for (uint8_t i = 0; i < outMessageLength; i++)
{
_internalOutBuffer[writeIndex++] = OutBuffer[i];
}
// copy original message after response message, if needed
switch (outMessageCode)
{
case MessageResponseCodes::InsertBeforeMessage:
for (uint8_t i = 0; i < inpMessageLength; i++)
{
_internalOutBuffer[writeIndex++] = InpBuffer[i];
}
break;
}
}
memcpy(dstBuffer, _internalOutBuffer, writeIndex);
return writeIndex;
}
uint8_t ProcessMessage(uint8_t length)
{
return 0;
}
uint8_t ProcessServiceMessage(uint8_t length)
{
return 0;
}
<file_sep>/Debug/makedep.mk
################################################################################
# Automatically-generated file. Do not edit or delete the file
################################################################################
adc.cpp
crc.cpp
error_logger.cpp
hub_interface.cpp
main.cpp
mod_interface.cpp
uart.cpp
usart.cpp
<file_sep>/usart.h
#ifndef __USART_H__
#define __USART_H__
void USART_Init(uint32_t baudRate);
bool USART_CopyAvailableTillLineEnd(char * target, uint8_t & cnt);
void USART_WriteChar(char data);
void USART_Write(char * stringPtr);
void USART_Write(char * stringPtr, uint8_t length);
void USART_Write(const char * stringPtr);
void USART_Write(const char * stringPtr, uint8_t length);
void USART_Write(volatile char * stringPtr);
void USART_Write(volatile char * stringPtr, uint8_t length);
void USART_WriteBoolResultCode(bool b);
void USART_WriteByteResultCode(uint8_t b);
void USART_WriteByteAsHex(uint8_t b);
void USART_WriteShortAsHex(uint16_t s);
void USART_WriteIntAsHex(uint32_t i);
#endif //__USART_H__
<file_sep>/device.h
#ifndef __DEVICE_H__
#define __DEVICE_H__
#include <stdlib.h>
#include <stdint.h>
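// Result codes for the device message handlers (see device.cpp): the upper two bits of a handler's
// return value select one of these codes, which decides whether the original message is forwarded
// before or after the handler's response, forwarded unchanged, or discarded; the lower six bits
// carry the response length.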
class MessageResponseCodes
{
public:
static const uint8_t IgnoreMessage = 0b00000000;
static const uint8_t InsertAfterMessage = 0b01000000;
static const uint8_t InsertBeforeMessage = 0b10000000;
static const uint8_t DiscardMessage = 0b11000000;
};
void DEV_Reset();
uint8_t DEV_ProcessMessage(uint8_t * buffer, uint8_t length, uint8_t * dstBuffer);
#endif //__DEVICE_H__
<file_sep>/mod_interface.h
#ifndef __MOD_INTERFACE_H__
#define __MOD_INTERFACE_H__
void MOD_MonitorConnectionHealth();
void MOD_PadPortStateChanged(uint8_t currentPortState, uint8_t previousPortState);
#endif //__MOD_INTERFACE_H__
<file_sep>/hub_interface.cpp
#include "config.h"
#include <stdlib.h>
#include <stdint.h>
#include <stdio.h>
#include <avr/io.h>
#include <util/delay.h>
#include "common.h"
#include "errors.h"
#include "crc.h"
#include "commands.h"
#include "error_logger.h"
#include "hub_delays.h"
#include "hub_interface.h"
#include "usart.h"
// port assignment for hub pins
#define PAD_PORT_CFG DDRB
#define PAD_PORT_INP PINB
#define PAD_PORT_OUT PORTB
// uplink pin - only one pin needed (i.e. uplink signal out)
#define LNK_PORT_CFG DDRB
#define LNK_PORT_INP PINB
#define LNK_PORT_OUT PORTB
#define LNK_UOUT PORTB4
// hub pins - only two pins needed (i.e. clock and data)
#define PAD_CLCK PORTB1
#define PAD_DATA PORTB2
// configures pin for output or input
#define CFG_PAD_OUT(pad) PAD_PORT_CFG |= (1 << pad)
#define CFG_PAD_INP(pad) PAD_PORT_CFG &= ~(1 << pad)
// set pin template
#define SET_PAD_HI(pad) PAD_PORT_OUT |= (1 << pad)
#define SET_PAD_LO(pad) PAD_PORT_OUT &= ~(1 << pad)
// get pin state template
#define GET_PAD_HI(pad) ((PAD_PORT_INP & (1 << pad)) != 0)
#define GET_PAD_LO(pad) ((PAD_PORT_INP & (1 << pad)) == 0)
// configures pin for CLCK signal either for output or input
#define CFG_PAD_CLCK_OUT CFG_PAD_OUT(PAD_CLCK)
// configures pin for DATA signal either for output or input
#define CFG_PAD_DATA_OUT CFG_PAD_OUT(PAD_DATA)
#define CFG_PAD_DATA_INP CFG_PAD_INP(PAD_DATA)
// sets state of CLCK pin either HI or LO
#define SET_PAD_CLCK_HI SET_PAD_HI(PAD_CLCK)
#define SET_PAD_CLCK_LO SET_PAD_LO(PAD_CLCK)
// sets state of DATA pin either HI or LO
#define SET_PAD_DATA_HI SET_PAD_HI(PAD_DATA)
#define SET_PAD_DATA_LO SET_PAD_LO(PAD_DATA)
// asserts CLCK pin output state is set as expected
#define ASSERT_PAD_CLCK_HI ((PAD_PORT_OUT & (1 << PAD_CLCK)) != 0)
#define ASSERT_PAD_CLCK_LO ((PAD_PORT_OUT & (1 << PAD_CLCK)) == 0)
// gets and validates whether state of DATA pin is set as expected
#define GET_PAD_DATA_HI GET_PAD_HI(PAD_DATA)
#define GET_PAD_DATA_LO GET_PAD_LO(PAD_DATA)
// configures pin for UPLINK signal either for output or input
#define CFG_LNK_UOUT_OUT LNK_PORT_CFG |= (1 << LNK_UOUT)
// sets state of UPLINK output pin either HI or LO
#define SET_LNK_HI LNK_PORT_OUT |= (1 << LNK_UOUT)
#define SET_LNK_LO LNK_PORT_OUT &= ~(1 << LNK_UOUT)
static uint8_t _commonBufferTransceiverBitQueue;
static uint8_t _uplinkBufferTransmitterBitQueue;
static uint8_t _commonBuffer[COMMON_BUFFER_SIZE_IN_BYTES];
static uint8_t _uplinkBuffer[UPLINK_BUFFER_SIZE_IN_BYTES];
static uint8_t _commonBufferWriteIndex;
static uint8_t _commonBufferReadIndex;
static uint8_t _expectedResponseLength;
static bool _validateCrcForCurrentResponse;
static uint8_t _delays[32];
static inline void DelayForSyncTest() __attribute__((__always_inline__));
static inline void DelayForAsync(uint8_t delayId) __attribute__((__always_inline__));
static bool SendPacketStart();
static bool SendPacketEnd();
static bool SendEntireBuffer(bool terminate, bool transmitUplinkData);
static bool SendCommonCommandPart(uint8_t commandType, uint32_t slave, uint32_t command, uint8_t * payload, uint8_t payloadLength);
static uint8_t ReadOneByteResponseFromCommonBuffer();
static uint8_t ReadNextByteFromCommonBuffer();
static uint32_t ReadNextWordFromCommonBuffer();
static void WriteNextBytesToCommonBuffer(uint8_t * v, uint8_t length);
static void WriteNextByteToCommonBuffer(uint8_t v);
static void WriteNextWordToCommonBuffer(uint32_t v);
static void WriteNextByteToUplinkBuffer(uint8_t uplinkBufferValue, uint8_t commonBufferValue);
void HUB_Initialize()
{
HubDelays::InitializeDelaysWithDefaults(_delays);
}
uint8_t HUB_GetDelay(uint8_t delayId)
{
return _delays[delayId];
}
void HUB_SetDelay(uint8_t delayId, uint8_t value)
{
_delays[delayId] = value;
}
uint8_t * HUB_GetBufferContent()
{
return _commonBuffer;
}
bool HUB_SendEmptyDuinoCommand()
{
uint8_t payload[1];
payload[0] = UplinkCommandTypes::EmptyPacket;
return HUB_SendUplinkPacket(payload, 1);
}
bool HUB_SendUplinkPumpPacket()
{
return HUB_SendUplinkPacket(NULL, 0);
}
bool HUB_SendUplinkPacket(uint8_t * payload, uint8_t length)
{
_commonBufferWriteIndex = 0;
_commonBufferReadIndex = 0;
WriteNextByteToUplinkBuffer(0, MagicCommonCommands::UplinkDataPump);
for (uint8_t i = 0; i < length; i++)
{
WriteNextByteToUplinkBuffer(payload[i], 0);
}
for (uint8_t i = length; i < UPLINK_BUFFER_SIZE_IN_BYTES; i++)
{
WriteNextByteToUplinkBuffer(0, 0);
}
bool result = false;
if (SendPacketStart())
{
result = SendEntireBuffer(true, true);
}
SendPacketEnd();
return result;
}
bool HUB_SendCommandToAll(uint32_t command)
{
return HUB_SendCommand(0, command, NULL, 0);
}
bool HUB_SendCommandToAll(uint32_t command, uint32_t payload)
{
return HUB_SendCommand(0, command, payload);
}
bool HUB_SendCommandToAll(uint32_t command, uint8_t * payload, uint8_t payloadLength)
{
return HUB_SendCommand(0, command, payload, payloadLength);
}
bool HUB_SendCommand(uint32_t slave, uint32_t command)
{
return HUB_SendCommand(slave, command, NULL, 0);
}
bool HUB_SendCommand(uint32_t slave, uint32_t command, uint32_t payload)
{
uint8_t temp[4] = { (uint8_t)((payload >> 24) & 0xff), (uint8_t)((payload >> 16) & 0xff), (uint8_t)((payload >> 8) & 0xff), (uint8_t)(payload & 0xff) };
return HUB_SendCommand(slave, command, temp, 4);
}
bool HUB_SendCommand(uint32_t slave, uint32_t command, uint8_t * payload, uint8_t payloadLength)
{
bool result = SendCommonCommandPart(CommonCommandTypes::FireAndForget, slave, command, payload, payloadLength);
SendPacketEnd();
return result;
}
bool HUB_SendCommandToAllWithExpectedResponse(uint32_t command)
{
return HUB_SendCommandWithExpectedResponse(0, command, NULL, 0);
}
bool HUB_SendCommandToAllWithExpectedResponse(uint32_t command, uint32_t payload)
{
return HUB_SendCommandWithExpectedResponse(0, command, payload);
}
bool HUB_SendCommandToAllWithExpectedResponse(uint32_t command, uint8_t * payload, uint8_t payloadLength)
{
return HUB_SendCommandWithExpectedResponse(0, command, payload, payloadLength);
}
bool HUB_SendCommandWithExpectedResponse(uint32_t slave, uint32_t command)
{
return HUB_SendCommandWithExpectedResponse(slave, command, NULL, 0);
}
bool HUB_SendCommandWithExpectedResponse(uint32_t slave, uint32_t command, uint32_t payload)
{
uint8_t temp[4] = { (uint8_t)((payload >> 24) & 0xff), (uint8_t)((payload >> 16) & 0xff), (uint8_t)((payload >> 8) & 0xff), (uint8_t)(payload & 0xff) };
return HUB_SendCommandWithExpectedResponse(slave, command, temp, 4);
}
bool HUB_SendCommandWithExpectedResponse(uint32_t slave, uint32_t command, uint8_t * payload, uint8_t payloadLength)
{
bool result = SendCommonCommandPart(CommonCommandTypes::NeedResponse, slave, command, payload, payloadLength);
if (result == false)
{
// always send packet end;
// however, module is able to recover even without it
SendPacketEnd();
return false;
}
_commonBufferTransceiverBitQueue = 0;
_commonBufferWriteIndex = 0;
_expectedResponseLength = 0;
// clear buffer before writing response
// in reality this is not needed as content length is known
// this means, that any left overs in the buffer after response can be safely ignored
// however, nulling the buffer makes debugging easier
for (uint8_t i = 0; i < COMMON_BUFFER_SIZE_IN_BYTES; i++)
{
_commonBuffer[i] = 0;
}
// read response from slave(s)
for (;;)
{
for (uint8_t i = 0; i < 8; i++)
{
DelayForSyncTest();
SET_PAD_CLCK_LO;
DelayForAsync(HubDelays::RecvBitClckLo);
DelayForSyncTest();
SET_PAD_DATA_HI;
CFG_PAD_DATA_INP;
DelayForAsync(HubDelays::RecvBitData);
_commonBufferTransceiverBitQueue = _commonBufferTransceiverBitQueue << 1;
_commonBufferTransceiverBitQueue |= GET_PAD_DATA_HI ? 1 : 0;
DelayForSyncTest();
SET_PAD_CLCK_HI;
DelayForAsync(HubDelays::RecvBitClckHi);
}
// transmit 9th extra bit
DelayForSyncTest();
SET_PAD_CLCK_LO;
DelayForAsync(HubDelays::RecvBit9ClckLo);
DelayForSyncTest();
SET_PAD_DATA_LO;
DelayForAsync(HubDelays::RecvBit9Data);
DelayForSyncTest();
SET_PAD_CLCK_HI;
DelayForAsync(HubDelays::RecvBit9ClckHi);
if (_commonBufferWriteIndex == 0)
{
if (_commonBufferTransceiverBitQueue == 0)
{
_commonBuffer[0] = 0;
break;
}
if (_commonBufferTransceiverBitQueue < COMMON_BUFFER_ONE_BYTE_RESPONSE_BIT_FLAG)
{
_expectedResponseLength = _commonBufferTransceiverBitQueue & COMMON_BUFFER_RESPONSE_PACKET_LENGTH_BIT_MASK;
_validateCrcForCurrentResponse = (_commonBufferTransceiverBitQueue & COMMON_BUFFER_RESPONSE_CALCULATE_CRC_BIT_FLAG) != 0;
}
else
{
_expectedResponseLength = 1;
_validateCrcForCurrentResponse = false;
}
if (_validateCrcForCurrentResponse)
{
_expectedResponseLength++;
}
}
_commonBuffer[_commonBufferWriteIndex] = _commonBufferTransceiverBitQueue;
if (_commonBufferWriteIndex == _expectedResponseLength)
{
break;
}
if (++_commonBufferWriteIndex == COMMON_BUFFER_SIZE_IN_BYTES)
{
break;
}
}
// always send packet end; however, module is able to recover even without it
SendPacketEnd();
// check to prevent buffer overflow
if (_commonBufferWriteIndex == COMMON_BUFFER_SIZE_IN_BYTES)
{
_commonBufferWriteIndex = 0;
_commonBufferReadIndex = 0;
ERRLOG_LogError(Errors::HubCommonBufferOnResponseReadOverflow);
return false;
}
// validate CRC
if (_validateCrcForCurrentResponse)
{
if (_commonBuffer[_commonBufferWriteIndex] != CRC_CalculateBufferCrc(_commonBuffer, _commonBufferWriteIndex))
{
_commonBufferWriteIndex = 0;
_commonBufferReadIndex = 0;
ERRLOG_LogError(Errors::HubCommonBufferOnResponseReadCrcMismatch);
return false;
}
// we need to remove "calculate CRC" request flag from the header
// this flag is embedded in the response by the module, but would confuse later stages when the response needs to be processed further
// however, "calculate CRC" request flag is part of CRC itself, so we also can't remove it earlier
if ((_commonBuffer[0] & COMMON_BUFFER_ONE_BYTE_RESPONSE_BIT_FLAG) == 0 &&
(_commonBuffer[0] & COMMON_BUFFER_RESPONSE_CALCULATE_CRC_BIT_FLAG) != 0)
{
_commonBuffer[0] &= COMMON_BUFFER_RESPONSE_PACKET_LENGTH_BIT_MASK;
}
}
_commonBufferWriteIndex = 0;
_commonBufferReadIndex = 0;
return true;
}
uint8_t HUB_DetectNewSlaves()
{
bool result = HUB_SendCommandToAllWithExpectedResponse(Commands::CNEW);
if (!result)
{
return 255;
}
uint8_t response = ReadOneByteResponseFromCommonBuffer();
if (response > COMMON_BUFFER_ONE_BYTE_RESPONSE_CONTENT_BIT_MASK)
{
// error occurred
return 255;
}
return response == 127 ? 0 : 1;
}
uint8_t HUB_DetectSlaves(uint32_t * foundSlaveAddresses, uint8_t maxSlavesToReturn)
{
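// enumeration over the open-drain DATA line:
// every awake slave answers CREQ with its 32-bit ID followed by the bit-inverted ID,
// the wired-AND of simultaneous answers exposes the bit positions where the IDs differ,
// those positions are then walked with narrowing CREQ match templates until exactly one slave answers,
// the found slave is suspended (CSLP) and the search restarts until no further slave responds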
uint8_t slavesFound = 0;
uint8_t maxIterations = 200;
uint32_t wdLastFoundSlaveId = 0;
// wake-up all slaves first
// as some sleeping slaves will prevent full enumeration
HUB_SendCommandToAll(Commands::CWUP);
for (;;)
{
BeginAgain:
// use greedy pattern first
bool result = HUB_SendCommandToAllWithExpectedResponse(Commands::CREQ, 0);
if (!result)
{
if (wdLastFoundSlaveId == 0)
{
return 255;
}
break;
}
uint8_t responseLength = ReadNextByteFromCommonBuffer();
if (responseLength != 8)
{
if (wdLastFoundSlaveId == 0)
{
return 255;
}
break;
}
uint32_t idNormal = ReadNextWordFromCommonBuffer();
uint32_t idInvert = ReadNextWordFromCommonBuffer();
uint32_t idVarial = idNormal ^ idInvert ^ 0xffffffff;
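// a 1 bit in idVarial marks a position where the responding slaves disagree (bus collision),
// 0 means a single consistent responder, 0xffffffff means nobody responded at all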
if (idVarial == 0xffffffff)
{
return 255;
}
if (idVarial == 0)
{
foundSlaveAddresses[slavesFound++] = idNormal;
return slavesFound;
}
uint8_t usedIdBitsCount = 0;
uint32_t maxPossibleSlaveId = 1;
// allocate as much space as potentially can be used
uint32_t usedBitsToInts[32];
if (idVarial != 0)
{
for (uint8_t i = 0; i < 32; i++)
{
if (((idVarial >> i) & 1) == 1)
{
usedBitsToInts[usedIdBitsCount++] = 1 << i;
}
}
maxPossibleSlaveId = 1 << usedIdBitsCount;
}
// this prevents an endless loop when no slave responds, which happens when the protocol fails
uint8_t protocolFailurePrevention = 0;
for (uint32_t i = 1; i <= maxPossibleSlaveId; i++)
{
if (protocolFailurePrevention++ == 32)
{
maxIterations = 0;
break;
}
// iterate only when more than one slave responded
if (idVarial != 0)
{
uint32_t slaveMatchTemplate = 0;
for (uint8_t s = 0; s < usedIdBitsCount; s++)
{
if (((i >> s) & 1) == 1)
{
slaveMatchTemplate |= usedBitsToInts[s];
}
}
// create detailed pattern
result = HUB_SendCommandToAllWithExpectedResponse(Commands::CREQ, slaveMatchTemplate);
if (!result)
{
continue;
}
responseLength = ReadNextByteFromCommonBuffer();
if (responseLength != 8)
{
continue;
}
idNormal = ReadNextWordFromCommonBuffer();
idInvert = ReadNextWordFromCommonBuffer();
idVarial = idNormal ^ idInvert ^ 0xffffffff;
if (idVarial == 0xffffffff)
{
maxIterations = 0;
break;
}
protocolFailurePrevention = 0;
}
// check if no bit-collision detected
// this would mean that we've found an actual slave ID
if (idVarial == 0)
{
// check if we found the same slave
// this will indicate an issue in slave suspension
if (idNormal == wdLastFoundSlaveId)
{
// we can't recover from this, so
// terminate detection loop
maxIterations = 0;
break;
}
// add found slave ID to the list
foundSlaveAddresses[slavesFound++] = idNormal;
if (slavesFound >= maxSlavesToReturn)
{
break;
}
// suspend the found slave so that it no longer contributes to the list of IDs
HUB_SendCommand(idNormal, Commands::CSLP);
// reset collision detection flag
idVarial = 0xffffffff;
// arm watch dog
wdLastFoundSlaveId = idNormal;
// restart from the beginning
goto BeginAgain;
}
}
// prevent endless loop
if (maxIterations > 0)
{
maxIterations--;
continue;
}
break;
}
// wake-up all slaves
HUB_SendCommandToAll(Commands::CWUP);
return slavesFound;
}
uint8_t HUB_DetectSlavesSequence(uint32_t * foundSlaveAddresses, uint8_t maxSlavesToReturn)
{
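// enumerates modules in their physical daisy-chain order: a test packet is placed into the
// first module's uplink input buffer, UCCI asks "who holds this packet?" so only that module
// answers with its ID, then the packet is moved to its output buffer (UMOV) and pumped on
// to the next module in the chain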
// create test packet
const uint8_t payloadLength = 4;
uint8_t payload[payloadLength];
payload[0] = 0b00000010;
payload[1] = 0b00001010;
payload[2] = 0b00101010;
payload[3] = 0b10101010;
// wake-up all slaves first
// as some sleeping slaves will prevent full enumeration
HUB_SendCommandToAll(Commands::CWUP);
// send packet to first connected module
bool result = HUB_SendUplinkPacket(payload, payloadLength);
if (!result)
{
return 255;
}
uint8_t slavesFound = 0;
for (uint8_t i = 0; i < 255; i++)
{
result = HUB_SendCommandToAllWithExpectedResponse(Commands::UCCI, payload, payloadLength);
if (!result)
{
if (i == 0)
{
return 255;
}
break;
}
uint8_t responseLength = ReadNextByteFromCommonBuffer();
if (responseLength != 8)
{
if (i == 0)
{
return 255;
}
break;
}
uint32_t idNormal = ReadNextWordFromCommonBuffer();
uint32_t idInvert = ReadNextWordFromCommonBuffer();
uint32_t idVarial = idNormal ^ idInvert ^ 0xffffffff;
if (idVarial != 0)
{
if (i == 0 || idVarial == 0xffffffff)
{
return 255;
}
break;
}
// add found slave ID to the list
foundSlaveAddresses[slavesFound++] = idNormal;
if (slavesFound >= maxSlavesToReturn)
{
break;
}
// move test packet from input to output uplink buffer within the same module
result = HUB_SendCommand(idNormal, Commands::UMOV);
if (!result)
{
if (i == 0)
{
return 255;
}
break;
}
// pump test packet to the next module
result = HUB_SendUplinkPumpPacket();
if (!result)
{
if (i == 0)
{
return 255;
}
break;
}
}
return slavesFound;
}
bool SendCommonCommandPart(uint8_t commandType, uint32_t slave, uint32_t command, uint8_t * payload, uint8_t payloadLength)
{
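// common command packet layout (mirrored by ProcessCommand on the module side):
// byte 0       - command type (fire-and-forget or needs-response)
// byte 1..4    - 32-bit slave address (0 = broadcast)
// byte 5       - length of command + payload
// byte 6..9    - 32-bit command
// byte 10..N-1 - optional payload
// byte N       - CRC8 over all preceding bytes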
_commonBufferWriteIndex = 0;
_commonBufferReadIndex = 0;
WriteNextByteToCommonBuffer(commandType);
WriteNextWordToCommonBuffer(slave);
// command always takes 4 bytes
uint8_t commandLength = 4;
if (payload != NULL && payloadLength > 0)
{
commandLength += payloadLength;
}
WriteNextByteToCommonBuffer(commandLength);
WriteNextWordToCommonBuffer(command);
if (payload != NULL && payloadLength > 0)
{
WriteNextBytesToCommonBuffer(payload, payloadLength);
}
uint8_t crc = CRC_CalculateBufferCrc(_commonBuffer, _commonBufferWriteIndex);
WriteNextByteToCommonBuffer(crc);
if (SendPacketStart())
{
return SendEntireBuffer(commandType == CommonCommandTypes::FireAndForget, false);
}
return false;
}
bool SendEntireBuffer(bool terminate, bool transmitUplinkData)
{
if (transmitUplinkData)
{
CFG_LNK_UOUT_OUT;
}
while (_commonBufferReadIndex < _commonBufferWriteIndex)
{
_commonBufferTransceiverBitQueue = _commonBuffer[_commonBufferReadIndex];
if (_commonBufferReadIndex > 0 && _commonBufferReadIndex <= UPLINK_BUFFER_SIZE_IN_BYTES)
{
_uplinkBufferTransmitterBitQueue = _uplinkBuffer[_commonBufferReadIndex - 1];
}
else
{
_uplinkBufferTransmitterBitQueue = 0;
}
// configure DAT pin for output, but set CLK to LO first,
// so that DAT transition is valid
SET_PAD_CLCK_LO;
CFG_PAD_DATA_OUT;
SET_LNK_LO;
for (uint8_t i = 0; i < 8; i++)
{
DelayForSyncTest();
SET_PAD_CLCK_LO;
DelayForAsync(HubDelays::SendBitClckLo);
DelayForSyncTest();
if ((_commonBufferTransceiverBitQueue & MSB_BIT_MASK) > 0)
{
SET_PAD_DATA_HI;
}
else
{
SET_PAD_DATA_LO;
}
if (transmitUplinkData == true)
{
if ((_uplinkBufferTransmitterBitQueue & MSB_BIT_MASK) > 0)
{
SET_LNK_HI;
}
else
{
SET_LNK_LO;
}
}
DelayForAsync(HubDelays::SendBitData);
DelayForSyncTest();
SET_PAD_CLCK_HI;
DelayForAsync(HubDelays::SendBitClckHi);
_commonBufferTransceiverBitQueue = _commonBufferTransceiverBitQueue << 1;
_uplinkBufferTransmitterBitQueue = _uplinkBufferTransmitterBitQueue << 1;
}
_commonBufferReadIndex++;
// now we append extra 9th bit
DelayForSyncTest();
SET_PAD_CLCK_LO;
DelayForAsync(HubDelays::SendBit9ClckLo);
// we set 9th DATA bit to Z state, so that slave can drive it low acknowledging byte receive
DelayForSyncTest();
SET_PAD_DATA_HI;
CFG_PAD_DATA_INP;
if (transmitUplinkData == true)
{
SET_LNK_LO;
}
DelayForAsync(HubDelays::SendBit9Data);
// get state of DATA pin
// check if any slave has driven DATA low
// at least one must do that to acknowledge byte receive
//if (PinData.GetState() == OpenDrainPinState.Hi)
if (GET_PAD_DATA_HI)
{
// no slave responded
// stop sending data
DelayForSyncTest();
CFG_PAD_DATA_OUT;
SET_PAD_DATA_LO;
DelayForAsync(HubDelays::SendAbandonDataLo);
DelayForSyncTest();
SET_PAD_CLCK_HI;
DelayForAsync(HubDelays::SendAbandonClckHi);
return false;
}
if (_commonBufferReadIndex == _commonBufferWriteIndex)
{
DelayForSyncTest();
if (terminate)
{
// we need to set the DATA pin Lo so that we can always raise it later to indicate the end of the data packet;
// if the actual 8-bit aligned data ends with 1 (Hi level), we cannot raise the DATA pin as it is already Hi,
// so we append an unconditional extra bit which is always 0 (zero)
CFG_PAD_DATA_OUT;
SET_PAD_DATA_LO;
}
else
{
// we need to set DATA pin to Z state, as we expect slave to send some response
// in this case we need to release DATA pin so that slave can start driving it
SET_PAD_DATA_HI;
CFG_PAD_DATA_INP;
}
DelayForAsync(HubDelays::SendLastByteTerminate);
}
DelayForSyncTest();
SET_PAD_CLCK_HI;
DelayForAsync(HubDelays::SendBit9ClckHi);
}
return true;
}
bool SendPacketStart()
{
//        XXX+-------------
//           |     HI
// CLCK   XXX+
//
//        XXXXXX+-----+
//              | ??? |     LO
// DATA   XXXXXX+     +-----
// 1. Raise CLCK to HI
// 2. Configure DATA as input, with pull-up it will go HI
// 3. Read DATA state
// 4. Make sure that DATA is HI, if it is LO then it means one of the modules is driving it LO, which is wrong
// 5. Pull DATA to LO, this will indicate start of the packet
DelayForSyncTest();
SET_PAD_CLCK_HI;
DelayForAsync(HubDelays::PacketStartClckHi);
DelayForSyncTest();
SET_PAD_DATA_HI;
CFG_PAD_DATA_INP;
DelayForAsync(HubDelays::PacketStartDataHi);
if (GET_PAD_DATA_LO)
{
ERRLOG_LogError(Errors::HubCommonInterfaceDataPinDrivenLowOnPacketStart);
for (uint8_t i = 8; ; i--) // no loop condition: "i >= 0" is always true for an unsigned counter, exit is via the return / break below
{
SendPacketEnd();
if (i == 0)
{
return false;
}
DelayForSyncTest();
SET_PAD_DATA_HI;
CFG_PAD_DATA_INP;
DelayForAsync(HubDelays::PacketStartDataHi);
if (!GET_PAD_DATA_LO)
{
break;
}
}
}
DelayForSyncTest();
CFG_PAD_DATA_OUT;
SET_PAD_DATA_LO;
DelayForAsync(HubDelays::PacketStartDataLo);
return true;
}
bool SendPacketEnd()
{
//        -----------------
//
// CLCK
//
//             +-----------
//             |    HI
// DATA   -----+
// 1. CLCK must be HI and must remain HI
// 2. Configure DATA as input, with pull-up it will go HI, this will indicate end of the packet
// CLCK must be already HI, report an error if not
if (ASSERT_PAD_CLCK_LO)
{
ERRLOG_LogError(Errors::HubCommonInterfaceClckPinIsLowOnPacketEnd);
}
// raise CLCK to HI, as this is what it has to be at the end of the packet
DelayForSyncTest();
SET_PAD_CLCK_HI;
DelayForAsync(HubDelays::PacketEndClckHi);
DelayForSyncTest();
SET_PAD_DATA_HI;
CFG_PAD_DATA_INP;
DelayForAsync(HubDelays::PacketEndDataHi);
// nothing should drive DATA to LO when packet ends, report an error if not
if (GET_PAD_DATA_LO)
{
ERRLOG_LogError(Errors::HubCommonInterfaceDataPinDrivenLowOnPacketEnd);
return false;
}
return true;
}
uint8_t ReadOneByteResponseFromCommonBuffer()
{
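// valid one-byte responses always have the marker bit set, so anything below the marker
// (including an idle-bus 0) is rejected; the marker is stripped before returning the content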
uint8_t v = _commonBuffer[0];
if (v < COMMON_BUFFER_ONE_BYTE_RESPONSE_BIT_FLAG)
{
return Errors::InvalidOneByteResponse;
}
return v & COMMON_BUFFER_ONE_BYTE_RESPONSE_CONTENT_BIT_MASK;
}
uint8_t ReadNextByteFromCommonBuffer()
{
return _commonBuffer[_commonBufferReadIndex++];
}
uint32_t ReadNextWordFromCommonBuffer()
{
uint32_t word = _commonBuffer[_commonBufferReadIndex++];
word = (word << 8) | _commonBuffer[_commonBufferReadIndex++];
word = (word << 8) | _commonBuffer[_commonBufferReadIndex++];
word = (word << 8) | _commonBuffer[_commonBufferReadIndex++];
return word;
}
void WriteNextBytesToCommonBuffer(uint8_t * v, uint8_t length)
{
if (_commonBufferWriteIndex + length >= COMMON_BUFFER_SIZE_IN_BYTES)
{
ERRLOG_LogError(Errors::HubCommonBufferOnWriteByteOverflow);
return;
}
for (uint8_t i = 0; i < length; i++)
{
_commonBuffer[_commonBufferWriteIndex++] = v[i];
}
}
void WriteNextByteToCommonBuffer(uint8_t v)
{
if (_commonBufferWriteIndex >= COMMON_BUFFER_SIZE_IN_BYTES)
{
ERRLOG_LogError(Errors::HubCommonBufferOnWriteByteOverflow);
return;
}
_commonBuffer[_commonBufferWriteIndex++] = v;
}
void WriteNextWordToCommonBuffer(uint32_t v)
{
if (_commonBufferWriteIndex + 4 >= COMMON_BUFFER_SIZE_IN_BYTES)
{
ERRLOG_LogError(Errors::HubCommonBufferOnWriteWordOverflow);
return;
}
_commonBuffer[_commonBufferWriteIndex++] = (v >> 24) & 0xff;
_commonBuffer[_commonBufferWriteIndex++] = (v >> 16) & 0xff;
_commonBuffer[_commonBufferWriteIndex++] = (v >> 8) & 0xff;
_commonBuffer[_commonBufferWriteIndex++] = v & 0xff;
}
void WriteNextByteToUplinkBuffer(uint8_t uplinkBufferValue, uint8_t commonBufferValue)
{
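// the common and uplink buffers are filled in lock-step, with the uplink buffer lagging one byte:
// common byte 0 is the magic pump header which has no uplink counterpart,
// so uplink byte i is transmitted alongside common byte i + 1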
if (_commonBufferWriteIndex >= COMMON_BUFFER_SIZE_IN_BYTES ||
_commonBufferWriteIndex > UPLINK_BUFFER_SIZE_IN_BYTES)
{
ERRLOG_LogError(Errors::HubUplinkBufferOnWriteByteOverflow);
return;
}
_commonBuffer[_commonBufferWriteIndex] = commonBufferValue;
if (_commonBufferWriteIndex > 0)
{
_uplinkBuffer[_commonBufferWriteIndex - 1] = uplinkBufferValue;
}
_commonBufferWriteIndex++;
}
void DelayForSyncTest()
{
#ifdef SYNCHRON
// reset 16-bit profiling Timer 1 only when in synchronous mode
// when in release mode the entire function body is omitted, and compiler removes all calls into this function during optimization
TCNT1 = 0;
__builtin_avr_delay_cycles(1);
#endif
}
void DelayForAsync(uint8_t delayId)
{
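// SYNCHRON (calibration) builds measure the time elapsed since the last DelayForSyncTest()
// with Timer 1 and keep the largest value seen (an out-of-range measurement clears the entry);
// regular builds simply busy-wait the calibrated number of microseconds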
#ifdef SYNCHRON
__builtin_avr_delay_cycles(1);
uint16_t timenow = TCNT1;
if (timenow <= 0xff)
{
if (_delays[delayId] < timenow)
{
_delays[delayId] = timenow;
}
}
else
{
_delays[delayId] = 0;
}
#else
for (uint8_t i = 0; i < _delays[delayId]; i++)
{
_delay_us(1);
}
#endif
}
<file_sep>/mod_interface.cpp
#include <stdlib.h>
#include <stdint.h>
#include <avr/io.h>
#include <avr/interrupt.h>
#include <avr/wdt.h>
#include <avr/eeprom.h>
#include "common.h"
#include "errors.h"
#include "crc.h"
#include "commands.h"
#include "error_logger.h"
#include "wdt.h"
#include "device.h"
#include "mod_interface.h"
#include "usart.h"
// port assignment for physical connection pins
#define PAD_PORT_CFG DDRC
#define PAD_PORT_INP PINC
#define PAD_PORT_OUT PORTC
// uplink pins - two pins needed (i.e. uplink signal in and out)
#define LNK_PORT_CFG DDRC
#define LNK_PORT_INP PINC
#define LNK_PORT_OUT PORTC
#define LNK_UINP PINC4
#define LNK_UOUT PORTC5
// all available pins for physical connection
#define INPUT_PAD_1 PINC0
#define INPUT_PAD_2 PINC1
#define INPUT_PAD_3 PINC2
#define INPUT_PAD_4 PINC3
// configures pin for output or input
#define CFG_PAD_OUT(pad) PAD_PORT_CFG |= (1 << pad)
#define CFG_PAD_INP(pad) PAD_PORT_CFG &= ~(1 << pad)
// set pin state template
#define SET_PAD_HI(pad) PAD_PORT_OUT |= (1 << pad)
#define SET_PAD_LO(pad) PAD_PORT_OUT &= ~(1 << pad)
// get pin state template
#define GET_PAD_HI(pad) ((PAD_PORT_INP & (1 << pad)) != 0)
#define GET_PAD_LO(pad) ((PAD_PORT_INP & (1 << pad)) == 0)
// gets state of UPLINK input pin
#define GET_LNK_HI ((LNK_PORT_INP & (1 << LNK_UINP)) != 0)
#define GET_LNK_LO ((LNK_PORT_INP & (1 << LNK_UINP)) == 0)
// sets state of UPLINK output pin either HI or LO
#define SET_LNK_HI LNK_PORT_OUT |= (1 << LNK_UOUT)
#define SET_LNK_LO LNK_PORT_OUT &= ~(1 << LNK_UOUT)
// asserts CLCK pin output state is set as expected
#define ASSERT_PAD_HI(pad) ((PAD_PORT_OUT & (1 << pad)) != 0)
#define ASSERT_PAD_LO(pad) ((PAD_PORT_OUT & (1 << pad)) == 0)
static const uint8_t MAX_POWER_FLIPS_FOR_RESET = 190;
static const uint8_t POWER_DETECTION_MIN_STATE_CHANGES = 20;
static const uint8_t POWER_DETECTION_MAX_POWER_FLIPS = 3;
static const uint8_t POWER_DETECTION_MIN_STATE_FLIPS = 8;
static const uint8_t CLOCK_DETECTION_MIN_STATE_CHANGES = 20;
static const uint8_t CLOCK_DETECTION_MIN_DATA_FLIPS = 2;
static const uint8_t VERSION_MAJOR = 7;
static const uint8_t VERSION_MINOR = 11;
static const uint8_t UPLINK_ACTIVATED_BIT_FLAG = 0b10000000;
static const uint8_t UPLINK_PACKET_LENGTH_BIT_MASK = 0b00111111;
static const uint8_t COMMON_BUFFER_MODE_INITIAL = 0;
static const uint8_t COMMON_BUFFER_MODE_RECEIVE = 1;
static const uint8_t COMMON_BUFFER_MODE_BEFORE_RESPOND = 2;
static const uint8_t COMMON_BUFFER_MODE_RESPOND = 3;
static const uint8_t COMMON_BUFFER_MODE_BEFORE_TERMINATE = 4;
static const uint8_t COMMON_COMMAND_SERVICE_DATA_LENGTH_IN_BYTES = 6; // 1 byte command ID + 4 byte slave ID + 1 byte CRC
static const uint8_t COMMON_COMMAND_MAX_PAYLOAD_LENGTH_IN_BYTES = COMMON_BUFFER_SIZE_IN_BYTES - (COMMON_COMMAND_SERVICE_DATA_LENGTH_IN_BYTES + 4);
static const uint8_t UPLINK_COMMAND_SERVICE_DATA_LENGTH_IN_BYTES = 2; // 1 byte command ID and length + 1 byte CRC
static const uint8_t UPLINK_COMMAND_MAX_PAYLOAD_LENGTH_IN_BYTES = UPLINK_BUFFER_SIZE_IN_BYTES - UPLINK_COMMAND_SERVICE_DATA_LENGTH_IN_BYTES;
static uint32_t _address = 0x01020304;
static uint8_t PAD_CLCK = 0xff;
static uint8_t PAD_DATA = 0xff;
static uint8_t PAD_VCC = 0xff;
static uint8_t PAD_GND = 0xff;
static uint8_t _resetOnPowerChangeCounter = 0;
static uint8_t _directionDetectionState = 0xff;
static uint8_t _p1DetectionCounter = 0;
static uint8_t _p2DetectionCounter = 0;
static uint8_t _p3DetectionCounter = 0;
static uint8_t _p4DetectionCounter = 0;
static bool _isDiscovered = false;
static bool _isControllerSuspended = false;
static bool _isDeviceSuspended = false;
static uint8_t _commonBuffer[COMMON_BUFFER_SIZE_IN_BYTES];
static uint8_t _commonBufferWriteIndex;
static uint8_t _commonBufferReadIndex;
static uint8_t _commonBufferTransmitReciveMode;
static uint8_t _commonBufferTransceiverBitQueue;
static uint8_t _commonBufferTransceiverBitCounter;
static uint8_t _expectedCommandLengthIncludingPayload;
static uint8_t _uplinkBufferInp[UPLINK_BUFFER_SIZE_IN_BYTES];
static uint8_t _uplinkBufferOut[UPLINK_BUFFER_SIZE_IN_BYTES];
static uint8_t _uplinkBufferModeAndIndex;
static uint8_t _uplinkBufferReceiverBitQueue;
static uint8_t _uplinkBufferTransmitterBitQueue;
static bool CheckCanHandleCommand();
static bool ProcessCommand();
static uint8_t ReadNextByteFromCommonBuffer();
static uint32_t ReadNextWordFromCommonBuffer();
static void WriteResponseHeaderByteToCommonBuffer(uint8_t length, bool calculateCrc);
static void WriteOneByteResponseToCommonBuffer(uint8_t v);
static void WriteNextByteToCommonBuffer(uint8_t v);
static void WriteNextWordToCommonBuffer(uint32_t v);
static void ClearUplinkInpBuffer();
static void ClearUplinkOutBuffer();
static void SendDataToDevice();
static void ResetController();
static void ResetDevice();
static uint8_t GetPinBitIndexInInputPort(uint8_t pinIndex);
void MOD_MonitorConnectionHealth()
{
if (_directionDetectionState != 0)
{
return;
}
if (GET_PAD_HI(PAD_GND) || GET_PAD_LO(PAD_VCC))
{
if (++_resetOnPowerChangeCounter > MAX_POWER_FLIPS_FOR_RESET)
{
ResetController();
return;
}
}
}
void MOD_PadPortStateChanged(uint8_t currentPortState, uint8_t previousPortState)
{
uint8_t change = currentPortState ^ previousPortState;
if (_directionDetectionState != 0)
{
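// pin auto-detection state machine (_directionDetectionState):
//   0xff   - nothing known yet; look for the single pad that is low while the other three are high (GND candidate)
//   1..4   - GND candidate found (value = its pad number); the other pad that barely toggles is taken as VCC
//   12..43 - GND and VCC found, encoded as two decimal digits (tens = GND pad, units = VCC pad);
//            CLK and DATA are now told apart by counting transitions
//   0xf0   - all four pads identified; wait for the current packet to end
//   0      - detection complete, normal operation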
bool p1s = (currentPortState & (1 << INPUT_PAD_1)) != 0;
bool p2s = (currentPortState & (1 << INPUT_PAD_2)) != 0;
bool p3s = (currentPortState & (1 << INPUT_PAD_3)) != 0;
bool p4s = (currentPortState & (1 << INPUT_PAD_4)) != 0;
// detect GND pin
if (_directionDetectionState == 0xff)
{
_p1DetectionCounter = _p2DetectionCounter = _p3DetectionCounter = _p4DetectionCounter = 0;
if (!p1s && p2s && p3s && p4s)
{
_directionDetectionState = 1;
}
else if (p1s && !p2s && p3s && p4s)
{
_directionDetectionState = 2;
}
else if (p1s && p2s && !p3s && p4s)
{
_directionDetectionState = 3;
}
else if (p1s && p2s && p3s && !p4s)
{
_directionDetectionState = 4;
}
}
// detect VCC pin
if (_directionDetectionState >= 1 && _directionDetectionState <= 4)
{
if (change & (1 << INPUT_PAD_1)) _p1DetectionCounter++;
if (change & (1 << INPUT_PAD_2)) _p2DetectionCounter++;
if (change & (1 << INPUT_PAD_3)) _p3DetectionCounter++;
if (change & (1 << INPUT_PAD_4)) _p4DetectionCounter++;
if (_p1DetectionCounter > POWER_DETECTION_MIN_STATE_CHANGES ||
_p2DetectionCounter > POWER_DETECTION_MIN_STATE_CHANGES ||
_p3DetectionCounter > POWER_DETECTION_MIN_STATE_CHANGES ||
_p4DetectionCounter > POWER_DETECTION_MIN_STATE_CHANGES)
{
switch (_directionDetectionState)
{
case 1:
if (_p1DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS)
{
if (_p2DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS && _p3DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p4DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS)
{
_directionDetectionState = 12;
}
else if (_p2DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p3DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS && _p4DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS)
{
_directionDetectionState = 13;
}
else if (_p2DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p3DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p4DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS)
{
_directionDetectionState = 14;
}
}
break;
case 2:
if (_p2DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS)
{
if (_p1DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS && _p3DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p4DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS)
{
_directionDetectionState = 21;
}
else if (_p1DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p3DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS && _p4DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS)
{
_directionDetectionState = 23;
}
else if (_p1DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p3DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p4DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS)
{
_directionDetectionState = 24;
}
}
break;
case 3:
if (_p3DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS)
{
if (_p1DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS && _p2DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p4DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS)
{
_directionDetectionState = 31;
}
else if (_p1DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p2DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS && _p4DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS)
{
_directionDetectionState = 32;
}
else if (_p1DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p2DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p4DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS)
{
_directionDetectionState = 34;
}
}
break;
case 4:
if (_p4DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS)
{
if (_p1DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS && _p2DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p3DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS)
{
_directionDetectionState = 41;
}
else if (_p1DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p2DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS && _p3DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS)
{
_directionDetectionState = 42;
}
else if (_p1DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p2DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS && _p3DetectionCounter < POWER_DETECTION_MAX_POWER_FLIPS)
{
_directionDetectionState = 43;
}
}
break;
}
if (_directionDetectionState > 4)
{
_p1DetectionCounter = _p1DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS ? 1 : 0;
_p2DetectionCounter = _p2DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS ? 1 : 0;
_p3DetectionCounter = _p3DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS ? 1 : 0;
_p4DetectionCounter = _p4DetectionCounter > POWER_DETECTION_MIN_STATE_FLIPS ? 1 : 0;
}
else
{
_directionDetectionState = 0xff;
}
}
return;
}
// detect CLK and DAT pins
if (_directionDetectionState < 44)
{
// detect if pin #1 needs to be monitored and the current change is on pin #1
if (_p1DetectionCounter > 0 && (change & (1 << INPUT_PAD_1)))
{
// check that pin #1 has just transitioned while other monitored pins are Hi at this moment
if ((_p2DetectionCounter > 0 && p2s) || (_p3DetectionCounter > 0 && p3s) || (_p4DetectionCounter > 0 && p4s))
{
if (_p1DetectionCounter < 255)
{
_p1DetectionCounter++;
}
}
}
// detect if pin #2 needs to be monitored and the current change is on pin #2
if (_p2DetectionCounter > 0 && (change & (1 << INPUT_PAD_2)))
{
// check that pin #2 has just transitioned while other monitored pins are Hi at this moment
if ((_p1DetectionCounter > 0 && p1s) || (_p3DetectionCounter > 0 && p3s) || (_p4DetectionCounter > 0 && p4s))
{
if (_p2DetectionCounter < 255)
{
_p2DetectionCounter++;
}
}
}
// detect if pin #3 needs to be monitored and the current change is on pin #3
if (_p3DetectionCounter > 0 && (change & (1 << INPUT_PAD_3)))
{
// check that pin #3 has just transitioned while other monitored pins are Hi at this moment
if ((_p1DetectionCounter > 0 && p1s) || (_p2DetectionCounter > 0 && p2s) || (_p4DetectionCounter > 0 && p4s))
{
if (_p3DetectionCounter < 255)
{
_p3DetectionCounter++;
}
}
}
// detect if pin #4 needs to be monitored and the current change is on pin #4
if (_p4DetectionCounter > 0 && (change & (1 << INPUT_PAD_4)))
{
// check that pin #4 has just transitioned while other monitored pins are Hi at this moment
if ((_p1DetectionCounter > 0 && p1s) || (_p2DetectionCounter > 0 && p2s) || (_p3DetectionCounter > 0 && p3s))
{
if (_p4DetectionCounter < 255)
{
_p4DetectionCounter++;
}
}
}
// detecting clock and data pins
if (_p1DetectionCounter > CLOCK_DETECTION_MIN_STATE_CHANGES ||
_p2DetectionCounter > CLOCK_DETECTION_MIN_STATE_CHANGES ||
_p3DetectionCounter > CLOCK_DETECTION_MIN_STATE_CHANGES ||
_p4DetectionCounter > CLOCK_DETECTION_MIN_STATE_CHANGES)
{
if ((_p1DetectionCounter == 0 || _p1DetectionCounter > CLOCK_DETECTION_MIN_DATA_FLIPS) && (_p2DetectionCounter == 0 || _p2DetectionCounter > CLOCK_DETECTION_MIN_DATA_FLIPS) && (_p3DetectionCounter == 0 || _p3DetectionCounter > CLOCK_DETECTION_MIN_DATA_FLIPS) && (_p4DetectionCounter == 0 || _p4DetectionCounter > CLOCK_DETECTION_MIN_DATA_FLIPS))
{
// at this point GND and VCC pin indices are encoded as two decimal digits (tens = GND pad, units = VCC pad):
//      |    |    |    |    |    | GND
//      | 00 | 01 | 02 | 03 | 04 | n/a
//      | 10 | 11 | 12 | 13 | 14 |  1
//      | 20 | 21 | 22 | 23 | 24 |  2
//      | 30 | 31 | 32 | 33 | 34 |  3
//      | 40 | 41 | 42 | 43 | 44 |  4
//  ----|----|----|----|----|----|----
//  VCC | n/a|  1 |  2 |  3 |  4 |
uint8_t gndPinIndex = 0;
uint8_t vccPinIndex = 0;
uint8_t clkPinIndex = 0;
uint8_t datPinIndex = 0;
if (_directionDetectionState > 40 && _directionDetectionState < 50)
{
gndPinIndex = 4;
vccPinIndex = _directionDetectionState - 40;
}
else if (_directionDetectionState > 30 && _directionDetectionState < 40)
{
gndPinIndex = 3;
vccPinIndex = _directionDetectionState - 30;
}
else if (_directionDetectionState > 20 && _directionDetectionState < 30)
{
gndPinIndex = 2;
vccPinIndex = _directionDetectionState - 20;
}
else if (_directionDetectionState > 10 && _directionDetectionState < 20)
{
gndPinIndex = 1;
vccPinIndex = _directionDetectionState - 10;
}
// out of the counted transitions:
// only a few will be DATA changing while CLK is Hi, which indicates a packet start/end,
// while the majority will be CLK transitioning Hi->Lo while DATA is Hi,
// so the candidate pin with the lower counter is DATA and the other one is CLK
if ((gndPinIndex == 1 && vccPinIndex == 2) || (gndPinIndex == 2 && vccPinIndex == 1))
{
if (_p3DetectionCounter < _p4DetectionCounter)
{
// pin #3 is DATA
datPinIndex = 3;
clkPinIndex = 4;
}
else
{
// pin #4 is DATA
datPinIndex = 4;
clkPinIndex = 3;
}
}
else if ((gndPinIndex == 1 && vccPinIndex == 3) || (gndPinIndex == 3 && vccPinIndex == 1))
{
if (_p2DetectionCounter < _p4DetectionCounter)
{
// pin #2 is DATA
datPinIndex = 2;
clkPinIndex = 4;
}
else
{
// pin #4 is DATA
datPinIndex = 4;
clkPinIndex = 2;
}
}
else if ((gndPinIndex == 1 && vccPinIndex == 4) || (gndPinIndex == 4 && vccPinIndex == 1))
{
if (_p2DetectionCounter < _p3DetectionCounter)
{
// pin #2 is DATA
datPinIndex = 2;
clkPinIndex = 3;
}
else
{
// pin #3 is DATA
datPinIndex = 3;
clkPinIndex = 2;
}
}
else if ((gndPinIndex == 2 && vccPinIndex == 3) || (gndPinIndex == 3 && vccPinIndex == 2))
{
if (_p1DetectionCounter < _p4DetectionCounter)
{
// pin #1 is DATA
datPinIndex = 1;
clkPinIndex = 4;
}
else
{
// pin #4 is DATA
datPinIndex = 4;
clkPinIndex = 1;
}
}
else if ((gndPinIndex == 2 && vccPinIndex == 4) || (gndPinIndex == 4 && vccPinIndex == 2))
{
if (_p1DetectionCounter < _p3DetectionCounter)
{
// pin #1 is DATA
datPinIndex = 1;
clkPinIndex = 3;
}
else
{
// pin #3 is DATA
datPinIndex = 3;
clkPinIndex = 1;
}
}
else if ((gndPinIndex == 3 && vccPinIndex == 4) || (gndPinIndex == 4 && vccPinIndex == 3))
{
if (_p1DetectionCounter < _p2DetectionCounter)
{
// pin #1 is DATA
datPinIndex = 1;
clkPinIndex = 2;
}
else
{
// pin #2 is DATA
datPinIndex = 2;
clkPinIndex = 1;
}
}
PAD_CLCK = GetPinBitIndexInInputPort(clkPinIndex);
PAD_DATA = GetPinBitIndexInInputPort(datPinIndex);
PAD_GND = GetPinBitIndexInInputPort(gndPinIndex);
PAD_VCC = GetPinBitIndexInInputPort(vccPinIndex);
if (PAD_CLCK > 7 || PAD_DATA > 7 || PAD_GND > 7 || PAD_VCC > 7)
{
_directionDetectionState = 0xff;
return;
}
_directionDetectionState = 0xf0;
}
}
}
// all pins detected, wait for current packet end
if (_directionDetectionState == 0xf0)
{
// packet end is indicated by Lo=>Hi transition of DAT pin
// while CLK pin is Hi
if (change & (1 << PAD_DATA))
{
bool dataPinState = (currentPortState & (1 << PAD_DATA)) != 0;
bool clckPinState = (currentPortState & (1 << PAD_CLCK)) != 0;
if (dataPinState && clckPinState)
{
_resetOnPowerChangeCounter = 0;
_directionDetectionState = 0;
_commonBufferTransmitReciveMode = COMMON_BUFFER_MODE_INITIAL;
goto main;
}
}
return;
}
return;
}
if (change & (1 << PAD_VCC) || change & (1 << PAD_GND))
{
if (++_resetOnPowerChangeCounter > MAX_POWER_FLIPS_FOR_RESET)
{
ResetController();
return;
}
}
main:
bool dataPinState = (currentPortState & (1 << PAD_DATA)) != 0;
bool clckPinState = (currentPortState & (1 << PAD_CLCK)) != 0;
// check for transition of DAT pin
if (change & (1 << PAD_DATA))
{
// check for CLK to be Hi, as transition of DAT pin is only valid when CLK is Hi
if (clckPinState)
{
// check for packet "start"
// "start" is indicated by Hi=>Lo transition of DAT pin
if (!dataPinState)
{
_commonBufferTransmitReciveMode = COMMON_BUFFER_MODE_RECEIVE;
_commonBufferTransceiverBitQueue = 0;
_commonBufferTransceiverBitCounter = 0;
_commonBufferWriteIndex = 0;
_uplinkBufferModeAndIndex = 0;
_uplinkBufferReceiverBitQueue = 0;
_uplinkBufferTransmitterBitQueue = 0;
_expectedCommandLengthIncludingPayload = 0xff;
//_datPinDriveState = OpenDrainPinState.Z;
SET_PAD_HI(PAD_DATA);
CFG_PAD_INP(PAD_DATA);
return;
}
// check for packet "end"
// "end" is indicated by Lo=>Hi transition of DAT pin
if (dataPinState)
{
// check if any data received and packet didn't overflow
if (_commonBufferWriteIndex > 0 && _commonBufferTransmitReciveMode == COMMON_BUFFER_MODE_RECEIVE)
{
// decrement the receive buffer write index, as it was already advanced for a next byte that will not arrive
_commonBufferWriteIndex--;
// process command if it looks like a command
if (_commonBuffer[0] == CommonCommandTypes::FireAndForget)
{
ProcessCommand();
}
}
_commonBufferTransmitReciveMode = COMMON_BUFFER_MODE_INITIAL;
_commonBufferTransceiverBitQueue = 0;
_commonBufferTransceiverBitCounter = 0;
_commonBufferWriteIndex = 0;
// process data in uplink buffer
if (_uplinkBufferModeAndIndex != 0)
{
// clear uplink output buffer as everything has now been sent out
ClearUplinkOutBuffer();
// check that -duino is not suspended
// and the packet is intended for -duino consumption
if (!_isDeviceSuspended &&
((_uplinkBufferInp[0] & UplinkCommandTypes::PacketTypeMask) == UplinkCommandTypes::DeviceDataPacket ||
_uplinkBufferInp[0] == UplinkCommandTypes::EmptyPacket))
{
SendDataToDevice();
}
}
_uplinkBufferModeAndIndex = 0;
_uplinkBufferReceiverBitQueue = 0;
_uplinkBufferTransmitterBitQueue = 0;
_expectedCommandLengthIncludingPayload = 0xff;
//_datPinDriveState = OpenDrainPinState.Z;
SET_PAD_HI(PAD_DATA);
CFG_PAD_INP(PAD_DATA);
return;
}
return;
}
return;
}
// check for transition of CLK pin
if (change & (1 << PAD_CLCK))
{
// receive mode logic
if (_commonBufferTransmitReciveMode == COMMON_BUFFER_MODE_RECEIVE)
{
// detect CLK pin going Lo
// this has no significance during listening phase for bits 0..7,
// as state of DAT pin is undefined at this moment (DAT pin state will be valid later on, when CLK pin goes Hi)
// however, we need to take some extra care when we receive extra 9th bit
if (!clckPinState)
{
// we need to drive DAT pin Lo when we receive ACK bit (extra 9th bit)
// plus some analysis of the buffer content
if (_commonBufferTransceiverBitCounter == 8)
{
_resetOnPowerChangeCounter = 0;
bool isCommandValid = true;
// run command specific logic only when uplink is not active
// this works for meaningful data being transmitted over physical pin only
if (_uplinkBufferModeAndIndex == 0)
{
// check if we are receiving a command, and slave address is matching
if (_commonBufferWriteIndex == 4)
{
if (CheckCanHandleCommand() == false)
{
isCommandValid = false;
}
}
// extract command length
else if (_commonBufferWriteIndex == 5)
{
if (_commonBufferTransceiverBitQueue + COMMON_COMMAND_SERVICE_DATA_LENGTH_IN_BYTES >= COMMON_BUFFER_SIZE_IN_BYTES)
{
_commonBufferTransmitReciveMode = COMMON_BUFFER_MODE_INITIAL;
ERRLOG_LogError(Errors::ModuleCommonCommandReceiveOverflow);
isCommandValid = false;
}
else
{
// check if command requires response within same packet
if (_commonBuffer[0] == CommonCommandTypes::NeedResponse)
{
_expectedCommandLengthIncludingPayload = _commonBufferTransceiverBitQueue + COMMON_COMMAND_SERVICE_DATA_LENGTH_IN_BYTES;
}
else
{
_expectedCommandLengthIncludingPayload = 0xff;
}
}
}
// check if the command needs to be processed before the packet "end" condition is received
else if (_commonBufferWriteIndex == _expectedCommandLengthIncludingPayload)
{
// process command with expected response
if (ProcessCommand())
{
// prepare for response mode
_commonBufferTransmitReciveMode = COMMON_BUFFER_MODE_BEFORE_RESPOND;
}
else
{
isCommandValid = false;
}
}
}
if (isCommandValid)
{
// drive DATA pin Lo when state is valid
//_datPinDriveState = OpenDrainPinState.Lo;
CFG_PAD_OUT(PAD_DATA);
SET_PAD_LO(PAD_DATA);
}
else
{
// release DATA pin as we no longer need to occupy it
//_datPinDriveState = OpenDrainPinState.Z;
SET_PAD_HI(PAD_DATA);
CFG_PAD_INP(PAD_DATA);
// switch to termination mode
_commonBufferTransmitReciveMode = COMMON_BUFFER_MODE_BEFORE_TERMINATE;
}
return;
}
return;
}
// check for data bit "clock-in", it is indicated by Lo=>Hi transition of CLK pin
// run "uplink" logic only when activated by magic packet
if (_uplinkBufferModeAndIndex != 0)
{
// check if we are at the very first bit of a byte
if (_commonBufferTransceiverBitCounter == 0)
{
// check for uplink transmit mode
if ((_uplinkBufferModeAndIndex & UPLINK_ACTIVATED_BIT_FLAG) != 0)
{
// get value from output buffer into the output bit queue
uint8_t index = _uplinkBufferModeAndIndex & UPLINK_PACKET_LENGTH_BIT_MASK;
_uplinkBufferTransmitterBitQueue = _uplinkBufferOut[index];
}
}
// this logic runs when we receive ACK bit (extra 9th bit)
if (_commonBufferTransceiverBitCounter == 8)
{
// check for uplink receive mode
if ((_uplinkBufferModeAndIndex & UPLINK_ACTIVATED_BIT_FLAG) != 0)
{
// save input value from input bit queue into input buffer
uint8_t index = _uplinkBufferModeAndIndex & UPLINK_PACKET_LENGTH_BIT_MASK;
_uplinkBufferInp[index] = _uplinkBufferReceiverBitQueue;
}
// reset all counters and advance to the next position
_uplinkBufferReceiverBitQueue = 0;
_uplinkBufferTransmitterBitQueue = 0;
_uplinkBufferModeAndIndex++;
}
// for bits 0..7 we read/write IR data
else
{
// check for uplink activation mode
if ((_uplinkBufferModeAndIndex & UPLINK_ACTIVATED_BIT_FLAG) != 0)
{
// gets bit value from output bit queue and set IR transmitter pin state
if ((_uplinkBufferTransmitterBitQueue & MSB_BIT_MASK) != 0)
{
//_pinIROut.SetState(OpenDrainPinState.Hi);
SET_LNK_HI;
}
else
{
//_pinIROut.SetState(OpenDrainPinState.Lo);
SET_LNK_LO;
}
// shift IR transmitter buffer
_uplinkBufferTransmitterBitQueue = _uplinkBufferTransmitterBitQueue << 1;
// shift IR receiver buffer
_uplinkBufferReceiverBitQueue = _uplinkBufferReceiverBitQueue << 1;
// get IR receiver pin state and persist its value into input bit queue
//if (_pinIRInp.GetState() == OpenDrainPinState.Hi)
if (GET_LNK_HI)
{
_uplinkBufferReceiverBitQueue |= LSB_BIT_MASK;
}
}
}
}
// check for extra 9th bit
if (_commonBufferTransceiverBitCounter == 8)
{
// check for special instructions
// like uplink data pump, for example
if (_commonBufferWriteIndex == 0)
{
if (_commonBuffer[0] == MagicCommonCommands::UplinkDataPump)
{
// set flag to indicate uplink buffer activation
_uplinkBufferModeAndIndex = UPLINK_ACTIVATED_BIT_FLAG;
// clear input buffer before receiving anything
ClearUplinkInpBuffer();
}
}
// release DATA pin, as we no longer need to occupy it
//_datPinDriveState = OpenDrainPinState.Z;
SET_PAD_HI(PAD_DATA);
CFG_PAD_INP(PAD_DATA);
// reset counters, as we are now ready to receive next byte
_commonBufferTransceiverBitQueue = 0;
_commonBufferTransceiverBitCounter = 0;
// check for buffer overflow
if (++_commonBufferWriteIndex > COMMON_BUFFER_SIZE_IN_BYTES)
{
_commonBufferTransmitReciveMode = COMMON_BUFFER_MODE_INITIAL;
ERRLOG_LogError(Errors::ModuleCommonCommandReceiveOverflow);
}
return;
}
// shift DATA buffer
_commonBufferTransceiverBitQueue = _commonBufferTransceiverBitQueue << 1;
// get DATA pin state and persist its value into input bit queue
if (dataPinState)
{
_commonBufferTransceiverBitQueue |= LSB_BIT_MASK;
}
if (++_commonBufferTransceiverBitCounter == 8)
{
// append received byte into buffer
_commonBuffer[_commonBufferWriteIndex] = _commonBufferTransceiverBitQueue;
}
return;
}
// logic for transition from receive to response
if (_commonBufferTransmitReciveMode == COMMON_BUFFER_MODE_BEFORE_RESPOND)
{
// command is processed while CLK pin is driven Lo by master during transmission of 9th extra bit
// we now need to do some extra logic when CLK goes Hi and prepare slave for transmitting data back to master
if (clckPinState)
{
// release DATA pin, as we no longer need to occupy it
//_datPinDriveState = OpenDrainPinState.Z;
SET_PAD_HI(PAD_DATA);
CFG_PAD_INP(PAD_DATA);
// reset read index as we will use it for sending data back to master
_commonBufferReadIndex = 0;
// load first byte into transmission queue
_commonBufferTransceiverBitCounter = 0;
_commonBufferTransceiverBitQueue = _commonBuffer[_commonBufferReadIndex];
// switch to response mode
_commonBufferTransmitReciveMode = COMMON_BUFFER_MODE_RESPOND;
return;
}
return;
}
// logic for transition from receive to terminate
if (_commonBufferTransmitReciveMode == COMMON_BUFFER_MODE_BEFORE_TERMINATE)
{
// the command was rejected while CLK was driven Lo by the master during the 9th extra bit
// once CLK goes Hi we release the DATA pin and fall back to the initial state
if (clckPinState)
{
// release DATA pin, as we no longer need to occupy it
//_datPinDriveState = OpenDrainPinState.Z;
SET_PAD_HI(PAD_DATA);
CFG_PAD_INP(PAD_DATA);
// reset read index back to the start of the buffer
_commonBufferReadIndex = 0;
// clear the transmission queue, nothing will be sent back
_commonBufferTransceiverBitCounter = 0;
_commonBufferTransceiverBitQueue = 0;
// switch to initial mode
_commonBufferTransmitReciveMode = COMMON_BUFFER_MODE_INITIAL;
return;
}
return;
}
// response mode logic
if (_commonBufferTransmitReciveMode == COMMON_BUFFER_MODE_RESPOND)
{
// check if it is OK to set data bit
// it is indicated by Hi=>Lo transition of CLK pin
if (!clckPinState)
{
if (_commonBufferTransceiverBitCounter == 8)
{
// for extra 9th bit we need to release data pin as hub will drive it low by itself
//_datPinDriveState = OpenDrainPinState.Z;
SET_PAD_HI(PAD_DATA);
CFG_PAD_INP(PAD_DATA);
// and move to next byte now
_commonBufferTransceiverBitCounter = 0;
if (++_commonBufferReadIndex == COMMON_BUFFER_SIZE_IN_BYTES)
{
_commonBufferReadIndex--;
}
_commonBufferTransceiverBitQueue = _commonBuffer[_commonBufferReadIndex];
return;
}
// for bits 0..7 (8 actual bits) we actively drive data pin when needed (need to transmit 0 zero)
if ((_commonBufferTransceiverBitQueue & MSB_BIT_MASK) != 0)
{
//_datPinDriveState = OpenDrainPinState.Z;
SET_PAD_HI(PAD_DATA);
CFG_PAD_INP(PAD_DATA);
}
else
{
//_datPinDriveState = OpenDrainPinState.Lo;
CFG_PAD_OUT(PAD_DATA);
SET_PAD_LO(PAD_DATA);
}
_commonBufferTransceiverBitQueue = _commonBufferTransceiverBitQueue << 1;
_commonBufferTransceiverBitCounter++;
return;
}
return;
}
return;
}
}
bool CheckCanHandleCommand()
{
// command structure so far is as follows:
// byte 0 - command header (0xAA or 0xAB)
// byte 1..4 - 32-bit slave address
// reset read index as we will be reading now
_commonBufferReadIndex = 0;
// check command header
uint8_t commandType = ReadNextByteFromCommonBuffer();
if (commandType != CommonCommandTypes::FireAndForget && commandType != CommonCommandTypes::NeedResponse)
{
return false;
}
// extract target slave ID from command and compare
uint32_t targetAddress = ReadNextWordFromCommonBuffer();
if (targetAddress != 0 && _address != targetAddress)
{
return false;
}
// reset read index again as something else will process buffer later
_commonBufferReadIndex = 0;
return true;
}
bool ProcessCommand()
{
// command structure is as follows:
// byte 0 - command header (0xAA or 0xAB)
// byte 1..4 - 32-bit slave address
// byte 5 - length of command + payload (everything from byte 6 up to, but not including, the CRC)
// byte 6..9 - 32-bit command
// byte 10..N-1 - payload
// byte N - CRC8
// reset read index as we will be reading now
_commonBufferReadIndex = 0;
// check command header
uint8_t commandType = ReadNextByteFromCommonBuffer();
if (commandType != CommonCommandTypes::FireAndForget && commandType != CommonCommandTypes::NeedResponse)
{
ERRLOG_LogError(Errors::ModuleCommonCommandInvalidPrefix);
return false;
}
// extract target slave ID from command and compare
uint32_t targetAddress = ReadNextWordFromCommonBuffer();
if (targetAddress != 0)
{
if (_address != targetAddress)
{
return false;
}
_isDiscovered = true;
}
// get command length
uint8_t commandLengthIncludingPayload = ReadNextByteFromCommonBuffer();
// validate buffer position
if (commandLengthIncludingPayload + COMMON_COMMAND_SERVICE_DATA_LENGTH_IN_BYTES != _commonBufferWriteIndex)
{
ERRLOG_LogError(Errors::ModuleCommonCommandLengthMismatch);
return false;
}
// compare CRC
if (_commonBuffer[_commonBufferWriteIndex] != CRC_CalculateBufferCrc(_commonBuffer, _commonBufferWriteIndex))
{
ERRLOG_LogError(Errors::ModuleCommonCommandCrcMismatch);
return false;
}
uint8_t commandPayloadLength = commandLengthIncludingPayload - 4;
uint32_t commandId = ReadNextWordFromCommonBuffer();
// reset write index as we might need to write now
_commonBufferWriteIndex = 0;
switch (commandId)
{
case Commands::CRST:
{
ResetController();
break;
}
case Commands::CREQ:
{
if (_isControllerSuspended) return false;
uint32_t slaveAddressBitMask = ReadNextWordFromCommonBuffer();
if (slaveAddressBitMask == 0 || (_address & slaveAddressBitMask) == slaveAddressBitMask)
{
WriteResponseHeaderByteToCommonBuffer(8, false);
WriteNextWordToCommonBuffer(_address);
WriteNextWordToCommonBuffer(_address ^ 0xffffffff);
break;
}
return false;
}
case Commands::CGID:
{
if (_isControllerSuspended) return false;
WriteResponseHeaderByteToCommonBuffer(6, true);
WriteNextWordToCommonBuffer(_address);
WriteNextByteToCommonBuffer(VERSION_MAJOR);
WriteNextByteToCommonBuffer(VERSION_MINOR);
break;
}
case Commands::CSLB:
{
if (_isControllerSuspended) return false;
eeprom_write_byte((uint8_t*)0, ReadNextByteFromCommonBuffer());
break;
}
case Commands::CGLB:
{
if (_isControllerSuspended) return false;
WriteResponseHeaderByteToCommonBuffer(1, true);
WriteNextByteToCommonBuffer(eeprom_read_byte((uint8_t*)0));
break;
}
case Commands::CERR:
{
if (_isControllerSuspended) return false;
uint8_t errors[ERRLOG_MAX_ERRORS];
ERRLOG_GetErrors(errors);
WriteResponseHeaderByteToCommonBuffer(ERRLOG_MAX_ERRORS, true);
for (uint8_t i = 0; i < ERRLOG_MAX_ERRORS; i++)
{
WriteNextByteToCommonBuffer(errors[i]);
}
break;
}
case Commands::CSLP:
_isControllerSuspended = true;
break;
case Commands::CWUP:
_isControllerSuspended = false;
break;
case Commands::CNEW:
{
if (_isControllerSuspended) return false;
// it is counter-intuitive to respond with 0 when the controller is new,
// but when multiple bricks are connected a new brick answering 0 pulls the signal line low,
// so a single new brick can be discovered even among already-discovered ones
WriteOneByteResponseToCommonBuffer(_isDiscovered ? 127 : 0);
break;
}
case Commands::UWRI:
{
if (targetAddress == 0) return false;
if (commandPayloadLength > UPLINK_BUFFER_SIZE_IN_BYTES)
{
ERRLOG_LogError(Errors::ModuleUplinkCommandTooLongPayload);
break;
}
for (uint8_t i = 0; i < commandPayloadLength; i++)
{
_uplinkBufferInp[i] = ReadNextByteFromCommonBuffer();
}
for (uint8_t i = commandPayloadLength; i < UPLINK_BUFFER_SIZE_IN_BYTES; i++)
{
_uplinkBufferInp[i] = 0;
}
break;
}
case Commands::UWRO:
{
if (targetAddress == 0) return false;
if (commandPayloadLength > UPLINK_BUFFER_SIZE_IN_BYTES)
{
ERRLOG_LogError(Errors::ModuleUplinkCommandTooLongPayload);
break;
}
for (uint8_t i = 0; i < commandPayloadLength; i++)
{
_uplinkBufferOut[i] = ReadNextByteFromCommonBuffer();
}
for (uint8_t i = commandPayloadLength; i < UPLINK_BUFFER_SIZE_IN_BYTES; i++)
{
_uplinkBufferOut[i] = 0;
}
break;
}
case Commands::URDI:
{
if (targetAddress == 0) return false;
WriteResponseHeaderByteToCommonBuffer(UPLINK_BUFFER_SIZE_IN_BYTES, true);
for (uint8_t i = 0; i < UPLINK_BUFFER_SIZE_IN_BYTES; i++)
{
WriteNextByteToCommonBuffer(_uplinkBufferInp[i]);
}
break;
}
case Commands::URDO:
{
if (targetAddress == 0) return false;
WriteResponseHeaderByteToCommonBuffer(UPLINK_BUFFER_SIZE_IN_BYTES, true);
for (uint8_t i = 0; i < UPLINK_BUFFER_SIZE_IN_BYTES; i++)
{
WriteNextByteToCommonBuffer(_uplinkBufferOut[i]);
}
break;
}
case Commands::UCPY:
{
for (uint8_t i = 0; i < UPLINK_BUFFER_SIZE_IN_BYTES; i++)
{
_uplinkBufferOut[i] = _uplinkBufferInp[i];
}
break;
}
case Commands::UMOV:
{
for (uint8_t i = 0; i < UPLINK_BUFFER_SIZE_IN_BYTES; i++)
{
_uplinkBufferOut[i] = _uplinkBufferInp[i];
_uplinkBufferInp[i] = 0;
}
break;
}
case Commands::UCCI:
{
if (commandPayloadLength > UPLINK_BUFFER_SIZE_IN_BYTES)
{
ERRLOG_LogError(Errors::ModuleUplinkCommandTooLongPayload);
break;
}
for (uint8_t i = 0; i < commandPayloadLength; i++)
{
if (_uplinkBufferInp[i] != ReadNextByteFromCommonBuffer())
{
return false;
}
}
WriteResponseHeaderByteToCommonBuffer(8, false);
WriteNextWordToCommonBuffer(_address);
WriteNextWordToCommonBuffer(_address ^ 0xffffffff);
break;
}
case Commands::UCCO:
{
if (commandPayloadLength > UPLINK_BUFFER_SIZE_IN_BYTES)
{
ERRLOG_LogError(Errors::ModuleUplinkCommandTooLongPayload);
break;
}
for (uint8_t i = 0; i < commandPayloadLength; i++)
{
if (_uplinkBufferOut[i] != ReadNextByteFromCommonBuffer())
{
return false;
}
}
WriteResponseHeaderByteToCommonBuffer(8, false);
WriteNextWordToCommonBuffer(_address);
WriteNextWordToCommonBuffer(_address ^ 0xffffffff);
break;
}
case Commands::DRST:
{
if (_isControllerSuspended) return false;
ResetDevice();
break;
}
case Commands::DSLP:
{
if (_isControllerSuspended) return false;
_isDeviceSuspended = true;
break;
}
case Commands::DWUP:
{
if (_isControllerSuspended) return false;
_isDeviceSuspended = false;
break;
}
case Commands::DMSG:
{
if (_isDeviceSuspended)
{
break;
}
uint8_t writeIndexWithinPacket = 0;
// prepend mandatory packet header
_uplinkBufferInp[writeIndexWithinPacket++] = UplinkCommandTypes::DeviceMessagePacket | (commandPayloadLength + 2);
// prepend mandatory message length
_uplinkBufferInp[writeIndexWithinPacket++] = commandPayloadLength + 1;
// prepend message type to signify forwarded command
_uplinkBufferInp[writeIndexWithinPacket++] = DeviceCommands::DeviceCommandIdentifier;
for (uint8_t i = 0; i < commandPayloadLength; i++)
{
_uplinkBufferInp[writeIndexWithinPacket++] = ReadNextByteFromCommonBuffer();
}
_uplinkBufferModeAndIndex = 0;
SendDataToDevice();
uint8_t responseLength = _uplinkBufferOut[1];
WriteResponseHeaderByteToCommonBuffer(responseLength, true);
for (uint8_t i = 2; i < 2 + responseLength; i++)
{
WriteNextByteToCommonBuffer(_uplinkBufferOut[i]);
}
break;
}
default:
return false;
}
// by default, calculate CRC only for commands that need response
if (commandType == CommonCommandTypes::NeedResponse &&
_commonBufferWriteIndex > 1 &&
(_commonBuffer[0] & COMMON_BUFFER_ONE_BYTE_RESPONSE_BIT_FLAG) == 0 &&
(_commonBuffer[0] & COMMON_BUFFER_RESPONSE_CALCULATE_CRC_BIT_FLAG) != 0)
{
WriteNextByteToCommonBuffer(CRC_CalculateBufferCrc(_commonBuffer, _commonBufferWriteIndex));
}
return true;
}
uint8_t ReadNextByteFromCommonBuffer()
{
return _commonBuffer[_commonBufferReadIndex++];
}
uint32_t ReadNextWordFromCommonBuffer()
{
uint32_t word = _commonBuffer[_commonBufferReadIndex++];
word = (word << 8) | _commonBuffer[_commonBufferReadIndex++];
word = (word << 8) | _commonBuffer[_commonBufferReadIndex++];
word = (word << 8) | _commonBuffer[_commonBufferReadIndex++];
return word;
}
void WriteResponseHeaderByteToCommonBuffer(uint8_t length, bool calculateCrc)
{
if (length > COMMON_BUFFER_RESPONSE_PACKET_LENGTH_BIT_MASK)
{
return;
}
if (length == 0)
{
WriteNextByteToCommonBuffer(0);
return;
}
uint8_t b = length;
if (calculateCrc)
{
b |= COMMON_BUFFER_RESPONSE_CALCULATE_CRC_BIT_FLAG;
}
WriteNextByteToCommonBuffer(b);
}
void WriteOneByteResponseToCommonBuffer(uint8_t v)
{
if (v > COMMON_BUFFER_ONE_BYTE_RESPONSE_CONTENT_BIT_MASK)
{
ERRLOG_LogError(Errors::ModuleCommonBufferSingleByteResponseToLarge);
return;
}
WriteNextByteToCommonBuffer(v | COMMON_BUFFER_ONE_BYTE_RESPONSE_BIT_FLAG);
}
void WriteNextByteToCommonBuffer(uint8_t v)
{
if (_commonBufferWriteIndex >= COMMON_BUFFER_SIZE_IN_BYTES)
{
ERRLOG_LogError(Errors::ModuleCommonBufferOnWriteByteOverflow);
return;
}
_commonBuffer[_commonBufferWriteIndex++] = v;
}
void WriteNextWordToCommonBuffer(uint32_t v)
{
if (_commonBufferWriteIndex + 4 >= COMMON_BUFFER_SIZE_IN_BYTES)
{
ERRLOG_LogError(Errors::ModuleCommonBufferOnWriteWordOverflow);
return;
}
_commonBuffer[_commonBufferWriteIndex++] = (v >> 24) & 0xff;
_commonBuffer[_commonBufferWriteIndex++] = (v >> 16) & 0xff;
_commonBuffer[_commonBufferWriteIndex++] = (v >> 8) & 0xff;
_commonBuffer[_commonBufferWriteIndex++] = (v & 0xff);
}
void ClearUplinkInpBuffer()
{
for (uint8_t i = 0; i < UPLINK_BUFFER_SIZE_IN_BYTES; i++)
{
_uplinkBufferInp[i] = 0;
}
}
void ClearUplinkOutBuffer()
{
for (uint8_t i = 0; i < UPLINK_BUFFER_SIZE_IN_BYTES; i++)
{
_uplinkBufferOut[i] = 0;
}
}
void SendDataToDevice()
{
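// uplink packet header (byte 0): the upper bits select the packet type, the lower bits hold
// the payload length; data packets additionally carry a trailing CRC8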
//USART_Write("SendDataToDevice");
//USART_WriteChar('\r');
uint8_t commandType = _uplinkBufferInp[0] & UplinkCommandTypes::PacketTypeMask;
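// a header with all packet-type bits set does not match any valid packet type
// and is treated as a disconnected device-side uplink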
if (commandType == UplinkCommandTypes::PacketTypeMask)
{
ERRLOG_LogError(Errors::DeviceUplinkDisconnected);
return;
}
uint8_t packetLength = _uplinkBufferInp[0] & UplinkCommandTypes::PacketSizeMask;
if (commandType == UplinkCommandTypes::DeviceEmptyPacket)
{
if (_uplinkBufferInp[0] != UplinkCommandTypes::EmptyPacket)
{
ERRLOG_LogError(Errors::DeviceUplinkShortToGround);
return;
}
}
else if (commandType == UplinkCommandTypes::DeviceDataPacket)
{
if (packetLength + UPLINK_COMMAND_SERVICE_DATA_LENGTH_IN_BYTES > (_uplinkBufferModeAndIndex & UPLINK_PACKET_LENGTH_BIT_MASK))
{
ERRLOG_LogError(Errors::DeviceCommandInsufficientPacketLength);
return;
}
// exit if the CRC of the data does not match
if (CRC_CalculateBufferCrc(_uplinkBufferInp, packetLength + 1) != _uplinkBufferInp[packetLength + 1])
{
ERRLOG_LogError(Errors::DeviceCommandCrcMismatch);
return;
}
}
else if (commandType == UplinkCommandTypes::DeviceMessagePacket)
{
if (_uplinkBufferModeAndIndex != 0)
{
ERRLOG_LogError(Errors::DeviceForwardMessageInvalidOrigin);
return;
}
}
uint8_t deviceResponseDataLength = DEV_ProcessMessage(&_uplinkBufferInp[1], packetLength - 1, &_uplinkBufferOut[1]);
if (deviceResponseDataLength > UplinkCommandTypes::PacketSizeMask + 1)
{
ERRLOG_LogError(Errors::DeviceRespondedWithTooLongPacket);
return;
}
if (deviceResponseDataLength == 0)
{
_uplinkBufferOut[0] = UplinkCommandTypes::EmptyPacket;
}
else
{
_uplinkBufferOut[0] = UplinkCommandTypes::DeviceDataPacket | deviceResponseDataLength;
_uplinkBufferOut[deviceResponseDataLength + 1] = CRC_CalculateBufferCrc(_uplinkBufferOut, deviceResponseDataLength + 1);
}
}
void ResetController()
{
//wdt_enable(WDTO_250MS);
DEV_Reset();
//while (true) {};
_resetOnPowerChangeCounter = 0;
PAD_CLCK = PAD_DATA = PAD_VCC = PAD_GND = 0xff;
_directionDetectionState = 0xff;
_p1DetectionCounter = _p2DetectionCounter = _p3DetectionCounter = _p4DetectionCounter = 0;
_isDiscovered = false;
_isControllerSuspended = false;
_isDeviceSuspended = false;
_commonBufferWriteIndex = 0;
_commonBufferReadIndex = 0;
_commonBufferTransmitReciveMode = 0;
_commonBufferTransceiverBitQueue = 0;
_commonBufferTransceiverBitCounter = 0;
_expectedCommandLengthIncludingPayload = 0;
_uplinkBufferModeAndIndex = 0;
_uplinkBufferReceiverBitQueue = 0;
_uplinkBufferTransmitterBitQueue = 0;
}
void ResetDevice()
{
DEV_Reset();
}
uint8_t GetPinBitIndexInInputPort(uint8_t pinIndex)
{
switch (pinIndex)
{
case 1:
return INPUT_PAD_1;
case 2:
return INPUT_PAD_2;
case 3:
return INPUT_PAD_3;
case 4:
return INPUT_PAD_4;
default:
return 0xff;
}
}<file_sep>/config.h
#ifndef CONFIG_H_
#define CONFIG_H_
//#define SYNCHRON
#define F_CPU 16000000UL
#define UART_RX0_BUFFER_SIZE 32
#define UART_TX0_BUFFER_SIZE 32
#endif /* CONFIG_H_ */<file_sep>/error_logger.h
#ifndef __ERROR_LOGGER_H__
#define __ERROR_LOGGER_H__
const uint8_t ERRLOG_MAX_ERRORS = 16;
void ERRLOG_LogError(uint8_t errorCode);
void ERRLOG_ClearErrors();
void ERRLOG_GetErrors(uint8_t * target);
uint8_t ERRLOG_GetErrorCount();
#endif //__ERROR_LOGGER_H__
<file_sep>/adc.cpp
#include <stdlib.h>
#include <stdint.h>
#include <avr/io.h>
#include "adc.h"
void ADC_init()
{
// Set the ADC pre-scaler to 128 (i.e., 16MHz/128 = 125KHz)
ADCSRA |= (1 << ADPS2) | (1 << ADPS1) | (1 << ADPS0);
// Set the voltage reference to AVcc (i.e., 5V).
ADMUX |= (1 << REFS0 );
// Turn on the ADC.
ADCSRA |= (1 << ADEN);
ADCSRA |= (1 << ADSC);
}
uint16_t ADC_read(uint8_t channel)
{
// Clear the previously read channel.
ADMUX &= 0xf0;
// Select the ADC channel to be read.
ADMUX |= channel;
// Start conversion.
ADCSRA |= (1 << ADSC);
// Wait till conversion finishes.
while ((ADCSRA & (1 << ADIF)) == 0) {}
// Get ADC value.
uint16_t result = ADC;
// Clear the ADIF flag.
ADCSRA |= ( 1 << ADIF );
return result;
}<file_sep>/usart.cpp
#include "config.h"
#include <stdlib.h>
#include <stdint.h>
#include <avr/io.h>
#include <avr/interrupt.h>
#include "uart.h"
#include "usart.h"
void USART_Init(uint32_t baudRate)
{
uart0_init(UART_BAUD_SELECT(baudRate, F_CPU));
}
bool USART_CopyAvailableTillLineEnd(char * target, uint8_t & cnt)
{
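// drains the receive buffer into target until a line end (CR or LF) is seen;
// consecutive CR/LF characters are swallowed so a CRLF pair terminates only one line;
// values above 0xFF from uart0_getc()/uart0_peek() signal "no data" (or a receive error) and stop the loop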
bool lineEndReceived = false;
for (;;)
{
uint16_t fc = uart0_getc();
if (fc > 0xFF) break;
char c = fc & 0xFF;
if (c == '\r' || c == '\n')
{
lineEndReceived = true;
fc = uart0_peek();
if (fc > 0xFF) break;
c = fc & 0xFF;
if (c != '\r' && c != '\n')
{
break;
}
}
else
{
target[cnt++] = c;
}
}
return lineEndReceived;
}
void USART_WriteChar(char data)
{
uart0_putc(data);
}
void USART_Write(char * stringPtr)
{
while (*stringPtr != 0)
{
uart0_putc(*stringPtr++);
}
}
void USART_Write(char * stringPtr, uint8_t length)
{
for (uint8_t i = 0; i < length; i++)
{
uart0_putc(stringPtr[i]);
}
}
void USART_Write(const char * stringPtr)
{
while (*stringPtr != 0)
{
uart0_putc(*stringPtr++);
}
}
void USART_Write(const char * stringPtr, uint8_t length)
{
for (uint8_t i = 0; i < length; i++)
{
uart0_putc(stringPtr[i]);
}
}
void USART_Write(volatile char * stringPtr)
{
while (*stringPtr != 0)
{
uart0_putc(*stringPtr++);
}
}
void USART_Write(volatile char * stringPtr, uint8_t length)
{
for (uint8_t i = 0; i < length; i++)
{
uart0_putc(stringPtr[i]);
}
}
void USART_WriteBoolResultCode(bool b)
{
uart0_putc('$');
USART_WriteByteAsHex(b ? 0x00 : 0xFF);
}
void USART_WriteByteResultCode(uint8_t b)
{
uart0_putc('$');
USART_WriteByteAsHex(b);
}
void USART_WriteByteAsHex(uint8_t b)
{
uint8_t v = b >> 4;
v += v > 9 ? 55 : '0';
uart0_putc(v);
v = b & 0x0F;
v += v > 9 ? 55 : '0';
uart0_putc(v);
}
void USART_WriteShortAsHex(uint16_t s)
{
USART_WriteByteAsHex((uint8_t)(s >> 8));
USART_WriteByteAsHex((uint8_t)s);
}
void USART_WriteIntAsHex(uint32_t i)
{
USART_WriteByteAsHex((uint8_t)(i >> 24));
USART_WriteByteAsHex((uint8_t)(i >> 16));
USART_WriteByteAsHex((uint8_t)(i >> 8));
USART_WriteByteAsHex((uint8_t)i);
}<file_sep>/hub_interface.h
#ifndef __HUB_INTERFACE_H__
#define __HUB_INTERFACE_H__
#include <stdlib.h>
#include <stdint.h>
void HUB_Initialize();
void HUB_SetDelay(uint8_t delayId, uint8_t value);
uint8_t HUB_GetDelay(uint8_t delayId);
uint8_t * HUB_GetBufferContent();
bool HUB_SendCommandToAll(uint32_t command);
bool HUB_SendCommandToAll(uint32_t command, uint32_t payload);
bool HUB_SendCommandToAll(uint32_t command, uint8_t * payload, uint8_t payloadLength);
bool HUB_SendCommand(uint32_t slave, uint32_t command);
bool HUB_SendCommand(uint32_t slave, uint32_t command, uint32_t payload);
bool HUB_SendCommand(uint32_t slave, uint32_t command, uint8_t * payload, uint8_t payloadLength);
bool HUB_SendCommandToAllWithExpectedResponse(uint32_t command);
bool HUB_SendCommandToAllWithExpectedResponse(uint32_t command, uint32_t payload);
bool HUB_SendCommandToAllWithExpectedResponse(uint32_t command, uint8_t * payload, uint8_t payloadLength);
bool HUB_SendCommandWithExpectedResponse(uint32_t slave, uint32_t command);
bool HUB_SendCommandWithExpectedResponse(uint32_t slave, uint32_t command, uint32_t payload);
bool HUB_SendCommandWithExpectedResponse(uint32_t slave, uint32_t command, uint8_t * payload, uint8_t payloadLength);
uint8_t HUB_DetectNewSlaves();
uint8_t HUB_DetectSlaves(uint32_t * foundSlaveAddresses, uint8_t maxSlavesToReturn);
uint8_t HUB_DetectSlavesSequence(uint32_t * foundSlaveAddresses, uint8_t maxSlavesToReturn);
bool HUB_SendEmptyDuinoCommand();
bool HUB_SendUplinkPumpPacket();
bool HUB_SendUplinkPacket(uint8_t * payload, uint8_t length);
#endif //__HUB_INTERFACE_H__
<file_sep>/error_logger.cpp
#include <stdint.h>
#include "error_logger.h"
static uint8_t _errors[ERRLOG_MAX_ERRORS];
static uint8_t _errorCount;
void ERRLOG_LogError(uint8_t errorCode)
{
if (errorCode == 0)
{
return;
}
for (uint8_t i = ERRLOG_MAX_ERRORS - 1; i > 0; i--)
{
_errors[i] = _errors[i - 1];
}
_errors[0] = errorCode;
if (_errorCount < 255)
{
_errorCount++;
}
}
void ERRLOG_GetErrors(uint8_t * target)
{
for (uint8_t i = 0; i < ERRLOG_MAX_ERRORS; i++)
{
target[i] = _errors[i];
}
}
uint8_t ERRLOG_GetErrorCount()
{
return _errorCount;
}
void ERRLOG_ClearErrors()
{
for (uint8_t i = 0; i < ERRLOG_MAX_ERRORS; i++)
{
_errors[i] = 0;
}
_errorCount = 0;
}
<file_sep>/crc.h
#ifndef __CRC_H__
#define __CRC_H__
uint8_t CRC_CalculateBufferCrc(uint8_t * buffer, uint8_t length);
#endif //__CRC_H__
<file_sep>/errors.h
#ifndef ERRORS_H_
#define ERRORS_H_
class Errors
{
public:
static const uint8_t HubCommonInterfaceDataPinDrivenLowOnPacketStart = 0x11;
static const uint8_t HubCommonInterfaceClckPinIsLowOnPacketEnd = 0x12;
static const uint8_t HubCommonInterfaceDataPinDrivenLowOnPacketEnd = 0x13;
static const uint8_t HubCommonBufferOnResponseReadOverflow = 0x21;
static const uint8_t HubCommonBufferOnResponseReadCrcMismatch = 0x22;
static const uint8_t HubCommonBufferOnWriteByteOverflow = 0x23;
static const uint8_t HubCommonBufferOnWriteWordOverflow = 0x24;
static const uint8_t HubUplinkBufferOnWriteByteOverflow = 0x2a;
static const uint8_t ModuleCommonBufferSingleByteResponseToLarge = 0x30;
static const uint8_t ModuleCommonBufferOnWriteByteOverflow = 0x33;
static const uint8_t ModuleCommonBufferOnWriteWordOverflow = 0x34;
static const uint8_t ModuleCommonCommandLengthMismatch = 0x40;
static const uint8_t ModuleCommonCommandCrcMismatch = 0x41;
static const uint8_t ModuleCommonCommandReceiveOverflow = 0x42;
static const uint8_t ModuleCommonCommandInvalidPrefix = 0x43;
static const uint8_t ModuleUplinkCommandTooLongPayload = 0x4a;
static const uint8_t DeviceCommandInsufficientPacketLength = 0x80;
static const uint8_t DeviceCommandCrcMismatch = 0x81;
static const uint8_t DeviceNotReadyForPacket = 0x82;
static const uint8_t DeviceNotAcknowledgedPacketStart = 0x83;
static const uint8_t DeviceNotAcknowledgedPacketEnd = 0x84;
static const uint8_t DeviceRespondedWithTooLongPacket = 0x85;
static const uint8_t DeviceUplinkDisconnected = 0x86;
static const uint8_t DeviceUplinkShortToGround = 0x87;
static const uint8_t DeviceForwardMessageInvalidOrigin = 0x88;
static const uint8_t InvalidOneByteResponse = 0xff;
};
#endif /* ERRORS_H_ */<file_sep>/main.cpp
#include "config.h"
#include <stdlib.h>
#include <stdint.h>
#include <avr/io.h>
#include <avr/interrupt.h>
#include <avr/wdt.h>
#include <util/delay.h>
#include <util/atomic.h>
#include "usart.h"
#include "hub_interface.h"
#include "mod_interface.h"
#include "error_logger.h"
#include "commands.h"
#include "common.h"
static volatile uint8_t _previousInputPinsState;
static uint8_t HexToByte(uint8_t h1, uint8_t h2);
// This function is called upon a HARDWARE RESET:
void get_mcusr(void) __attribute__((naked)) __attribute__((section(".init3")));
void get_mcusr(void)
{
MCUSR = 0;
wdt_disable();
}
//ISR (_VECTOR(1)) { USART_WriteChar('1'); }
//ISR (_VECTOR(2)) { USART_WriteChar('2'); }
//ISR (_VECTOR(3)) { USART_WriteChar('3'); }
//ISR (_VECTOR(4)) { USART_WriteChar('4'); }
//ISR (_VECTOR(5)) { USART_WriteChar('5'); }
//ISR (_VECTOR(6)) { USART_WriteChar('6'); }
//ISR (_VECTOR(7)) { USART_WriteChar('7'); }
//ISR (_VECTOR(8)) { USART_WriteChar('8'); }
//ISR (_VECTOR(9)) { USART_WriteChar('9'); }
//ISR (_VECTOR(10)) { USART_WriteChar('A'); }
//ISR (_VECTOR(11)) { USART_WriteChar('B'); }
//ISR (_VECTOR(12)) { USART_WriteChar('C'); }
//ISR (_VECTOR(13)) { USART_WriteChar('D'); }
//ISR (_VECTOR(14)) { USART_WriteChar('E'); }
//ISR (_VECTOR(15)) { USART_WriteChar('F'); }
//ISR (_VECTOR(16)) { USART_WriteChar('G'); }
//ISR (_VECTOR(17)) { USART_WriteChar('H'); }
//ISR (_VECTOR(18)) { USART_WriteChar('I'); }
//ISR (_VECTOR(19)) { USART_WriteChar('J'); }
//ISR (_VECTOR(20)) { USART_WriteChar('K'); }
//ISR (_VECTOR(21)) { USART_WriteChar('L'); }
//ISR (_VECTOR(22)) { USART_WriteChar('M'); }
//ISR (_VECTOR(23)) { USART_WriteChar('N'); }
//ISR (_VECTOR(24)) { USART_WriteChar('O'); }
//ISR (_VECTOR(25)) { USART_WriteChar('P'); }
//ISR (_VECTOR(26)) { USART_WriteChar('Q'); }
//ISR (_VECTOR(27)) { USART_WriteChar('R'); }
//ISR (_VECTOR(28)) { USART_WriteChar('S'); }
//ISR (_VECTOR(29)) { USART_WriteChar('T'); }
//ISR (_VECTOR(30)) { USART_WriteChar('U'); }
//ISR (_VECTOR(31)) { USART_WriteChar('V'); }
//ISR (_VECTOR(32)) { USART_WriteChar('W'); }
//ISR (_VECTOR(33)) { USART_WriteChar('X'); }
//ISR (_VECTOR(34)) { USART_WriteChar('Y'); }
//ISR (_VECTOR(35)) { USART_WriteChar('Z'); }
ISR (BADISR_vect)
{
USART_WriteChar('!');
}
// ==================================================
// Watchdog timer elapsed interrupt handling routine.
// ==================================================
ISR (WDT_vect)
{
USART_WriteChar('W');
}
// ==================================================
// Pin state change interrupt handling routine.
// ==================================================
ISR (PCINT1_vect)
{
ATOMIC_BLOCK(ATOMIC_RESTORESTATE)
{
uint8_t currentInputPinsState = PINC;
MOD_PadPortStateChanged(currentInputPinsState, _previousInputPinsState);
_previousInputPinsState = PINC;
		// Toggling the DAT pin between input and output modes will cause a new pin change interrupt.
		// This is due to the pull-up resistor on the DAT pin.
		// For example, if the pin was in output mode and driven Lo, then when it is released (switched to input mode),
		// the pull-up resistor will pull it Hi. This change will be detected as an external change to the pin state,
		// and a pin change interrupt will occur.
		// To prevent this, we clear the interrupt flag which could have been set while the logic within this interrupt was running.
PCIFR &= ~(1 << PCIF1);
}
}
int main(void)
{
// Disable interrupts.
cli();
HUB_Initialize();
// DDRx: 0 - input
// 1 - output
// PINx: read pin state
// PORTx: write port state
// 0 - sink (Lo)
// 1 - source (Hi)
// PUDx: 0 - pull up enabled
// 1 - pull up disabled for entire register (i.e. all bits in 'x' port)
// Configure mod:
// common connection pads: DDC0-DDC3
// uplink input: DDC4
// uplink output: DDC5
DDRC = (0 << DDC0) | (0 << DDC1) | (0 << DDC2) | (0 << DDC3) | (0 << DDC4) | (1 << DDC5);
// Configure hub:
// output pins: DDB1-DDB4
// D9 D10 D12 D11
DDRB = (1 << DDB1) | (1 << DDB2) | (1 << DDB3) | (1 << DDB4);
PORTB &= ~((1 << PORTB1) | (1 << PORTB2) | (1 << PORTB3) | (1 << PORTB4));
// Set PCIE1 to enable PCMSK1 scan.
PCICR = 0;
PCICR |= (1 << PCIE1);
// Set PCINTx to trigger an interrupt on pin state change.
PCMSK1 = 0;
PCMSK1 |= (1 << PCINT8) | (1 << PCINT9) | (1 << PCINT10) | (1 << PCINT11);
USART_Init(57600);
TCCR1A = 0;
TCCR1B = (0 << CS12) | (0 << CS11) | (1 << CS10);
// Enable interrupts.
sei();
uint8_t commandBufferReadIndex = 0;
uint8_t commandBufferWriteIndex = 0;
char commandBuffer[2 + 8 + 8 + (COMMON_BUFFER_SIZE_IN_BYTES * 2)];
uint8_t payloadLength = 0;
uint8_t payload[COMMON_BUFFER_SIZE_IN_BYTES];
for (;;)
{
MOD_MonitorConnectionHealth();
if (!USART_CopyAvailableTillLineEnd(commandBuffer, commandBufferWriteIndex))
{
continue;
}
commandBufferReadIndex = 0;
payloadLength = 0;
char usartReceivedByte = commandBuffer[commandBufferReadIndex++];
if (usartReceivedByte == 'L')
{
while (commandBufferReadIndex < commandBufferWriteIndex)
{
char c = commandBuffer[commandBufferReadIndex++];
USART_WriteChar(c);
}
commandBufferWriteIndex = 0;
USART_WriteChar('\n');
continue;
}
if (usartReceivedByte == 'T')
{
char c = commandBuffer[commandBufferReadIndex++];
char hh = commandBuffer[commandBufferReadIndex++];
char hl = commandBuffer[commandBufferReadIndex++];
uint8_t delayId = HexToByte(hh, hl);
uint8_t value;
switch (c)
{
case 'S':
hh = commandBuffer[commandBufferReadIndex++];
hl = commandBuffer[commandBufferReadIndex++];
value = HexToByte(hh, hl);
HUB_SetDelay(delayId, value);
break;
case 'G':
value = HUB_GetDelay(delayId);
USART_WriteByteAsHex(value);
break;
}
commandBufferWriteIndex = 0;
USART_WriteChar('\n');
continue;
}
if (usartReceivedByte == '#')
{
char c = commandBuffer[commandBufferReadIndex++];
bool isRequest = c == '?';
uint32_t w1 = 0;
for (uint8_t i = 0; i < 4; i++)
{
if (commandBufferWriteIndex == commandBufferReadIndex) break;
char hh = commandBuffer[commandBufferReadIndex++];
if (hh == '-') break;
char hl = commandBuffer[commandBufferReadIndex++];
w1 = w1 << 8;
w1 |= HexToByte(hh, hl);
}
uint32_t w2 = 0;
for (uint8_t i = 0; i < 4; i++)
{
if (commandBufferWriteIndex == commandBufferReadIndex) break;
char hh = commandBuffer[commandBufferReadIndex++];
if (hh == '-') break;
char hl = commandBuffer[commandBufferReadIndex++];
w2 = w2 << 8;
w2 |= HexToByte(hh, hl);
}
if (w2 != 0)
{
for (uint8_t i = 0; i < COMMON_BUFFER_SIZE_IN_BYTES; i++)
{
if (commandBufferWriteIndex == commandBufferReadIndex) break;
char hh = commandBuffer[commandBufferReadIndex++];
if (hh == '-') break;
char hl = commandBuffer[commandBufferReadIndex++];
payload[i] = HexToByte(hh, hl);
payloadLength++;
}
}
bool result = false;
if (w1 != 0 && w2 == 0)
{
if (isRequest)
{
result = HUB_SendCommandToAllWithExpectedResponse(w1);
}
else
{
result = HUB_SendCommandToAll(w1);
}
}
else
{
if (isRequest)
{
result = HUB_SendCommandWithExpectedResponse(w1, w2, payload, payloadLength);
}
else
{
result = HUB_SendCommand(w1, w2, payload, payloadLength);
}
}
if (result && isRequest)
{
uint8_t * buff = HUB_GetBufferContent();
if (buff[0] < COMMON_BUFFER_SIZE_IN_BYTES)
{
for (uint8_t i = 1; i <= buff[0]; i++)
{
USART_WriteByteAsHex(buff[i]);
}
}
}
else
{
USART_WriteBoolResultCode(result);
}
USART_WriteChar('\n');
commandBufferWriteIndex = 0;
continue;
}
if (usartReceivedByte == '?')
{
uint8_t * buff = HUB_GetBufferContent();
for (uint8_t i = 0; i < COMMON_BUFFER_SIZE_IN_BYTES; i++)
{
USART_WriteByteAsHex(buff[i]);
}
USART_WriteChar('\n');
commandBufferWriteIndex = 0;
continue;
}
if (usartReceivedByte == 'D')
{
char c = commandBuffer[commandBufferReadIndex++];
switch (c)
{
case 'N':
USART_WriteByteResultCode(HUB_DetectNewSlaves());
USART_WriteChar('\n');
break;
case 'A':
case 'S':
{
uint32_t slaves[32];
uint8_t cnt = c == 'S' ? HUB_DetectSlavesSequence(slaves, 32) : HUB_DetectSlaves(slaves, 32);
if (cnt == 255)
{
USART_WriteBoolResultCode(false);
}
else
{
for (uint8_t i = 0; i < cnt; i++)
{
USART_WriteIntAsHex(slaves[i]);
}
}
USART_WriteChar('\n');
}
break;
}
commandBufferWriteIndex = 0;
continue;
}
if (usartReceivedByte == 'E')
{
char c = commandBuffer[commandBufferReadIndex++];
switch (c)
{
case 'Q':
{
uint8_t errors[ERRLOG_MAX_ERRORS];
ERRLOG_GetErrors(errors);
for (uint8_t i = 0; i < ERRLOG_MAX_ERRORS; i++)
{
USART_WriteByteAsHex(errors[i]);
}
USART_WriteChar('\n');
}
break;
case 'C':
ERRLOG_ClearErrors();
USART_WriteBoolResultCode(true);
USART_WriteChar('\n');
break;
}
commandBufferWriteIndex = 0;
continue;
}
if (usartReceivedByte == 'U')
{
bool result = false;
char c = commandBuffer[commandBufferReadIndex++];
switch (c)
{
case 'E':
result = HUB_SendEmptyDuinoCommand();
break;
case 'P':
result = HUB_SendUplinkPumpPacket();
break;
case 'D':
{
payloadLength = 0;
for (uint8_t i = 0; i < COMMON_BUFFER_SIZE_IN_BYTES; i++)
{
if (commandBufferWriteIndex == commandBufferReadIndex) break;
char hh = commandBuffer[commandBufferReadIndex++];
if (hh == '-') break;
char hl = commandBuffer[commandBufferReadIndex++];
payload[i] = HexToByte(hh, hl);
payloadLength++;
}
result = HUB_SendUplinkPacket(payload, payloadLength);
}
break;
}
USART_WriteBoolResultCode(result);
USART_WriteChar('\n');
commandBufferWriteIndex = 0;
continue;
}
commandBufferReadIndex = 0;
commandBufferWriteIndex = 0;
USART_WriteByteResultCode(0xF0);
USART_WriteChar('\n');
//uint16_t adcValue = ADC_read(ADC5D);
//itoa(adcValue, str, 10);
//USART_putstring(str);
//USART_send('\r');
}
return 0;
}
uint8_t HexToByte(uint8_t hh, uint8_t hl)
{
uint8_t result = 0;
if (hh >= '0' && hh <= '9')
{
result = hh - '0';
}
else if (hh >= 'A' && hh <= 'F')
{
result = (hh - 'A') + 0xA;
}
else if (hh >= 'a' && hh <= 'f')
{
result = (hh - 'a') + 0xA;
}
result = result << 4;
if (hl >= '0' && hl <= '9')
{
result |= hl - '0';
}
else if (hl >= 'A' && hl <= 'F')
{
result |= (hl - 'A') + 0xA;
}
else if (hl >= 'a' && hl <= 'f')
{
result |= (hl - 'a') + 0xA;
}
return result;
}
| 696f95921fc093c91e1307b57c64566c29efc007 | [
"C",
"Makefile",
"C++"
] | 21 | C | zmechanic/GccApplication4 | 170dc778b3a10a908459888b9d6091f2fce3bf95 | 8b993bf98012fc54fed1792bd2f352e43b0f697d | |
refs/heads/main | <file_sep># URL-pdf-to-file
The user is asked to search for a PDF on Google. Enter any PDF search text and the program will take that request, search it on Google using Selenium and PyAutoGUI, then download the PDF from the resulting URL and save it in the current directory.
<file_sep>import pyautogui
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
import time
import requests
from pathlib import Path

ans = input("What do you want to search and download? ")
driver = webdriver.Chrome(ChromeDriverManager().install())


def get_website(search_text):
    # Open Google, type the query with pyautogui and open the first result.
    driver.get("http://www.google.com")
    # Make sure the search box is present; Google focuses it by default,
    # so pyautogui can type straight into it.
    driver.find_element_by_xpath("/html/body/div[1]/div[3]/form/div[1]/div[1]/div[1]/div/div[2]/input")
    pyautogui.typewrite(search_text)
    driver.find_element_by_css_selector("input[class='gNO89b']").click()
    time.sleep(2)
    # Click the first search result and give the page (or PDF) time to load.
    driver.find_element_by_css_selector('h3[class="LC20lb DKV0Md"]').click()
    time.sleep(5)


def is_downloadable(url):
    # A URL is treated as downloadable when it serves neither plain text nor HTML.
    head = requests.head(url, allow_redirects=True)
    content_type = head.headers.get('content-type', '')
    if 'text' in content_type.lower():
        return False
    if 'html' in content_type.lower():
        return False
    return True


get_website(ans)
urls = driver.current_url

downloadable = is_downloadable(urls)
print(downloadable)

if downloadable and '/' in urls:
    # Use the last URL segment as the local file name and save the content.
    filename = Path(urls.rsplit('/', 1)[1])
    response = requests.get(urls, allow_redirects=True)
    filename.write_bytes(response.content)

input('Press ENTER to exit')
driver.quit()
| 6623055151bea3676c5ba98487f907ece2f80808 | [
"Markdown",
"Python"
] | 2 | Markdown | Atimassengill/URL-pdf-to-file | 15aafd4234ce24b1ac9f854dfc451d202826b683 | 85816344e655267a9b82abb2f1eab24592aa570f | |
refs/heads/master | <file_sep>/**
* Goods.js
*
* @description :: TODO: You might write a short summary of how this model works and what it represents here.
* @docs :: http://sailsjs.org/documentation/concepts/models-and-orm/models
*/
module.exports = {
tableName: 'goods',
attributes: {
socialcreditCode: {
type: 'string'
},
reservoirNo: {
type: 'string'
},
reservoirName: {
type: 'string'
},
stockNo: {
type: 'string'
},
stockName: {
type: 'string'
},
aoNo: {
type: 'string'
},
aoName: {
type: 'string'
},
goodsNo: {
type: 'string'
},
goodsName: {
type: 'string'
},
sealingMark: {
type: 'string'
},
    // goods location (slot) code
goodsCode: {
type: 'string'
},
capacity: {
type: 'integer'
}
}
};
<file_sep>/**
* CarriageController
*
* @description :: Server-side logic for managing carriages
* @help :: See http://sailsjs.org/#!/documentation/concepts/Controllers
*/
module.exports = {
  // Login
  login: (req, res) => {
    let params = req.body;
    let userInfo = {
      companyName: params.companyName
    }
    Carriage.find(userInfo).exec((err, result) => {
      if (err) return res.send(Message.messages(0, '登录失败', err))
      if (!result.length) return res.send(Message.messages(0, '您还未注册!', result));
      if (params.password === result[0].password) {
        res.send(Message.messages(1, '登录成功!', result));
      } else {
        // Wrong password: respond instead of leaving the request hanging.
        res.send(Message.messages(0, '登录失败', []));
      }
    })
  },
  // Create
  register: function (req, res) {
    let params = req.body;
    Carriage.create(params).exec((err, result) => {
      if (err) return res.send(Message.messages(0, '创建失败', err));
      res.send(Message.messages(1, '创建成功', result));
    })
  },
  // Delete
delete: function (req, res) {
let params = req.body;
Carriage.destroy(params).exec((err, result) => {
if (err) return res.send(Message.messages(0, '删除失败', err));
if (!result.length) return res.send(Message.messages(0, '删除失败', result));
res.send(Message.messages(1, '删除成功', result));
})
},
  // Update
update: function (req, res) {
let params = {
id: req.body.id,
}
let updateParams = req.body;
Carriage.update(params, updateParams).exec((err, result) => {
if (err) return res.send(Message.messages(0, '更新失败', err));
if (!result.length) return res.send(Message.messages(0, '更新失败', result));
res.send(Message.messages(1, '更新成功', result));
})
},
find: function (req, res) {
let params = req.query;
Carriage.find(params || {}).exec((err, result) => {
if (err) return res.send(Message.messages(0, '查找失败', err))
if (!result.length) return res.send(Message.messages(0, '查找失败', result))
res.send(Message.messages(1, '查找成功', result));
})
},
};
<file_sep>/**
* Reservoir.js
*
* @description :: TODO: You might write a short summary of how this model works and what it represents here.
* @docs :: http://sailsjs.org/documentation/concepts/models-and-orm/models
*/
module.exports = {
tableName:'reservior',
attributes: {
    // unified social credit code
socialcreditCode:{
type:'string',
},
reservoirNo:{
type:'string'
},
reservoirName:{
type:'string'
},
QHDM:{
type:'string'
},
province:{
type:'string'
},
city:{
type:'string'
},
county:{
type:'string'
},
town:{
type:'string'
},
postalCode:{
type:'string'
}
}
};
<file_sep>/**
* User.js
*
* @description :: TODO: You might write a short summary of how this model works and what it represents here.
* @docs :: http://sailsjs.org/documentation/concepts/models-and-orm/models
*/
module.exports = {
tableName: 'processor',
attributes: {
socialcreditCode: {
type: 'string',
unique: true
},
organizationCode: {
type: 'string',
unique: true
},
companyName: {
type: 'string',
unique: true
},
password: {
type: 'string',
},
companyType: {
type: 'string'
},
companyLinkman: {
type: 'string'
},
companyTele: {
type: 'string'
},
address: {
type: 'string'
},
lealPerson: {
type: 'string'
},
legalID: {
type: 'string'
},
legalTele: {
type: 'string'
},
email: {
type: 'string'
},
fax:{
type: 'string'
},
status: {
type: 'string',
enum: ['待审核', '审核通过', '审核未通过']
}
}
};
<file_sep>/**
* GoodsController
*
* @description :: Server-side logic for managing goods
* @help :: See http://sailsjs.org/#!/documentation/concepts/Controllers
*/
module.exports = {
add: (req, res, callback) => {
let params = req.body;
let isExitParams1 = {
socialcreditCode: params.socialcreditCode,
reservoirNo: params.reservoirNo,
reservoirName: params.reservoirName,
stockNo: params.stockNo,
stockName: params.stockName,
aoNo: params.aoNo,
aoName: params.aoName,
goodsNo: params.goodsNo
}
let isExitParams2 = {
socialcreditCode: params.socialcreditCode,
reservoirNo: params.reservoirNo,
reservoirName: params.reservoirName,
stockNo: params.stockNo,
stockName: params.stockName,
aoNo: params.aoNo,
aoName: params.aoName,
goodsNo: params.goodsNo,
goodsName: params.goodsName
}
async.waterfall([(callback) => {
Goods.find(isExitParams1).exec((err, result) => {
if (err) return callback(err);
if (result.length) {
          callback(null, true); // the record to be inserted already exists
} else {
callback(null, false);
}
});
}, (isExit, callback) => {
if (!isExit) {
Goods.find(isExitParams2).exec((err, result) => {
if (err) return callback(err);
if (result.length) {
            callback(null, true); // the record to be inserted already exists
} else {
callback(null, false);
}
});
} else {
callback(null, isExit);
};
}, (isExit, callback) => {
if (!isExit) {
Goods.create(params).exec((err, result) => {
if (err) return callback(err);
callback(null, result);
})
} else {
callback(null, isExit);
};
}], (err, result) => {
      if (err) return res.send(Message.messages(0, '创建失败!', err));
if (result === true) {
res.send(Message.messages(0, '创建失败!', []));
} else {
res.send(Message.messages(1, '创建成功', result));
}
});
},
delete: (req, res) => {
let params = req.body;
Goods.destroy(params).exec((err, result) => {
if (err) return res.send(Message.messages(0, '删除失败!', err));
if (result.length === 0) return res.send(Message.messages(0, '删除失败!', result));
res.send(Message.messages(1, '删除成功', result));
});
},
update: (req, res) => {
let params = { id: req.body.id };
let updateParams = req.body;
Goods.update(params, updateParams).exec((err, result) => {
if (err) return res.send(Message.messages(0, '更新失败!', err));
if (result.length === 0) return res.send(Message.messages(0, '更新失败!', result));
res.send(Message.messages(1, '更新成功', result));
});
},
find: (req, res) => {
let params = req.query;
let GoodsInfoParams = {
socialcreditCode: params.socialcreditCode
// goodsCode: params.goodsCode
}
// Goods.find(params || {}).exec((err, result) => {
// if (err) return res.send(Message.messages(0, '查找失败!', err));
// if (result.length === 0) return res.send(Message.messages(0, '查找失败!', result));
// res.send(Message.messages(1, '查找成功', result));
// });
async.waterfall([(callback) => {
Goods.find(params || {}).exec((err, result) => {
        if (err) return callback(err);
if (result.length === 0) {
callback(null, []);
} else {
callback(null, result);
}
});
}, (goods, callback) => {
if (goods.length !== 0) {
GoodsInfo.find(GoodsInfoParams).exec((err, result) => {
          if (err) return callback(err);
if (result.length === 0) {
callback(null, { goods, goodsInfo: [] });
} else {
callback(null, { goods, goodsInfo: result });
}
});
} else {
callback(null, '查找失败');
}
    }], (err, result) => {
      if (err) return res.send(Message.messages(0, '查找失败!', err));
      switch (result) {
        case '查找失败':
          // Lookup failed, so respond with the failure code.
          res.send(Message.messages(0, result, []));
          break;
        default:
          res.send(Message.messages(1, '查找成功', result));
          break;
      }
    });
}
};
<file_sep>/**
* Route Mappings
* (sails.config.routes)
*
* Your routes map URLs to views and controllers.
*
* If Sails receives a URL that doesn't match any of the routes below,
* it will check for matching files (images, scripts, stylesheets, etc.)
* in your assets directory. e.g. `http://localhost:1337/images/foo.jpg`
* might match an image file: `/assets/images/foo.jpg`
*
* Finally, if those don't match either, the default 404 handler is triggered.
* See `api/responses/notFound.js` to adjust your app's 404 logic.
*
* Note: Sails doesn't ACTUALLY serve stuff from `assets`-- the default Gruntfile in Sails copies
* flat files from `assets` to `.tmp/public`. This allows you to do things like compile LESS or
* CoffeeScript for the front-end.
*
* For more information on configuring custom routes, check out:
* http://sailsjs.org/#!/documentation/concepts/Routes/RouteTargetSyntax.html
*/
module.exports.routes = {
/***************************************************************************
* *
* Make the view located at `views/homepage.ejs` (or `views/homepage.jade`, *
* etc. depending on your default view engine) your home page. *
* *
* (Alternatively, remove this and add an `index.html` file in your *
* `assets` directory) *
* *
***************************************************************************/
  // Processing enterprise endpoints
'post /api/processor/login': 'processor.login',
'post /api/processor/register': 'processor.register',
'post /api/processor/delete': 'processor.delete',
'post /api/processor/update': 'processor.update',
'get /api/processor/find': 'processor.find',
  // Storage enterprise endpoints
'post /api/storages/login': 'storages.login',
'post /api/storages/register': 'storages.register',
'post /api/storages/delete': 'storages.delete',
'post /api/storages/update': 'storages.update',
'get /api/storages/find': 'storages.find',
  // Logistics / transport enterprise endpoints
'post /api/carriage/login': 'carriage.login',
'post /api/carriage/register': 'carriage.register',
'post /api/carriage/delete': 'carriage.delete',
'post /api/carriage/update': 'carriage.update',
'get /api/carriage/find': 'carriage.find',
  // Supervision (regulator) endpoints
'get /api/supervise/find': 'Supervise.find',
  // Quality-inspection report endpoints
'post /api/QCInfo/add': 'QCInfo.add',
'post /api/QCInfo/delete': 'QCInfo.delete',
'post /api/QCInfo/update': 'QCInfo.update',
'get /api/QCInfo/find': 'QCInfo.find',
  // Enterprise client management endpoints
'post /api/client/add': 'client.add',
'post /api/client/delete': 'client.delete',
'post /api/client/update': 'client.update',
'get /api/client/find': 'client.find',
  // Transport capacity endpoints (logistics enterprises)
'post /api/carriage/transport/add': 'transport.add',
'post /api/carriage/transport/delete': 'transport.delete',
'post /api/carriage/transport/update': 'transport.update',
'get /api/carriage/transport/find': 'transport.find',
  // Driver management endpoints (logistics enterprises)
'post /api/carriage/driver/add': 'driver.add',
'post /api/carriage/driver/delete': 'driver.delete',
'post /api/carriage/driver/update': 'driver.update',
'get /api/carriage/driver/find': 'driver.find',
  // Shipment info registration endpoints (logistics enterprises)
'post /api/carriage/carriageInfo/add': 'carriageInfo.add',
'post /api/carriage/carriageInfo/delete': 'carriageInfo.delete',
'post /api/carriage/carriageInfo/update': 'carriageInfo.update',
'get /api/carriage/carriageInfo/find': 'carriageInfo.find',
  // Reservoir (storage area) management endpoints
'post /api/reservoir/add': 'reservoir.add',
'post /api/reservoir/delete': 'reservoir.delete',
'post /api/reservoir/update': 'reservoir.update',
'get /api/reservoir/find': 'reservoir.find',
  // Warehouse management endpoints
'post /api/stock/add': 'stock.add',
'post /api/stock/delete': 'stock.delete',
'post /api/stock/update': 'stock.update',
'get /api/stock/find': 'stock.find',
  // Granary bay (ao) management endpoints
'post /api/ao/add': 'ao.add',
'post /api/ao/delete': 'ao.delete',
'post /api/ao/update': 'ao.update',
'get /api/ao/find': 'ao.find',
  // Goods location management endpoints
'post /api/goods/add': 'goods.add',
'post /api/goods/delete': 'goods.delete',
'post /api/goods/update': 'goods.update',
'get /api/goods/find': 'goods.find',
  // Goods location grain info management endpoints
'post /api/goodsInfo/add': 'goodsInfo.add',
'post /api/goodsInfo/delete': 'goodsInfo.delete',
'post /api/goodsInfo/update': 'goodsInfo.update',
'get /api/goodsInfo/find': 'goodsInfo.find',
  // Stock-in (inbound) management endpoints
'post /api/stockIn/add': 'stockIn.add',
'post /api/stockIn/delete': 'stockIn.delete',
'post /api/stockIn/update': 'stockIn.update',
'get /api/stockIn/find': 'stockIn.find',
  // Stock-out (outbound) management endpoints
'post /api/stockOut/add': 'stockOut.add',
'post /api/stockOut/delete': 'stockOut.delete',
'post /api/stockOut/update': 'stockOut.update',
'get /api/stockOut/find': 'stockOut.find',
/***************************************************************************
* *
* Custom routes here... *
* *
* If a request to a URL doesn't match any of the custom routes above, it *
* is matched against Sails route blueprints. See `config/blueprints.js` *
* for configuration options and examples. *
* *
***************************************************************************/
};
<file_sep>/**
* ClientController
*
* @description :: Server-side logic for managing clients
* @help :: See http://sailsjs.org/#!/documentation/concepts/Controllers
*/
module.exports = {
add: (req, res, callback) => {
let params = req.body;
let isExitParams = {
socialcreditCode: params.socialcreditCode,
companysocialcreditCode: params.companysocialcreditCode,
companyName: params.companyName
}
// let isExitParams2 = {
// socialcreditCode: params.socialcreditCode,
// companysocialcreditCode: params.companysocialcreditCode,
// companyName: params.companyName
// }
async.waterfall([(callback) => {
Client.find(isExitParams).exec((err, result) => {
if (err) return callback(err);
if (result.length) {
callback(null, true);//即将插入的已经存在
} else {
callback(null, false);
}
});
}, (isExit, callback) => {
      if (!isExit) {
        Client.create(params).exec((err, result) => {
if (err) return callback(err);
callback(null, result);
})
} else {
callback(null, isExit);
};
}], (err, result) => {
      if (err) return res.send(Message.messages(0, '创建失败!', err));
if (result === true) {
res.send(Message.messages(0, '创建失败!', []));
} else {
res.send(Message.messages(1, '创建成功', result));
}
});
},
delete: (req, res) => {
let params = req.body;
Client.destroy(params).exec((err, result) => {
if (err) return res.send(Message.messages(0, '删除失败!', err));
if (result.length === 0) return res.send(Message.messages(0, '删除失败!', result));
res.send(Message.messages(1, '删除成功', result));
});
},
update: (req, res) => {
let params = { id: req.body.id };
let updateParams = req.body;
Client.update(params, updateParams).exec((err, result) => {
if (err) return res.send(Message.messages(0, '更新失败!', err));
if (result.length === 0) return res.send(Message.messages(0, '更新失败!', result));
res.send(Message.messages(1, '更新成功', result));
});
},
find: (req, res) => {
let params = req.query;
Client.find(params || {}).exec((err, result) => {
if (err) return res.send(Message.messages(0, '查找失败!', err));
if (result.length === 0) return res.send(Message.messages(0, '查找失败!', result));
res.send(Message.messages(1, '查找成功', result));
});
}
};
<file_sep><a id="top"></a>
> **粮食溯源数据库接口**
##### 基本接口地址:
```
http://192.168.127.12:1337/api
```
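
All of the endpoints below are relative to this base address and respond with a common `{code, message, data}` envelope, where `code` 1 means success and 0 means failure (see the output examples in each section). As a minimal illustrative sketch only — this helper is not part of the repository and assumes a Node.js 18+ environment with built-in `fetch` — requests can be composed like this:

```
// Illustrative sketch only (not part of this repository).
// Assumes Node.js 18+ (built-in fetch) and the base address shown above.
const BASE_URL = 'http://192.168.127.12:1337/api';

async function callApi(method, path, params) {
  // GET endpoints take their parameters in the query string,
  // POST endpoints take a JSON body.
  const isGet = method === 'GET';
  const url = isGet && params
    ? `${BASE_URL}${path}?${new URLSearchParams(params)}`
    : `${BASE_URL}${path}`;
  const res = await fetch(url, {
    method,
    headers: { 'Content-Type': 'application/json' },
    body: isGet ? undefined : JSON.stringify(params)
  });
  // Every endpoint responds with { code, message, data }.
  return res.json();
}
```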
### 目录
---
一.仓储企业
- [x] 1.[仓储企业登录](#storages_login)
- [x] 1.[仓储企业注册](#storages_regregister)
- [x] 2.[仓储企业注销](#storages_delete)
- [x] 3.[仓储企业更新](#storages_update)
- [x] 4.[仓储企业查找](#storages_find)
---
二.加工企业
- [x] 1.[加工企业登录](#processor_login)
- [x] 1.[加工企业注册](#processor_regregister)
- [x] 2.[加工企业注销](#processor_delete)
- [x] 3.[加工企业更新](#processor_update)
- [x] 4.[加工企业查找](#processor_find)
---
三.物流企业
- 企业管理
- [x] 1.[物流企业登录](#carriage_login)
- [x] 1.[物流企业注册](#carriage_regregister)
- [x] 2.[物流企业注销](#carriage_delete)
- [x] 3.[物流企业更新](#carriage_update)
- [x] 4.[物流企业查找](#carriage_find)
- 运力管理
- [x] 1.[物流企业运力添加](#carriage/transport/add)
- [x] 2.[物流企业运力删除](#carriage/transport/delete)
- [x] 3.[物流企业运力更新](#carriage/transport/update)
- [x] 4.[物流企业运力查找](#carriage/transport/find)
- 司机管理
- [x] 1.[物流企业司机添加](#carriage/driver/add)
- [x] 2.[物流企业司机删除](#carriage/driver/delete)
- [x] 3.[物流企业司机更新](#carriage/driver/update)
- [x] 4.[物流企业司机查找](#carriage/driver/find)
- 物流信息管理
- [x] 1.[物流企业信息添加](#carriage/carriageInfo/add)
- [x] 2.[物流企业信息删除](#carriage/carriageInfo/delete)
- [x] 3.[物流企业信息更新](#carriage/carriageInfo/update)
- [x] 4.[物流企业信息查找](#carriage/carriageInfo/find)
---
四.库区管理
- [x] 1.[企业库区添加](#/reservoir/add)
- [x] 2.[企业库区删除](#/reservoir/delete)
- [x] 3.[企业库区更新](#/reservoir/update)
- [x] 4.[企业库区查找](#/reservoir/find)
- 仓库管理
- [x] 1.[仓库添加](#/stock/add)
- [x] 2.[仓库删除](#/stock/delete)
- [x] 3.[仓库更新](#/stock/update)
- [x] 4.[仓库查找](#/stock/find)
- 廒间管理
- [x] 1.[廒间添加](#/ao/add)
- [x] 2.[廒间删除](#/ao/delete)
- [x] 3.[廒间更新](#/ao/update)
- [x] 4.[廒间查找](#/ao/find)
- 货位管理
- [x] 1.[货位添加](#/goods/add)
- [x] 2.[货位删除](#/goods/delete)
- [x] 3.[货位更新](#/goods/update)
- [x] 4.[货位查找](#/goods/find)
- 货位粮食信息管理
- [x] 1.[货位粮食信息添加](#/goodsInfo/add)
- [x] 2.[货位粮食信息删除](#/goodsInfo/delete)
- [x] 3.[货位粮食信息更新](#/goodsInfo/update)
- [x] 4.[货位粮食信息查找](#/goodsInfo/find)
---
五.企业客户管理
- [x] 1.[企业客户添加](#/client/add)
- [x] 2.[企业客户删除](#/client/delete)
- [x] 3.[企业客户更新](#/client/update)
- [x] 4.[企业客户查找](#/client/find)
---
六.入库管理
- [x] 1.[企业添加入库信息](#/stockIn/add)
- [x] 2.[企业删除入库信息](#/stockIn/delete)
- [x] 3.[企业更新入库信息](#/stockIn/update)
- [x] 4.[企业查找入库信息](#/stockIn/find)
- 出库管理
- [x] 1.[企业添加出库信息](#/stockOut/add)
- [x] 2.[企业删除出库信息](#/stockOut/delete)
- [x] 3.[企业更新出库信息](#/stockOut/update)
- [x] 4.[企业查找出库信息](#/stockOut/find)
---
七.质检企业
- [x] 1.[质检企业质检信息添加](#/QCInfo/add)
- [x] 2.[质检企业质检信息删除](#/QCInfo/delete)
- [x] 3.[质检企业质检信息更新](#/QCInfo/update)
- [x] 4.[质检企业质检信息查找](#/QCInfo/find)
---
八.监管企业
- [x] 4.[注册企业信息查找](#/supervise/find)
---
### 一.仓储企业
<a id="storages_login"></a>
- [x] [回到顶部](#top)
#### 仓储企业登录
##### 接口地址:
```
POST /storages/login
```
接口作用:
```
仓储企业注册
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
companyName | string | 企业名称|Y
password | string | 密码|Y
##### input:
```
{
"companyName":"百度1",
"password":"<PASSWORD>"
}
```
##### output:
```
{
"code": 1,
"message": "登录成功!",
"data": [
{
"socialcreditCode": "xiamenjiagongB1234",
"companyName": "厦门加工企业B",
"organizationCode": "agongB123",
"password": "<PASSWORD>",
"companyType": "小麦加工企业",
"companyLinkman": "李小花",
"companyTele": "15678900987",
"address": "福建省厦门市",
"lealPerson": "刘明",
"legalID": "531819198707231829",
"legalTele": "13267890987",
"status": "待审核",
"createdAt": "2018-04-12T12:46:41.037Z",
"updatedAt": "2018-04-12T12:46:41.037Z",
"id": "5acf553102d3fd6a6f7afd5e"
}
]
}
```
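
For illustration only (not part of the repository), a login call against this endpoint with Node.js 18+ `fetch` could look like the sketch below; the credentials are placeholders mirroring the example input:

```
// Illustrative sketch only: log a storage enterprise in and read the envelope.
const BASE_URL = 'http://192.168.127.12:1337/api';

async function loginStorageCompany(companyName, password) {
  const res = await fetch(`${BASE_URL}/storages/login`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ companyName, password })
  });
  const { code, message, data } = await res.json();
  if (code !== 1) {
    throw new Error(`login failed: ${message}`);
  }
  // data is an array of matching company records, as in the example output.
  return data[0];
}

// Placeholder credentials; replace with a registered company.
loginStorageCompany('百度1', 'your-password').then(console.log).catch(console.error);
```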
<a id="storages_regregister"></a>
- [x] [回到顶部](#top)
#### 1.仓储企业注册
##### 接口地址:
```
POST /storages/register
```
接口作用:
```
仓储企业注册
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode | string | 社会信用代码|Y
companyName | string | 企业名称|Y
organizationCode | string | 企业组织代码|Y
password | string | 密码|Y
companyType | string | 企业类型|Y
companyLinkman | string | 企业联系人|Y
companyTele | string | 联系人电话|Y
address | string | 企业地址|Y
lealPerson | string | 法人代表|Y
legalID | string | 法人身份证ID|Y
legalTele|string|法人电话|Y
status|string|注册状态|Y(此处默认填写“待审核”)
##### input:
```
{
"socialcreditCode":"1",
"companyName":"百度1",
"organizationCode":"百度1",
"password":"<PASSWORD>",
"companyType":"companyType",
"companyLinkman":"123343",
"companyTele":"organizationCode",
"address":"百度3",
"lealPerson":"123343",
"legalID":"organizationCode",
"legalTele":"百度3",
"status":"待审核"
}
```
##### output:
```
{
"code": 1,
"message": "创建成功",
"data": {
"socialcreditCode": "1",
"companyName": "百度1",
"organizationCode": "百度1",
"password": "<PASSWORD>",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "百度3",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "百度3",
"status": "待审核",
"createdAt": "2018-04-08T12:26:46.323Z",
"updatedAt": "2018-04-08T12:26:46.323Z",
"id": "5aca0a8661bf39fa0d98372a"
}
}
```
<a id="storages_delete"></a>
- [x] [回到顶部](#top)
#### 2.注销企业注册
##### 接口地址:
```
POST /storages/delete
```
接口作用:
```
仓储企业注销
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|注册的ID|Y
##### input:
```
{
"id":"5aca0a8661bf39fa0d98372a"
}
```
##### output:
```
{
"code": 1,
"message": "删除成功",
"data": [
{
"socialcreditCode": "1",
"companyName": "百度1",
"organizationCode": "百度1",
"password": "<PASSWORD>",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "百度3",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "百度3",
"status": "待审核",
"createdAt": "2018-04-08T12:26:46.323Z",
"updatedAt": "2018-04-08T12:26:46.323Z",
"id": "5aca0a8661bf39fa0d98372a"
}
]
}
```
<a id="storages_update"></a>
- [x] [回到顶部](#top)
#### 3.更改企业注册信息
##### 接口地址:
```
POST /storages/update
```
接口作用:
```
仓储企业注销
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|注册的ID|Y
##### input:
```
{
"id":"5aca0c0861bf39fa0d98372b",
"password":"<PASSWORD>"(其余字段选填)
}
```
##### output:
```
{
"code": 1,
"message": "更新成功",
"data": [
{
"socialcreditCode": "1",
"companyName": "百度1",
"organizationCode": "百度1",
"password": "<PASSWORD>",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "百度3",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "百度3",
"status": "审核通过",
"createdAt": "2018-04-08T12:33:12.492Z",
"updatedAt": "2018-04-08T12:34:46.593Z",
"id": "5aca0c0861bf39fa0d98372b"
}
]
}
```
<a id="storages_find"></a>
- [x] [回到顶部](#top)
#### 4.查找企业注册信息
##### 接口地址:
```
GET /storages/find
```
接口作用:
```
仓储企业注销
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|注册的ID|Y
##### input:
```
{
"id":"5aca0c0861bf39fa0d98372b"
}
```
##### output:
```
{
"code": 1,
"message": "查找成功",
"data": [
{
"socialcreditCode": "1",
"companyName": "百度1",
"organizationCode": "百度1",
"password": "<PASSWORD>",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "百度3",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "百度3",
"status": "审核通过",
"createdAt": "2018-04-08T12:33:12.492Z",
"updatedAt": "2018-04-08T12:34:46.593Z",
"id": "5aca0c0861bf39fa0d98372b"
}
]
}
```
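
Note that in `config/routes.js` the find endpoints are exposed as GET routes, so the lookup parameters travel in the query string rather than in a JSON body. A minimal illustrative call (not part of the repository, Node.js 18+ `fetch` assumed):

```
// Illustrative sketch only: look up registered storage enterprises.
// The find endpoints are GET routes, so parameters go in the query string.
const BASE_URL = 'http://192.168.127.12:1337/api';

async function findStorageCompany(query) {
  const qs = new URLSearchParams(query).toString();
  const res = await fetch(`${BASE_URL}/storages/find?${qs}`);
  const { code, message, data } = await res.json();
  if (code !== 1) {
    throw new Error(`find failed: ${message}`);
  }
  return data;
}

// Placeholder id taken from the example above.
findStorageCompany({ id: '5aca0c0861bf39fa0d98372b' }).then(console.log).catch(console.error);
```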
### 二.加工企业
<a id="processor_login"></a>
- [x] [回到顶部](#top)
#### 企业登录
##### 接口地址:
```
POST /processor/login
```
接口作用:
```
仓储企业注册
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
companyName | string | 企业名称|Y
password | string | 密码|Y
##### input:
```
{
"companyName":"百度1",
"password":"<PASSWORD>"
}
```
##### output:
```
{
"code": 1,
"message": "登录成功!",
"data": [
{
"socialcreditCode": "xiamenjiagongB1234",
"companyName": "厦门加工企业B",
"organizationCode": "agongB123",
"password": "<PASSWORD>",
"companyType": "小麦加工企业",
"companyLinkman": "李小花",
"companyTele": "15678900987",
"address": "福建省厦门市",
"lealPerson": "刘明",
"legalID": "531819198707231829",
"legalTele": "13267890987",
"status": "待审核",
"createdAt": "2018-04-12T12:46:41.037Z",
"updatedAt": "2018-04-12T12:46:41.037Z",
"id": "5acf553102d3fd6a6f7afd5e"
}
]
}
```
<a id="processor_regregister"></a>
- [x] [回到顶部](#top)
#### 1.加工企业注册
##### 接口地址:
```
POST /processor/register
```
接口作用:
```
仓储企业注册
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode | string | 社会信用代码|Y
companyName | string | 企业名称|Y
organizationCode | string | 企业组织代码|Y
password | string | 密码|Y
companyType | string | 企业类型|Y
companyLinkman | string | 企业联系人|Y
companyTele | string | 联系人电话|Y
address | string | 企业地址|Y
lealPerson | string | 法人代表|Y
legalID | string | 法人身份证ID|Y
legalTele|string|法人电话|Y
status|string|注册状态|Y(此处默认填写“待审核”)
##### input:
```
{
"socialcreditCode":"1",
"companyName":"百度1",
"organizationCode":"百度1",
"password":"<PASSWORD>",
"companyType":"companyType",
"companyLinkman":"123343",
"companyTele":"organizationCode",
"address":"百度3",
"lealPerson":"123343",
"legalID":"organizationCode",
"legalTele":"百度3",
"status":"待审核"
}
```
##### output:
```
{
"code": 1,
"message": "创建成功",
"data": {
"socialcreditCode": "1",
"companyName": "百度1",
"organizationCode": "百度1",
"password": "<PASSWORD>",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "百度3",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "百度3",
"status": "待审核",
"createdAt": "2018-04-08T12:26:46.323Z",
"updatedAt": "2018-04-08T12:26:46.323Z",
"id": "5aca0a8661bf39fa0d98372a"
}
}
```
<a id="processor_delete"></a>
- [x] [回到顶部](#top)
#### 2.注销企业注册
##### 接口地址:
```
POST /processor/delete
```
接口作用:
```
仓储企业注销
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|注册的ID|Y
##### input:
```
{
"id":"5aca0a8661bf39fa0d98372a"
}
```
##### output:
```
{
"code": 1,
"message": "删除成功",
"data": [
{
"socialcreditCode": "1",
"companyName": "百度1",
"organizationCode": "百度1",
"password": "<PASSWORD>",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "百度3",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "百度3",
"status": "待审核",
"createdAt": "2018-04-08T12:26:46.323Z",
"updatedAt": "2018-04-08T12:26:46.323Z",
"id": "5aca0a8661bf39fa0d98372a"
}
]
}
```
<a id="processor_update"></a>
- [x] [回到顶部](#top)
#### 3.更改企业注册信息
##### 接口地址:
```
POST /processor/update
```
接口作用:
```
仓储企业注销
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|注册的ID|Y
##### input:
```
{
"id":"5aca0c0861bf39fa0d98372b",
"password":"<PASSWORD>"(其余字段选填)
}
```
##### output:
```
{
"code": 1,
"message": "更新成功",
"data": [
{
"socialcreditCode": "1",
"companyName": "百度1",
"organizationCode": "百度1",
"password": "<PASSWORD>",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "百度3",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "百度3",
"status": "审核通过",
"createdAt": "2018-04-08T12:33:12.492Z",
"updatedAt": "2018-04-08T12:34:46.593Z",
"id": "5aca0c0861bf39fa0d98372b"
}
]
}
```
<a id="processor_find"></a>
- [x] [回到顶部](#top)
#### 4.查找企业注册信息
##### 接口地址:
```
GET /processor/find
```
接口作用:
```
仓储企业注销
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|注册的ID|Y
##### input:
```
{
"id":"5aca0c0861bf39fa0d98372b"
}
```
##### output:
```
{
"code": 1,
"message": "查找成功",
"data": [
{
"socialcreditCode": "1",
"companyName": "百度1",
"organizationCode": "百度1",
"password": "<PASSWORD>",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "百度3",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "百度3",
"status": "审核通过",
"createdAt": "2018-04-08T12:33:12.492Z",
"updatedAt": "2018-04-08T12:34:46.593Z",
"id": "5aca0c0861bf39fa0d98372b"
}
]
}
```
### 三.物流企业
<a id="carriage_login"></a>
- [x] [回到顶部](#top)
#### 企业登录
##### 接口地址:
```
POST /carriage/login
```
接口作用:
```
仓储企业注册
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
companyName | string | 企业名称|Y
password | string | 密码|Y
##### input:
```
{
"companyName":"百度1",
"password":"<PASSWORD>"
}
```
##### output:
```
{
"code": 1,
"message": "登录成功!",
"data": [
{
"socialcreditCode": "xiamenjiagongB1234",
"companyName": "厦门加工企业B",
"organizationCode": "agongB123",
"password": "<PASSWORD>",
"companyType": "小麦加工企业",
"companyLinkman": "李小花",
"companyTele": "15678900987",
"address": "福建省厦门市",
"lealPerson": "刘明",
"legalID": "531819198707231829",
"legalTele": "13267890987",
"status": "待审核",
"createdAt": "2018-04-12T12:46:41.037Z",
"updatedAt": "2018-04-12T12:46:41.037Z",
"id": "5acf553102d3fd6a6f7afd5e"
}
]
}
```
<a id="carriage_regregister"></a>
- [x] [回到顶部](#top)
#### 1.加工企业注册
##### 接口地址:
```
POST /carriage/register
```
接口作用:
```
仓储企业注册
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode | string | 社会信用代码|Y
companyName | string | 企业名称|Y
organizationCode | string | 企业组织代码|Y
password | string | 密码|Y
companyType | string | 企业类型|Y
companyLinkman | string | 企业联系人|Y
companyTele | string | 联系人电话|Y
address | string | 企业地址|Y
lealPerson | string | 法人代表|Y
legalID | string | 法人身份证ID|Y
legalTele|string|法人电话|Y
status|string|注册状态|Y(此处默认填写“待审核”)
##### input:
```
{
"socialcreditCode":"1",
"companyName":"百度1",
"organizationCode":"百度1",
"password":"<PASSWORD>",
"companyType":"companyType",
"companyLinkman":"123343",
"companyTele":"organizationCode",
"address":"百度3",
"lealPerson":"123343",
"legalID":"organizationCode",
"legalTele":"百度3",
"status":"待审核"
}
```
##### output:
```
{
"code": 1,
"message": "创建成功",
"data": {
"socialcreditCode": "1",
"companyName": "百度1",
"organizationCode": "百度1",
"password": "<PASSWORD>",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "百度3",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "百度3",
"status": "待审核",
"createdAt": "2018-04-08T12:26:46.323Z",
"updatedAt": "2018-04-08T12:26:46.323Z",
"id": "5aca0a8661bf39fa0d98372a"
}
}
```
<a id="carriage_delete"></a>
- [x] [回到顶部](#top)
#### 2.注销企业注册
##### 接口地址:
```
POST /carriage/delete
```
接口作用:
```
仓储企业注销
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|注册的ID|Y
##### input:
```
{
"id":"5aca0a8661bf39fa0d98372a"
}
```
##### output:
```
{
"code": 1,
"message": "删除成功",
"data": [
{
"socialcreditCode": "1",
"companyName": "百度1",
"organizationCode": "百度1",
"password": "<PASSWORD>",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "百度3",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "百度3",
"status": "待审核",
"createdAt": "2018-04-08T12:26:46.323Z",
"updatedAt": "2018-04-08T12:26:46.323Z",
"id": "5aca0a8661bf39fa0d98372a"
}
]
}
```
<a id="carriage_update"></a>
- [x] [回到顶部](#top)
#### 3.更改企业注册信息
##### 接口地址:
```
POST /carriage/update
```
接口作用:
```
仓储企业注销
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|注册的ID|Y
##### input:
```
{
"id":"5aca0c0861bf39fa0d98372b",
"password":"<PASSWORD>"(其余字段选填)
}
```
##### output:
```
{
"code": 1,
"message": "更新成功",
"data": [
{
"socialcreditCode": "1",
"companyName": "百度1",
"organizationCode": "百度1",
"password": "<PASSWORD>",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "百度3",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "百度3",
"status": "审核通过",
"createdAt": "2018-04-08T12:33:12.492Z",
"updatedAt": "2018-04-08T12:34:46.593Z",
"id": "5aca0c0861bf39fa0d98372b"
}
]
}
```
<a id="carriage_find"></a>
- [x] [回到顶部](#top)
#### 4.查找企业注册信息
##### 接口地址:
```
GET /carriage/find
```
接口作用:
```
仓储企业注销
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|注册的ID|Y
##### input:
```
{
"id":"5aca0c0861bf39fa0d98372b"
}
```
##### output:
```
{
"code": 1,
"message": "查找成功",
"data": [
{
"socialcreditCode": "1",
"companyName": "百度1",
"organizationCode": "百度1",
"password": "<PASSWORD>",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "百度3",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "百度3",
"status": "审核通过",
"createdAt": "2018-04-08T12:33:12.492Z",
"updatedAt": "2018-04-08T12:34:46.593Z",
"id": "5aca0c0861bf39fa0d98372b"
}
]
}
```
### 运力管理
<a id="carriage/transport/add"></a>
- [x] [回到顶部](#top)
#### 1.添加运力
##### 接口地址
```
POST /carriage/transport/add
```
接口作用:
```
物流企业添加运力
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
carriageType|enum|运力类型(汽车,船舶)|Y
carriageNo|string|运力牌号|Y
##### input:
```
{
"socialcreditCode":"1",
"carriageType":"汽车",
"carriageNo":"d68jnj819"
}
```
##### output:
```
{
"code": 1,
"message": "创建成功",
"data": {
"socialcreditCode": "1",
"carriageType": "汽车",
"carriageNo": "d68jnj819",
"createdAt": "2018-04-09T02:07:12.032Z",
"updatedAt": "2018-04-09T02:07:12.032Z",
"id": "5acacad0f11587d9033e35c2"
}
}
```
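
The `carriageType` field is documented as an enum with the two values 汽车 (truck) and 船舶 (ship). As an illustrative sketch only (not part of the repository, Node.js 18+ `fetch` assumed), a client can guard against other values before posting:

```
// Illustrative sketch only: validate the documented carriageType enum, then post.
const BASE_URL = 'http://192.168.127.12:1337/api';
const CARRIAGE_TYPES = ['汽车', '船舶']; // the two documented enum values

async function addTransport(entry) {
  if (!CARRIAGE_TYPES.includes(entry.carriageType)) {
    throw new Error(`unsupported carriageType: ${entry.carriageType}`);
  }
  const res = await fetch(`${BASE_URL}/carriage/transport/add`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(entry)
  });
  return res.json();
}

// Placeholder record mirroring the example input above.
addTransport({ socialcreditCode: '1', carriageType: '汽车', carriageNo: 'd68jnj819' })
  .then(({ code, message }) => console.log(code, message))
  .catch(console.error);
```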
<a id="carriage/transport/delete"></a>
- [x] [回到顶部](#top)
#### 2.删除运力
##### 接口地址
```
POST /carriage/transport/delete
```
接口作用:
```
物流企业删除运力
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|编号|Y
##### input:
```
{
"id":"5acacad0f11587d9033e35c2"
}
```
##### output:
```
{
"code": 1,
"message": "删除成功",
"data": [
{
"socialcreditCode": "1",
"carriageType": "汽车",
"carriageNo": "d68jnj819",
"createdAt": "2018-04-09T02:07:12.032Z",
"updatedAt": "2018-04-09T02:07:12.032Z",
"id": "5acacad0f11587d9033e35c2"
}
]
}
```
<a id="carriage/transport/update"></a>
- [x] [回到顶部](#top)
#### 3.更新运力
##### 接口地址
```
POST /carriage/transport/update
```
接口作用:
```
物流企业更新运力
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|编号|Y
socialcreditCode|string|社会信用代码|N
carriageType|string|运力类型|N
##### input:
```
{
"id":"5acacad0f11587d9033e35c2",
"socialcreditCode":"2",
"carriageType":"船舶",
"carriageNo":"2"
}
```
##### output:
```
{
"code": 1,
"message": "更新成功",
"data": [
{
"socialcreditCode": "1",
"carriageType": "汽车",
"carriageNo": "d68jnj819",
"createdAt": "2018-04-09T02:07:12.032Z",
"updatedAt": "2018-04-09T02:07:12.032Z",
"id": "5acacad0f11587d9033e35c2"
}
]
}
```
<a id="carriage/transport/find"></a>
#### 4.查找运力
##### 接口地址
```
GET /carriage/transport/find
```
接口作用:
```
物流企业查找运力
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|编号|Y
socialcreditCode|string|社会信用代码|N
carriageType|string|运力类型|N
##### input:
```
{
"socialcreditCode":"2"
}
```
##### output:
```
{
"code": 1,
"message": "查找成功",
"data": [
{
"socialcreditCode": "2",
"carriageType": "船舶",
"carriageNo": "2",
"createdAt": "2018-04-09T02:06:23.147Z",
"updatedAt": "2018-04-09T02:16:51.567Z",
"id": "5acaca9f581e78b003c163cc"
}
]
}
```
### 物流信息登记管理
<a id="carriage/carriageInfo/add"></a>
- [x] [回到顶部](#top)
#### 1.添加物流信息
##### 接口地址
```
POST /carriage/carriageInfo/add
```
接口作用:
```
物流企业添加物流信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
companyName|enum|企业名称|Y
waybillNo|string|运单号|Y
carriageStatus|string|运单号|Y
shipper|string|发货方(企业信用代码)|Y
deliveryTime|string|发货时间|Y
receiver|string|接受方(企业信用代码)|Y
receiverTime|string|接收时间|Y
contractNo|string|合同号|Y
variety|string|品种|Y
startCount|string|装车数量|Y
endCount|string|卸车数量|Y
packing|string|包装方式|Y
carriageType|string|运力类型|Y
carriageNo|string|船号/车号|Y
driverName|string|司机姓名|Y
driverTele|string|司机电话|Y
inType|string|录入类型(枚举:待办,自主)|Y
##### input:
```
{
"socialcreditCode":"123451",
"companyName":"京东",
"waybillNo":"12345611",
"carriageStatus":"在途",
"shipper":"123",
"deliveryTime":"20180410",
"receiver":"12345",
"receiverTime":"20180411",
"contractNo":"12345",
"variety":"12345",
"startCount":"12345",
"endCount":"12345",
"packing":"包装",
"carriageType":"汽车",
"carriageNo":"12345",
"driverName":"时昌雪",
"driverTele":"18811776463",
"inType":"自主"
}
```
##### output:
```
{
"code": 1,
"message": "创建成功",
"data": {
"socialcreditCode": "123451",
"companyName": "京东",
"waybillNo": "12345611",
"carriageStatus": "在途",
"shipper": "123",
"deliveryTime": "20180410",
"receiver": "12345",
"receiverTime": "20180411",
"contractNo": "12345",
"variety": "12345",
"startCount": "12345",
"endCount": "12345",
"packing": "包装",
"carriageType": "汽车",
"carriageNo": "12345",
"driverName": "时昌雪",
"driverTele": "18811776463",
"inType": "自主",
"createdAt": "2018-04-15T07:16:51.930Z",
"updatedAt": "2018-04-15T07:16:51.930Z",
"id": "5ad2fc63ee507c3d0430c003"
}
}
```
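
Because a shipment record carries many fields, it is convenient to wrap the call in a small helper. The sketch below is illustrative only (not part of the repository, Node.js 18+ `fetch` assumed) and simply posts the documented example payload:

```
// Illustrative sketch only: register one shipment (waybill) record.
const BASE_URL = 'http://192.168.127.12:1337/api';

async function addCarriageInfo(record) {
  const res = await fetch(`${BASE_URL}/carriage/carriageInfo/add`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(record)
  });
  return res.json();
}

// Placeholder payload mirroring the documented input example above.
addCarriageInfo({
  socialcreditCode: '123451',
  companyName: '京东',
  waybillNo: '12345611',
  carriageStatus: '在途',
  shipper: '123',
  deliveryTime: '20180410',
  receiver: '12345',
  receiverTime: '20180411',
  contractNo: '12345',
  variety: '12345',
  startCount: '12345',
  endCount: '12345',
  packing: '包装',
  carriageType: '汽车',
  carriageNo: '12345',
  driverName: '时昌雪',
  driverTele: '18811776463',
  inType: '自主'
}).then(({ code, message }) => console.log(code, message)).catch(console.error);
```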
<a id="carriage/carriageInfo/delete"></a>
- [x] [回到顶部](#top)
#### 2.删除登记信息
##### 接口地址
```
POST /carriage/carriageInfo/delete
```
接口作用:
```
物流企业删除登记信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|编号|Y
##### input:
```
{
"id":"5acad0a4f11587d9033e35c3"
}
```
##### output:
```
{
"code": 1,
"message": "删除成功",
"data": [
{
"socialcreditCode": "123451",
"companyName": "京东",
"waybillNo": "12345611",
"carriageStatus": "在途",
"shipper": "123",
"deliveryTime": "20180410",
"receiver": "12345",
"receiverTime": "20180411",
"contractNo": "12345",
"variety": "12345",
"startCount": "12345",
"endCount": "12345",
"packing": "包装",
"carriageType": "汽车",
"carriageNo": "12345",
"driverName": "时昌雪",
"driverTele": "18811776463",
"inType": "自主",
"createdAt": "2018-04-15T07:16:51.930Z",
"updatedAt": "2018-04-15T07:16:51.930Z",
"id": "5ad2fc63ee507c3d0430c003"
}
]
}
```
<a id="carriage/carriageInfo/update"></a>
- [x] [回到顶部](#top)
#### 3.更新登记信息
##### 接口地址
```
POST /carriage/carriageInfo/update
```
接口作用:
```
物流企业更新登记信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|编号|Y
carriageType|string|运力类型|N
##### input:
```
{
"id":"5acad351ae906d3604cdd3bc",
"driverName":"Lucy"
}
```
##### output:
```
{
"code": 1,
"message": "更新成功",
"data": [
{
"socialcreditCode": "123451",
"companyName": "京东",
"waybillNo": "12345611",
"carriageStatus": "在途",
"shipper": "123",
"deliveryTime": "20180410",
"receiver": "12345",
"receiverTime": "20180411",
"contractNo": "12345",
"variety": "12345",
"startCount": "12345",
"endCount": "12345",
"packing": "包装",
"carriageType": "汽车",
"carriageNo": "12345",
"driverName": "Lucy",
"driverTele": "18811776463",
"inType": "自主",
"createdAt": "2018-04-15T07:16:51.930Z",
"updatedAt": "2018-04-15T07:16:51.930Z",
"id": "5ad2fc63ee507c3d0430c003"
}
]
}
```
<a id="carriage/carriageInfo/find"></a>
- [x] [回到顶部](#top)
#### 4.查找登记信息
##### 接口地址
```
GET /carriage/carriageInfo/find
```
接口作用:
```
物流企业查找登记信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
##### input:
```
{
"socialcreditCode":"2"
}
```
##### output:
```
{
"code": 1,
"message": "查找成功",
"data": [
{
"socialcreditCode": "123451",
"companyName": "京东",
"waybillNo": "12345611",
"carriageStatus": "在途",
"shipper": "123",
"deliveryTime": "20180410",
"receiver": "12345",
"receiverTime": "20180411",
"contractNo": "12345",
"variety": "12345",
"startCount": "12345",
"endCount": "12345",
"packing": "包装",
"carriageType": "汽车",
"carriageNo": "12345",
"driverName": "Lucy",
"driverTele": "18811776463",
"inType": "自主",
"createdAt": "2018-04-15T07:16:51.930Z",
"updatedAt": "2018-04-15T07:16:51.930Z",
"id": "5ad2fc63ee507c3d0430c003"
}
]
}
```
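Note that this is a GET endpoint, so (as the controller's use of `req.query` later in this repository implies) the parameter travels in the query string rather than in a JSON body. A minimal sketch, reusing the assumed base URL from the earlier example:
```
// GET request: socialcreditCode goes into the query string.
const BASE_URL = 'http://localhost:1337'; // assumed deployment address
fetch(`${BASE_URL}/carriage/carriageInfo/find?socialcreditCode=2`)
  .then(res => res.json())
  .then(result => console.log(result.data)); // array of matching records
```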
### 司机管理
<a id="carriage/driver/add"></a>
- [x] [回到顶部](#top)
#### 1.添加司机
##### 接口地址
```
POST /carriage/driver/add
```
接口作用:
```
物流企业添加司机
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
driverName|string|司机姓名|Y
driverTele|string|司机电话|Y
##### input:
```
{
"socialcreditCode":"2",
"driverName":"Lucy",
"driverTele":"18811715625"
}
```
##### output:
```
{
"code": 1,
"message": "创建成功",
"data": {
"socialcreditCode": "2",
"driverName": "Lucy",
"driverTele": "18811715625",
"createdAt": "2018-04-09T02:32:04.724Z",
"updatedAt": "2018-04-09T02:32:04.724Z",
"id": "5acad0a4f11587d9033e35c3"
}
}
```
<a id="carriage/driver/delete"></a>
- [x] [回到顶部](#top)
#### 2.删除司机
##### 接口地址
```
POST /carriage/driver/delete
```
接口作用:
```
物流企业删除司机
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|编号|Y
##### input:
```
{
"id":"5acad0a4f11587d9033e35c3"
}
```
##### output:
```
{
"code": 1,
"message": "删除成功",
"data": [
{
"socialcreditCode": "2",
"driverName": "时昌雪2",
"driverTele": "18811715625",
"createdAt": "2018-04-09T02:32:04.724Z",
"updatedAt": "2018-04-09T02:32:04.724Z",
"id": "5acad0a4f11587d9033e35c3"
}
]
}
```
<a id="carriage/driver/update"></a>
- [x] [回到顶部](#top)
#### 3.更新司机
##### 接口地址
```
POST /carriage/driver/update
```
接口作用:
```
物流企业更新司机
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|编号|Y
driverName|string|司机姓名(或其他待更新字段)|N
##### input:
```
{
"id":"5acad351ae906d3604cdd3bc",
"driverName":"Lucy"
}
```
##### output:
```
{
"code": 1,
"message": "更新成功",
"data": [
{
"socialcreditCode": "2",
"driverName": "Lucy",
"driverTele": "18811715625",
"createdAt": "2018-04-09T02:43:29.057Z",
"updatedAt": "2018-04-09T02:43:43.837Z",
"id": "5acad351ae906d3604cdd3bc"
}
]
}
```
<a id="carriage/driver/find"></a>
- [x] [回到顶部](#top)
#### 4.查找司机
##### 接口地址
```
GET /carriage/driver/find
```
接口作用:
```
物流企业查找司机
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
##### input:
```
{
"socialcreditCode":"2"
}
```
##### output:
```
{
"code": 1,
"message": "查找成功",
"data": [
{
"socialcreditCode": "2",
"driverName": "时昌雪2",
"driverTele": "18811715625",
"createdAt": "2018-04-09T02:43:29.057Z",
"updatedAt": "2018-04-09T02:43:43.837Z",
"id": "5acad351ae906d3604cdd3bc"
}
]
}
```
### 四.库区管理
<a id="/reservoir/add"></a>
- [x] [回到顶部](#top)
#### 1.添加库区
##### 接口地址
```
POST /reservoir/add
```
##### 接口作用:
```
物流企业添加库区
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
reservoirNo|string|库区号|Y
reservoirName|string|库区名称|Y
QHDM|string|行政区域代码|Y
province|string|省份|Y
city|string|城市|Y
county|string|县|Y
town|string|镇|Y
postalCode|string|邮政编码|Y
##### input:
```
{
"socialcreditCode":"1",
"reservoirNo":"001",
"reservoirName":"库区2",
"QHDM":"1fsd3435n1",
"province":"北京",
"city":"北京",
"county":"北京",
"town":"北京",
"postalCode":"1000439"
}
```
##### output:
```
{
"code": 1,
"message": "创建成功",
"data": {
"socialcreditCode": "1",
"reservoirNo": "001",
"reservoirName": "库区2",
"QHDM": "1fsd3435n1",
"province": "北京",
"city": "北京",
"county": "北京",
"town": "北京",
"postalCode": "1000439",
"createdAt": "2018-04-09T02:48:07.943Z",
"updatedAt": "2018-04-09T02:48:07.943Z",
"id": "5acad467ae906d3604cdd3bd"
}
}
```
<a id="/reservoir/delete"></a>
- [x] [回到顶部](#top)
#### 2.删除库区
##### 接口地址
```
POST /reservoir/delete
```
##### 接口作用:
```
物流企业删除库区
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
##### input:
```
{
"id":"5acad467ae906d3604cdd3bd"
}
```
##### output:
```
{
"code": 1,
"message": "删除成功",
"data": [
{
"socialcreditCode": "1",
"reservoirNo": "001",
"reservoirName": "库区2",
"QHDM": "1fsd3435n1",
"province": "北京",
"city": "北京",
"county": "北京",
"town": "北京",
"postalCode": "1000439",
"createdAt": "2018-04-09T02:48:07.943Z",
"updatedAt": "2018-04-09T02:48:07.943Z",
"id": "5acad467ae906d3604cdd3bd"
}
]
}
```
<a id="/reservoir/update"></a>
- [x] [回到顶部](#top)
#### 3.更新库区
##### 接口地址
```
POST /reservoir/update
```
##### 接口作用:
```
物流企业更新库区
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
reservoirName|string|库区名称|N
##### input:
```
{
"id":"5acae874ae906d3604cdd3be",
"reservoirName": "库区1"
}
```
##### output:
```
{
"code": 1,
"message": "更新成功",
"data": [
{
"socialcreditCode": "1",
"reservoirNo": "001",
"reservoirName": "库区1",
"QHDM": "1fsd3435n1",
"province": "北京",
"city": "北京",
"county": "北京",
"town": "北京",
"postalCode": "1000439",
"createdAt": "2018-04-09T04:13:40.120Z",
"updatedAt": "2018-04-09T04:14:46.713Z",
"id": "5acae874ae906d3604cdd3be"
}
]
}
```
<a id="/reservoir/find"></a>
- [x] [回到顶部](#top)
#### 4.查找库区
##### 接口地址
```
POST /reservoir/find
```
##### 接口作用:
```
物流企业查找库区
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
##### input:
```
{
"socialcreditCode":"1"
}
```
##### output:
```
{
"code": 1,
"message": "查找成功",
"data": [
{
"socialcreditCode": "1",
"reservoirNo": "001",
"reservoirName": "库区1",
"QHDM": "1fsd3435n1",
"province": "北京",
"city": "北京",
"county": "北京",
"town": "北京",
"postalCode": "1000439",
"createdAt": "2018-04-09T04:13:40.120Z",
"updatedAt": "2018-04-09T04:14:46.713Z",
"id": "5acae874ae906d3604cdd3be"
}
]
}
```
### 仓库管理
<a id="/stock/add"></a>
- [x] [回到顶部](#top)
#### 1.添加仓库
##### 接口地址
```
POST /stock/add
```
##### 接口作用:
```
物流企业添加仓库
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
reservoirNo|string|库区号|Y
reservoirName|string|库区名称|Y
stockNo|string|仓库号|Y
stockName|string|仓库名称|Y
stockType|string|仓库类型|Y
capacity|string|容量|Y
vaildCapacity|string|有效容量|Y
##### input:
```
{
"socialcreditCode":"1",
"reservoirNo":"001",
"reservoirName":"库区1",
"stockNo":"001",
"stockName":"仓库1",
"stockType":"圆筒仓",
"capacity":"10000",
"vaildCapacity":"10000"
}
```
##### output:
```
{
"code": 1,
"message": "创建成功",
"data": {
"socialcreditCode": "1",
"reservoirNo": "001",
"reservoirName": "库区1",
"stockNo": "001",
"stockName": "仓库1",
"stockType": "圆筒仓",
"capacity": "10000",
"vaildCapacity": "10000",
"createdAt": "2018-04-09T04:24:31.460Z",
"updatedAt": "2018-04-09T04:24:31.460Z",
"id": "<KEY>"
}
}
```
<a id="/stock/delete"></a>
- [x] [回到顶部](#top)
#### 2.删除仓库
##### 接口地址
```
POST /stock/delete
```
##### 接口作用:
```
物流企业删除仓库
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
##### input:
```
{
"id":"5acad467ae906d3604cdd3bd"
}
```
##### output:
```
{
"code": 1,
"message": "删除成功",
"data": [
{
"socialcreditCode": "1",
"reservoirNo": "001",
"reservoirName": "库区1",
"stockNo": "001",
"stockName": "仓库1",
"stockType": "圆筒仓",
"capacity": "10000",
"vaildCapacity": "10000",
"createdAt": "2018-04-09T04:24:31.460Z",
"updatedAt": "2018-04-09T04:24:31.460Z",
"id": "<KEY>"
}
]
}
```
<a id="/stock/update"></a>
- [x] [回到顶部](#top)
#### 3.更新仓库
##### 接口地址
```
POST /stock/update
```
##### 接口作用:
```
物流企业更新仓库
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
reservoirName|string|库区名称(或其他待更新字段)|N
##### input:
```
{
"id":"5acae874ae906d3604cdd3be",
"reservoirName": "库区1"
}
```
##### output:
```
{
"code": 1,
"message": "更新成功",
"data": [
{
"socialcreditCode": "1",
"reservoirNo": "001",
"reservoirName": "库区1",
"stockNo": "001",
"stockName": "仓库1",
"stockType": "圆筒仓",
"capacity": "10000",
"vaildCapacity": "10000",
"createdAt": "2018-04-09T04:32:31.964Z",
"updatedAt": "2018-04-09T04:33:03.019Z",
"id": "5acaecdfae906d3604cdd3c0"
}
]
}
```
<a id="/stock/find"></a>
- [x] [回到顶部](#top)
#### 4.查找仓库
##### 接口地址
```
POST /stock/find
```
##### 接口作用:
```
物流企业查找仓库
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
##### input:
```
{
"socialcreditCode":"1"
}
```
##### output:
```
{
"code": 1,
"message": "查找成功",
"data": [
{
"socialcreditCode": "1",
"reservoirNo": "001",
"reservoirName": "库区1",
"stockNo": "001",
"stockName": "仓库1",
"stockType": "圆筒仓",
"capacity": "10000",
"vaildCapacity": "10000",
"createdAt": "2018-04-09T04:32:31.964Z",
"updatedAt": "2018-04-09T04:33:03.019Z",
"id": "5acaecdfae906d3604cdd3c0"
}
]
}
```
### 廒间管理
<a id="/ao/add"></a>
- [x] [回到顶部](#top)
#### 1.添加廒间
##### 接口地址
```
POST /ao/add
```
##### 接口作用:
```
物流企业添加廒间
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
reservoirNo|string|库区号|Y
reservoirName|string|库区名称|Y
stockNo|string|仓库号|Y
stockName|string|仓库名称|Y
aoNo|string|廒间号|Y
aoName|string|廒间名称|Y
capacity|string|容量|Y
##### input:
```
{
"socialcreditCode":"1",
"reservoirNo":"001",
"reservoirName":"库区1",
"stockNo":"001",
"stockName":"仓库1",
"aoNo":"001",
"aoName":"廒间1",
"capacity":"200"
}
```
##### output:
```
{
"code": 1,
"message": "创建成功",
"data": {
"socialcreditCode": "1",
"reservoirNo": "001",
"reservoirName": "库区1",
"stockNo": "001",
"stockName": "仓库1",
"aoNo": "001",
"aoName": "廒间1",
"capacity": "200",
"createdAt": "2018-04-10T11:50:37.898Z",
"updatedAt": "2018-04-10T11:50:37.898Z",
"id": "5acca50d0968d70e6cac7d78"
}
}
```
<a id="/ao/delete"></a>
- [x] [回到顶部](#top)
#### 2.删除廒间
##### 接口地址
```
POST /ao/delete
```
##### 接口作用:
```
物流企业删除廒间
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
##### input:
```
{
"id":"5acad467ae906d3604cdd3bd"
}
```
##### output:
```
{
"code": 1,
"message": "删除成功",
"data": [
{
"socialcreditCode": "1",
"reservoirNo": "001",
"reservoirName": "库区1",
"stockNo": "001",
"stockName": "仓库1",
"aoNo": "001",
"aoName": "廒间1",
"capacity": "200",
"createdAt": "2018-04-10T12:57:56.475Z",
"updatedAt": "2018-04-10T12:57:56.475Z",
"id": "5accb4d4c8df950620674b21"
}
]
}
```
<a id="/ao/update"></a>
- [x] [回到顶部](#top)
#### 3.更新廒间
##### 接口地址
```
POST /ao/update
```
##### 接口作用:
```
物流企业更新廒间
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
reservoirName|string|库区名称(或其他待更新字段)|N
##### input:
```
{
"id":"5acae874ae906d3604cdd3be",
"reservoirName": "库区1"
}
```
##### output:
```
{
"code": 1,
"message": "更新成功",
"data": [
{
"socialcreditCode": "1",
"reservoirNo": "001",
"reservoirName": "库区1",
"stockNo": "001",
"stockName": "仓库1",
"stockType": "圆筒仓",
"capacity": "10000",
"vaildCapacity": "10000",
"createdAt": "2018-04-09T04:32:31.964Z",
"updatedAt": "2018-04-09T04:33:03.019Z",
"id": "5acaecdfae906d3604cdd3c0"
}
]
}
```
<a id="/ao/find"></a>
- [x] [回到顶部](#top)
#### 4.查找廒间
##### 接口地址
```
POST /ao/find
```
##### 接口作用:
```
物流企业查找廒间
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
##### input:
```
{
"socialcreditCode":"1"
}
```
##### output:
```
{
"code": 1,
"message": "查找成功",
"data": [
{
"socialcreditCode": "1",
"reservoirNo": "001",
"reservoirName": "库区1",
"stockNo": "001",
"stockName": "仓库1",
"aoNo": "001",
"aoName": "廒间1",
"capacity": "200",
"createdAt": "2018-04-10T13:01:22.009Z",
"updatedAt": "2018-04-10T13:01:22.009Z",
"id": "5accb5a2c8df950620674b22"
}
]
}
```
### 货位管理
<a id="/goods/add"></a>
- [x] [回到顶部](#top)
#### 1.添加货位
##### 接口地址
```
POST /goods/add
```
##### 接口作用:
```
物流企业添加货位
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
reservoirNo|string|库区号|Y
reservoirName|string|库区名称|Y
stockNo|string|仓库号|Y
stockName|string|仓库名称|Y
aoNo|string|廒间号|Y
aoName|string|廒间名称|Y
goodsNo|string|货位号|Y
goodsName|string|货位名称|Y
sealingMark|string|封仓时间|Y
goodsCode|string|货位编码|Y
capacity|integer|容量|Y
##### input:
```
{
"socialcreditCode":"1",
"reservoirNo":"001",
"reservoirName":"库区1",
"stockNo":"001",
"stockName":"仓库1",
"aoNo":"001",
"aoName":"廒间1",
"goodsNo":"001",
"goodsName":"货位1",
"sealingMark":"20180408",
"goodsCode":"001001001",
"capacity":"200"
}
```
##### output:
```
{
"code": 1,
"message": "创建成功",
"data": {
"socialcreditCode": "1",
"reservoirNo": "001",
"reservoirName": "库区1",
"stockNo": "001",
"stockName": "仓库1",
"aoNo": "001",
"aoName": "廒间1",
"goodsNo": "001",
"goodsName": "货位1",
"sealingMark": "20180408",
"goodsCode": "001001001",
"capacity": "200",
"createdAt": "2018-04-10T13:10:24.429Z",
"updatedAt": "2018-04-10T13:10:24.429Z",
"id": "<KEY>"
}
}
```
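The sample values suggest that `goodsCode` is just the concatenation of the reservoir, stock, ao and goods numbers (here `001`+`001`+`001`+`001` = `001001001001`; the find example further below shows `001`+`P01`+`001`+`01` = `001P0100101`). This composition rule is only inferred from the examples, so the helper below is an illustration, not an API guarantee:
```
// Inferred helper: build a goodsCode by concatenating the location numbers.
function buildGoodsCode(reservoirNo, stockNo, aoNo, goodsNo) {
  return `${reservoirNo}${stockNo}${aoNo}${goodsNo}`;
}

console.log(buildGoodsCode('001', 'P01', '001', '01')); // -> '001P0100101'
```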
<a id="/goods/delete"></a>
- [x] [回到顶部](#top)
#### 2.删除货位
##### 接口地址
```
POST /goods/delete
```
##### 接口作用:
```
物流企业删除货位
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
##### input:
```
{
"id":"<KEY>"
}
```
##### output:
```
{
"code": 1,
"message": "删除成功",
"data": [
{
"socialcreditCode": "1",
"reservoirNo": "001",
"reservoirName": "库区1",
"stockNo": "001",
"stockName": "仓库1",
"aoNo": "001",
"aoName": "廒间1",
"goodsNo": "001",
"goodsName": "货位1",
"sealingMark": "20180408",
"goodsCode": "001001001",
"capacity": "200",
"createdAt": "2018-04-10T13:10:24.429Z",
"updatedAt": "2018-04-10T13:10:24.429Z",
"id": "5accb7c0e2957fe40f231b66"
}
]
}
```
<a id="/goods/update"></a>
- [x] [回到顶部](#top)
#### 3.更新货位
##### 接口地址
```
POST /goods/update
```
##### 接口作用:
```
物流企业更新货位
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
reservoirName|string|库区名称(或其他待更新字段)|N
##### input:
```
{
"id":"5acae874ae906d3604cdd3be",
"reservoirName": "库区1"
}
```
##### output:
```
{
"code": 1,
"message": "更新成功",
"data": [
{
"socialcreditCode": "2",
"reservoirNo": "001",
"reservoirName": "库区1",
"stockNo": "001",
"stockName": "仓库1",
"aoNo": "001",
"aoName": "廒间1",
"goodsNo": "001",
"goodsName": "货位1",
"sealingMark": "20180408",
"goodsCode": "001001001",
"capacity": "200",
"createdAt": "2018-04-10T13:14:22.084Z",
"updatedAt": "2018-04-10T13:14:33.641Z",
"id": "5accb8aec8df950620674b24"
}
]
}
```
<a id="/goods/find"></a>
- [x] [回到顶部](#top)
#### 4.查找货位
##### 接口地址
```
POST /goods/find
```
##### 接口作用:
```
物流企业查找货位
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
##### input:
```
{
"socialcreditCode":"1",
}
```
##### output:
```
{
"code": 1,
"message": "查找成功",
"data": {
"goods": [
{
"socialcreditCode": "xiamenjiagongB1234",
"reservoirNo": "001",
"reservoirName": "库区001",
"stockNo": "P01",
"stockName": "平房仓P01",
"aoNo": "001",
"aoName": "廒间001",
"goodsNo": "01",
"goodsName": "货位01",
"sealingMark": "20180411",
"goodsCode": "001P0100101",
"capacity": "500",
"createdAt": "2018-04-12T12:59:31.400Z",
"updatedAt": "2018-04-12T12:59:31.400Z",
"id": "5acf58338dcb0e6feebef41d"
},
{
"socialcreditCode": "xiamenjiagongB1234",
"reservoirNo": "001",
"reservoirName": "库区001",
"stockNo": "P01",
"stockName": "平房仓P01",
"aoNo": "001",
"aoName": "廒间001",
"goodsNo": "02",
"goodsName": "货位02",
"sealingMark": "20180411",
"goodsCode": "001P0100102",
"capacity": "500",
"createdAt": "2018-04-12T12:59:46.308Z",
"updatedAt": "2018-04-12T12:59:46.308Z",
"id": "5acf58428dcb0e6feebef41e"
},
{
"socialcreditCode": "xiamenjiagongB1234",
"reservoirNo": "002",
"reservoirName": "库区002",
"stockNo": "L02",
"stockName": "楼房仓L02",
"aoNo": "002",
"aoName": "廒间002",
"goodsNo": "04",
"goodsName": "货位04",
"sealingMark": "20180411",
"goodsCode": "002L0200204",
"capacity": "500",
"createdAt": "2018-04-15T05:44:16.354Z",
"updatedAt": "2018-04-15T05:44:16.354Z",
"id": "5ad2e6b004ca76686242a177"
},
{
"socialcreditCode": "xiamenjiagongB1234",
"reservoirNo": "001",
"reservoirName": "库区001",
"stockNo": "P01",
"stockName": "平房仓P01",
"aoNo": "001",
"aoName": "廒间001",
"goodsNo": "03",
"goodsName": "货位03",
"sealingMark": "20180411",
"goodsCode": "001P0100103",
"capacity": "500",
"createdAt": "2018-04-15T05:44:54.043Z",
"updatedAt": "2018-04-15T05:44:54.043Z",
"id": "5ad2e6d604ca76686242a178"
},
{
"socialcreditCode": "xiamenjiagongB1234",
"reservoirNo": "001",
"reservoirName": "库区001",
"stockNo": "P01",
"stockName": "平房仓P01",
"aoNo": "001",
"aoName": "廒间001",
"goodsNo": "05",
"goodsName": "货位05",
"sealingMark": "20180411",
"goodsCode": "001P0100105",
"capacity": "500",
"createdAt": "2018-04-15T05:45:21.805Z",
"updatedAt": "2018-04-15T05:45:21.805Z",
"id": "5ad2e6f104ca76686242a179"
}
],
"goodsInfo": [
{
"socialcreditCode": "xiamenjiagongB1234",
"goodsCode": "001P0100101",
"variety": "小麦",
"grade": "二级",
"production": "哈尔滨",
"producingYear": "2018",
"packing": "包装",
"capacity": 100,
"createdAt": "2018-04-16T07:45:42.923Z",
"updatedAt": "2018-04-16T07:45:42.923Z",
"id": "5ad454a697c64dfc0be4d1d5"
}
]
}
}
```
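Unlike the other find endpoints, this one returns a composite object: the slot definitions (`goods`) together with the grain currently registered on them (`goodsInfo`). A sketch of how such a response could be assembled server-side, in the same callback style as the controllers later in this repository; the model names `Goods`/`GoodsInfo` and the exact queries are assumptions based on the endpoint names:
```
// Sketch only: query both collections for the company and merge the results
// into the { goods, goodsInfo } shape shown above.
module.exports = {
  find: (req, res) => {
    let params = req.body;
    Goods.find(params).exec((err, goods) => {
      if (err) return res.send(Message.messages(0, '查找失败!', err));
      GoodsInfo.find({ socialcreditCode: params.socialcreditCode }).exec((err, goodsInfo) => {
        if (err) return res.send(Message.messages(0, '查找失败!', err));
        res.send(Message.messages(1, '查找成功', { goods, goodsInfo }));
      });
    });
  }
};
```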
### 货位粮食信息管理
<a id="/goodsInfo/add"></a>
- [x] [回到顶部](#top)
#### 1.添加货位粮食信息
##### 接口地址
```
POST /goodsInfo/add
```
##### 接口作用:
```
物流企业添加货位粮食信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
companyName|string|企业名称|Y
goodsCode|string|货位编码|Y
variety|string|品种|Y
grade|string|等级|Y
production|string|产地|Y
producingYear|string|生产年份|Y
packing|string|包装方式|Y
capacity|integer|容量|Y
QC|boolean|是否已有质检信息(默认:false)|Y
##### input:
```
{
"socialcreditCode":"12345671",
"companyName":"baidu",
"goodsCode":"001001001001",
"variety":"小麦",
"grade":"二级",
"production":"哈尔滨",
"producingYear":"2018",
"packing":"包装",
"capacity":100
}
```
##### output:(若当前货位有粮食就更新,没有就新建)
```
{
"code": 1,
"message": "更新货位信息成功",
"data": {
"preGoodsInfo": [
{
"socialcreditCode": "12345671",
"companyName": "baidu",
"goodsCode": "001001001001",
"variety": "小麦",
"grade": "二级",
"production": "哈尔滨",
"producingYear": "2018",
"packing": "包装",
"capacity": 500,
"createdAt": "2018-04-16T03:40:31.413Z",
"updatedAt": "2018-04-16T03:54:44.835Z",
"id": "5ad41b2f85d4970c0774ae2d"
}
],
"currentGoodsInfo": [
{
"socialcreditCode": "12345671",
"companyName": "baidu",
"goodsCode": "001001001001",
"variety": "小麦",
"grade": "二级",
"production": "哈尔滨",
"producingYear": "2018",
"packing": "包装",
"capacity": 600,
"createdAt": "2018-04-16T03:40:31.413Z",
"updatedAt": "2018-04-16T04:00:07.030Z",
"id": "5ad41b2f85d4970c0774ae2d"
}
]
}
}
```
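The note above says this endpoint behaves as an upsert: if grain is already registered on the slot the record is updated, otherwise a new one is created. A sketch of that logic in the callback style used by the controllers in this repository (the `GoodsInfo` model name is taken from the endpoint name and should be treated as an assumption):
```
// Sketch of the documented upsert behaviour (not the actual controller code).
let params = req.body;
GoodsInfo.find({ socialcreditCode: params.socialcreditCode, goodsCode: params.goodsCode })
  .exec((err, existing) => {
    if (err) return res.send(Message.messages(0, '出错啦!', err));
    if (existing.length) {
      // Grain already registered on this slot: update the existing record.
      GoodsInfo.update({ id: existing[0].id }, params).exec((err, updated) => {
        if (err) return res.send(Message.messages(0, '更新失败!', err));
        res.send(Message.messages(1, '更新货位信息成功',
          { preGoodsInfo: existing, currentGoodsInfo: updated }));
      });
    } else {
      // Nothing on the slot yet: create a new record.
      GoodsInfo.create(params).exec((err, created) => {
        if (err) return res.send(Message.messages(0, '创建失败', err));
        res.send(Message.messages(1, '创建成功', created));
      });
    }
  });
```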
<a id="/goodsInfo/delete"></a>
- [x] [回到顶部](#top)
#### 2.删除货位粮食信息
##### 接口地址
```
POST /goodsInfo/delete
```
##### 接口作用:
```
物流企业删除货位粮食信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
##### input:
```
{
"id":"5accb7c0e2957fe40f231b66"
}
```
##### output:
```
{
"code": 1,
"message": "删除成功",
"data": [
{
"socialcreditCode": "12345671",
"companyName": "baidu",
"goodsCode": "001001001001",
"variety": "小麦",
"grade": "二级",
"production": "黑龙江",
"producingYear": "2019",
"packing": "包装",
"capacity": 100,
"createdAt": "2018-04-16T03:40:31.413Z",
"updatedAt": "2018-04-16T04:21:44.476Z",
"id": "<KEY>"
}
]
}
```
<a id="/goodsInfo/update"></a>
- [x] [回到顶部](#top)
#### 3.更新货位粮食信息
##### 接口地址
```
POST /goodsInfo/update
```
##### 接口作用:
```
物流企业更新货位粮食信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
capacity|integer|容量(或其他待更新字段)|N
##### input:
```
{
"id":"<KEY>",
"socialcreditCode":"12345671",
"capacity":100
}
```
##### output:
```
{
"code": 1,
"message": "更新成功",
"data": [
{
"socialcreditCode": "12345671",
"companyName": "baidu",
"goodsCode": "001001001001",
"variety": "小麦",
"grade": "二级",
"production": "黑龙江",
"producingYear": "2019",
"packing": "包装",
"capacity": 100,
"createdAt": "2018-04-16T03:40:31.413Z",
"updatedAt": "2018-04-16T04:21:44.476Z",
"id": "5ad41b2f85d4970c0774ae2d"
}
]
}
```
<a id="/goodsInfo/find"></a>
- [x] [回到顶部](#top)
#### 4.查找货位粮食信息
##### 接口地址
```
POST /goodsInfo/find
```
##### 接口作用:
```
物流企业查找货位粮食信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
goodsCode|string|货位编码|Y
##### input:
```
{
"socialcreditCode":"12345671",
"goodsCode":"001001001001"
}
```
##### output:
```
{
"code": 1,
"message": "查找成功",
"data": [
{
"socialcreditCode": "12345671",
"companyName": "baidu",
"goodsCode": "001001001001",
"variety": "小麦",
"grade": "二级",
"production": "哈尔滨",
"producingYear": "2018",
"packing": "包装",
"capacity": 600,
"createdAt": "2018-04-16T03:40:31.413Z",
"updatedAt": "2018-04-16T04:00:07.030Z",
"id": "5ad41b2f85d4970c0774ae2d"
}
]
}
```
### 五.企业客户管理
<a id="/client/add"></a>
- [x] [回到顶部](#top)
#### 1.添加客户
##### 接口地址
```
POST /client/add
```
##### 接口作用:
```
企业添加客户
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
companysocialcreditCode|string|客户的社会信用代码|Y
companyName|string|客户名称|Y
province|string|省|Y
city|string|城市|Y
address|string|企业地址|Y
lealPerson|string|法人代表|Y
legalTele|string|法人代表联系电话|Y
companyLinkman|string|企业联系人|Y
companyTele|string|联系电话|Y
email|string|邮箱|Y
fax|string|传真|Y
##### input:
```
{
"socialcreditCode":"1",
"companysocialcreditCode":"百度2",
"companyName":"百度2",
"province":"北京",
"city":"北京",
"address":"北京朝阳区",
"lealPerson":"lucy",
"legalTele":"18811715625",
"companyLinkman":"jeo",
"companyTele":"188116653748",
"email":"<EMAIL>",
"fax":"123-47899"
}
```
##### output:
```
{
"code": 1,
"message": "创建成功",
"data": {
"socialcreditCode": "1",
"companysocialcreditCode": "百度2",
"companyName": "百度2",
"province": "北京",
"city": "北京",
"address": "北京朝阳区",
"lealPerson": "lucy",
"legalTele": "18811715625",
"companyLinkman": "jeo",
"companyTele": "188116653748",
"email": "<EMAIL>",
"fax": "123-47899",
"createdAt": "2018-04-11T04:51:56.404Z",
"updatedAt": "2018-04-11T04:51:56.404Z",
"id": "<KEY>"
}
}
```
<a id="/client/delete"></a>
- [x] [回到顶部](#top)
#### 2.删除客户
##### 接口地址
```
POST /client/delete
```
##### 接口作用:
```
企业删除客户
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
##### input:
```
{
"id":"<KEY>"
}
```
##### output:
```
{
"code": 1,
"message": "删除成功",
"data": [
{
"socialcreditCode": "1",
"companysocialcreditCode": "百度2",
"companyName": "百度2",
"province": "北京",
"city": "北京",
"address": "北京朝阳区",
"lealPerson": "lucy",
"legalTele": "18811715625",
"companyLinkman": "jeo",
"companyTele": "188116653748",
"email": "<EMAIL>",
"fax": "123-47899",
"createdAt": "2018-04-11T04:51:56.404Z",
"updatedAt": "2018-04-11T04:51:56.404Z",
"id": "<KEY>"
}
]
}
```
<a id="/client/update"></a>
- [x] [回到顶部](#top)
#### 3.更新客户
##### 接口地址
```
POST /client/update
```
##### 接口作用:
```
企业更新客户
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
companysocialcreditCode|string|客户的社会信用代码(或其他待更新字段)|N
##### input:
```
{
"id":"5acae874ae906d3604cdd3be",
"companysocialcreditCode": "百度"
}
```
##### output:
```
{
"code": 1,
"message": "更新成功",
"data": [
{
"socialcreditCode": "1",
"companysocialcreditCode": "百度",
"companyName": "百度2",
"province": "北京",
"city": "北京",
"address": "北京朝阳区",
"lealPerson": "lucy",
"legalTele": "18811715625",
"companyLinkman": "jeo",
"companyTele": "188116653748",
"email": "<EMAIL>",
"fax": "123-47899",
"createdAt": "2018-04-11T05:00:49.284Z",
"updatedAt": "2018-04-11T05:01:09.269Z",
"id": "5acd9681c8df950620674b2a"
}
]
}
```
<a id="/client/find"></a>
- [x] [回到顶部](#top)
#### 4.查找客户
##### 接口地址
```
POST /client/find
```
##### 接口作用:
```
企业查找客户
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
##### input:
```
{
"socialcreditCode":"1"
}
```
##### output:
```
{
"code": 1,
"message": "查找成功",
"data": [
{
"socialcreditCode": "1",
"companysocialcreditCode": "百度2",
"companyName": "百度2",
"province": "北京",
"city": "北京",
"address": "北京朝阳区",
"lealPerson": "lucy",
"legalTele": "18811715625",
"companyLinkman": "joe",
"companyTele": "188116653748",
"email": "<EMAIL>",
"fax": "123-47899",
"createdAt": "2018-04-11T04:51:56.404Z",
"updatedAt": "2018-04-11T04:51:56.404Z",
"id": "5acd946cc8df950620674b29"
}
]
}
```
### 六.企业入库管理
<a id="/stockIn/add"></a>
- [x] [回到顶部](#top)
#### 1.添加入库信息
##### 接口地址
```
POST /stockIn/add
```
##### 接口作用:
```
企业添加入库信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
stockinidentificationCode|string|入库识别码|Y
stockinNo|string|入库流水号|Y
variety|string|种类|Y
grade|string|等级|Y
production|string|产地|Y
producingYear|string|生产年份|Y
stockinCount|integer|入库数量|Y
packing|string|包装方式(枚举:包装,散装)|Y
waybillNo|string|运单号|Y
carriage|string|承运方的社会信用代码|Y
carriageName|string|承运方的名称|Y
carriageType|string|运输类型(枚举:汽车,船舶)|Y
carriageNo|string|车号/船号|Y
driverName|string|司机姓名|Y
driverTele|string|司机电话|Y
stockinTime|string|入库时间|Y
upstreamorganizationCode|string|上游企业社会信用代码|Y
upstreamcompanyName|string|上游企业名称|Y
contractNo|string|合同号|Y
goodsCode|string|货位编码|Y
IPFS|string|上传至ipfs的hash|Y
inType|string|办理入库类型(枚举:待办,自主)|Y
##### input:
```
{
"socialcreditCode":"1234567",
"companyName": "百度",
"stockinidentificationCode":"123400100100100120180410",
"stockinNo":"shichangxue",
"variety":"小麦",
"grade":"二级",
"production":"黑龙江",
"producingYear":"2018",
"stockinCount":"10000",
"packing":"包装",
"carriageType":"汽车",
"carriageNo":"34hhjk8",
"stockinTime":"20180410",
"upstreamorganizationCode":"123456",
"upstreamcompanyName":"百度",
"contractNo":"dsahkflj43794",
"goodsCode":"001001001001",
"IPFS":"Qmdsfhuoefonfsdnncnsjf",
"inType": "待办"
}
```
##### output:
```
{
"code": 1,
"message": "创建成功",
"data": {
"socialcreditCode": "1234567",
"companyName": "百度",
"stockinidentificationCode": "123400100100100120180410",
"stockinNo": "shichangxue",
"variety": "小麦",
"grade": "二级",
"production": "黑龙江",
"producingYear": "2018",
"stockinCount": "10000",
"packing": "包装",
"carriageType": "汽车",
"carriageNo": "34hhjk8",
"stockinTime": "20180410",
"upstreamorganizationCode": "123456",
"contractNo": "dsahkflj43794",
"goodsCode": "001001001001",
"inType": "待办",
"createdAt": "2018-04-15T07:03:53.629Z",
"updatedAt": "2018-04-15T07:03:53.629Z",
"id": "<KEY>"
}
}
```
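The `stockinidentificationCode` in the sample (`123400100100100120180410`) looks like a concatenation of an organization prefix, the goods slot code and the stock-in date (`1234` + `001001001001` + `20180410`), matching the composition hinted at in the StockIn model's comment later in this repository. The exact rule is not specified here, so the helper below only illustrates that inferred format:
```
// Hypothetical helper: composition rule inferred from the sample value only.
function buildStockinIdentificationCode(orgPrefix, goodsCode, stockinTime) {
  return `${orgPrefix}${goodsCode}${stockinTime}`;
}

console.log(buildStockinIdentificationCode('1234', '001001001001', '20180410'));
// -> '123400100100100120180410'
```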
<a id="/stockIn/delete"></a>
- [x] [回到顶部](#top)
#### 2.删除入库信息
##### 接口地址
```
POST /stockIn/delete
```
##### 接口作用:
```
企业删除入库信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
##### input:
```
{
"id":"<KEY>"
}
```
##### output:
```
{
"code": 1,
"message": "删除成功",
"data": [
{
"socialcreditCode": "1234567",
"stockinidentificationCode": "123400100100100120180410",
"stockinNo": "shichangxue",
"variety": "小麦",
"grade": "二级",
"production": "黑龙江",
"producingYear": "2018",
"stockinCount": "10000",
"packing": "包装",
"carriageType": "汽车",
"carriageNo": "34hhjk8",
"stockinTime": "20180410",
"upstreamorganizationCode": "123456",
"contractNo": "dsahkflj43794",
"goodsCode": "001001001001",
"IPFS": "Qmdsfhuoefonfsdnncnsjf",
"createdAt": "2018-04-11T05:33:23.832Z",
"updatedAt": "2018-04-11T05:33:23.832Z",
"id": "5acd9e23c8df950620674b2e"
}
]
}
```
<a id="/stockIn/update"></a>
- [x] [回到顶部](#top)
#### 3.更新入库信息
##### 接口地址
```
POST /stockIn/update
```
##### 接口作用:
```
企业更新入库信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
stockinNo|string|入库流水号(或其他待更新字段)|N
##### input:
```
{
"id":"5acae874ae906d3604cdd3be",
"stockinNo": "123"
}
```
##### output:
```
{
"code": 1,
"message": "更新成功",
"data": [
{
"socialcreditCode": "1234567",
"stockinidentificationCode": "123400100100100120180410",
"stockinNo": "123",
"variety": "小麦",
"grade": "二级",
"production": "黑龙江",
"producingYear": "2018",
"stockinCount": "10000",
"packing": "包装",
"carriageType": "汽车",
"carriageNo": "34hhjk8",
"stockinTime": "20180410",
"upstreamorganizationCode": "123456",
"contractNo": "dsahkflj43794",
"goodsCode": "001001001001",
"IPFS": "Qmdsfhuoefonfsdnncnsjf",
"createdAt": "2018-04-11T05:33:23.832Z",
"updatedAt": "2018-04-11T05:33:23.832Z",
"id": "5acd9e23c8df950620674b2e"
}
]
}
```
<a id="/stockIn/find"></a>
- [x] [回到顶部](#top)
#### 4.查找入库信息
##### 接口地址
```
POST /stockIn/find
```
##### 接口作用:
```
企业查找入库信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
inType|string|入库类型(待办,自主)|Y
##### input:
```
{
"socialcreditCode":"1234567"
}
```
##### output:
```
{
"code": 1,
"message": "查找成功",
"data": [
{
"socialcreditCode": "1234567",
"stockinidentificationCode": "123400100100100120180410",
"stockinNo": "shichangxue",
"variety": "小麦",
"grade": "二级",
"production": "黑龙江",
"producingYear": "2018",
"stockinCount": "10000",
"packing": "包装",
"carriageType": "汽车",
"carriageNo": "34hhjk8",
"stockinTime": "20180410",
"upstreamorganizationCode": "123456",
"contractNo": "dsahkflj43794",
"goodsCode": "001001001001",
"IPFS": "Qmdsfhuoefonfsdnncnsjf",
"createdAt": "2018-04-11T05:33:23.832Z",
"updatedAt": "2018-04-11T05:33:23.832Z",
"id": "5acd9e23c8df950620674b2e"
}
]
}
```
### 企业出库管理
<a id="/stockOut/add"></a>
- [x] [回到顶部](#top)
#### 1.添加出库信息
##### 接口地址
```
POST /stockOut/add
```
##### 接口作用:
```
企业添加出库信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
stockoutNo|string|出库流水号|Y
variety|string|种类|Y
grade|string|等级|Y
production|string|产地|Y
producingYear|string|生产年份|Y
stockoutCount|string|出库数量|Y
packing|string|包装方式(枚举:包装,散装)|Y
carriageType|string|运输类型(枚举:汽车,船舶)|Y
carriageNo|string|车号/船号|Y
driverName|string|司机姓名|Y
driverTele|string|司机电话|Y
stockoutTime|string|出库时间|Y
carriage|string|运输企业社会信用代码|Y
carriageName|string|运输企业名称|Y
waybillNo|string|运单号|Y
downstreamorganizationCode|string|下游企业社会信用代码|Y
downstreamcompanyName|string|下游企业名称|Y
contractNo|string|合同号|Y
goodsCode|string|货位编码|Y
IPFS|string|上传至ipfs的hash|Y
##### input:
```
{
"socialcreditCode":"1234567",
"stockoutNo":"1234001001001001220180410",
"variety":"小麦",
"grade":"二级",
"production":"黑龙江",
"producingYear":"2018",
"stockoutCount":"10000",
"packing":"包装",
"carriageType":"汽车",
"carriageNo":"34hhjk8",
"stockoutTime":"20180410",
"carriage":"123456",
"carriageName":"dsahkflj43794",
"waybillNo":"123",
"downstreamorganizationCode":"001001001001",
"downstreamcompanyName":"Qmdsfhuoefonfsdnncnsjf",
"contractNo":"dsahkflj43794",
"goodsCode":"001001001001",
"IPFS":"dshkljauefsdg"
}
```
##### output:
```
{
"code": 1,
"message": "创建成功",
"data": {
"socialcreditCode": "1234567",
"stockoutNo": "1234001001001001220180410",
"variety": "小麦",
"grade": "二级",
"production": "黑龙江",
"producingYear": "2018",
"stockoutCount": "10000",
"packing": "包装",
"carriageType": "汽车",
"carriageNo": "34hhjk8",
"stockoutTime": "20180410",
"carriage": "123456",
"carriageName": "dsahkflj43794",
"waybillNo": "123",
"downstreamorganizationCode": "001001001001",
"downstreamcompanyName": "Qmdsfhuoefonfsdnncnsjf",
"contractNo": "dsahkflj43794",
"goodsCode": "001001001001",
"IPFS":"dshkljauefsdg",
"createdAt": "2018-04-11T05:59:00.616Z",
"updatedAt": "2018-04-11T05:59:00.616Z",
"id": "5acda424c8df950620674b2f"
}
}
```
<a id="/stockOut/delete"></a>
- [x] [回到顶部](#top)
#### 2.删除出库信息
##### 接口地址
```
POST /stockOut/delete
```
##### 接口作用:
```
企业删除出库信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
##### input:
```
{
"id":"<KEY>"
}
```
##### output:
```
{
"code": 1,
"message": "删除成功",
"data": [
{
"socialcreditCode": "1234567",
"stockoutNo": "1234001001001001220180410",
"variety": "小麦",
"grade": "二级",
"production": "黑龙江",
"producingYear": "2018",
"stockoutCount": "10000",
"packing": "包装",
"carriageType": "汽车",
"carriageNo": "34hhjk8",
"stockoutTime": "20180410",
"carriage": "123456",
"carriageName": "dsahkflj43794",
"waybillNo": "123",
"downstreamorganizationCode": "001001001001",
"downstreamcompanyName": "Qmdsfhuoefonfsdnncnsjf",
"contractNo": "dsahkflj43794",
"goodsCode": "001001001001",
"IPFS":"dshkljauefsdg",
"createdAt": "2018-04-11T05:59:00.616Z",
"updatedAt": "2018-04-11T05:59:00.616Z",
"id": "<KEY>"
}
]
}
```
<a id="/stockOut/update"></a>
- [x] [回到顶部](#top)
#### 3.更新出库信息
##### 接口地址
```
POST /stockOut/update
```
##### 接口作用:
```
企业更新出库信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|删除编号|Y
stockoutNo|string|出库流水号(或其他待更新字段)|N
##### input:
```
{
"id":"5acae874ae906d3604cdd3be",
"stockoutNo": "123"
}
```
##### output:
```
{
"code": 1,
"message": "更新成功",
"data": [
{
"socialcreditCode": "1234567",
"stockoutNo": "1234001001001001220180410",
"variety": "小麦",
"grade": "一级",
"production": "黑龙江",
"producingYear": "2018",
"stockoutCount": "10000",
"packing": "包装",
"carriageType": "汽车",
"carriageNo": "34hhjk8",
"stockoutTime": "20180410",
"carriage": "123456",
"carriageName": "dsahkflj43794",
"waybillNo": "123",
"downstreamorganizationCode": "001001001001",
"downstreamcompanyName": "Qmdsfhuoefonfsdnncnsjf",
"contractNo": "dsahkflj43794",
"goodsCode": "001001001001",
"createdAt": "2018-04-11T05:59:00.616Z",
"updatedAt": "2018-04-11T06:12:47.615Z",
"id": "5acda424c8df950620674b2f"
}
]
}
```
<a id="/stockOut/find"></a>
- [x] [回到顶部](#top)
#### 4.查找出库信息
##### 接口地址
```
POST /stockOut/find
```
##### 接口作用:
```
企业查找出库信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
##### input:
```
{
"socialcreditCode":"1234567"
}
```
##### output:
```
{
"code": 1,
"message": "查找成功",
"data": [
{
"socialcreditCode": "1234567",
"stockoutNo": "1234001001001001220180410",
"variety": "小麦",
"grade": "二级",
"production": "黑龙江",
"producingYear": "2018",
"stockoutCount": "10000",
"packing": "包装",
"carriageType": "汽车",
"carriageNo": "34hhjk8",
"stockoutTime": "20180410",
"carriage": "123456",
"carriageName": "<PASSWORD>43794",
"waybillNo": "123",
"downstreamorganizationCode": "001001001001",
"downstreamcompanyName": "Qmdsfhuoefonfsdnncnsjf",
"contractNo": "dsahkflj43794",
"goodsCode": "001001001001",
"createdAt": "2018-04-11T05:59:00.616Z",
"updatedAt": "2018-04-11T05:59:00.616Z",
"id": "5acda424c8df950620674b2f"
}
]
}
```
### 七.质检信息管理
<a id="/QCInfo/add"></a>
- [x] [回到顶部](#top)
#### 1.添加质检信息
##### 接口地址
```
POST /QCInfo/add
```
##### 接口作用:
```
质检企业添加货位质检信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|质检企业社会信用代码|Y
companyName|string|质检企业名称|Y
checksocialcreditCode|string|被抽检企业社会信用代码|Y
checkcompanyName|string|被抽检企业名称|Y
checkgoodsCode|string|被抽检货位编码|Y
reportNumber|string|报告编号|Y
taskSource|string|任务来源|Y
sampleName|string|样品名称|Y
sampleNo|string|样品编号|Y
taskType|string|任务类型|Y
client|json|委托单位|Y
samplingInfo|json|抽样情况|Y
sampleInfo|string|样品概况|Y
sampleLog|string|抽样日志|Y
reportDate|string|报告日期|Y
testStandard|string|检验依据|Y
testResult|string|检验结果|Y
testConclusion|string|检验结论|Y
instruction|string|说明|Y
qualityIndex|json|质量指标|Y
characterIndex|json|品质指标|Y
healthIndex|json|卫生指标|Y
isComplete|boolean|是否保存(true:之后不可编辑false(默认):不修改此字段,质检信息可编辑)|Y
##### input:
```
{
"socialcreditCode":"1",
"companyName":"1",
"checksocialcreditCode":"1",
"checkcompanyName":"1",
"checkgoodsCode":"1",
"reportNumber":"1",
"taskSource":"1",
"sampleName":"1",
"sampleNo":"1",
"taskType":"1",
"client":{},
"samplingInfo":{"sampleMethod":"简单抽样","sampleDate":"2018-04-15"},
"sampleInfo":{},
"sampleLog":"1",
"reportDate":"1",
"testStandard":"1",
"testResult":"1",
"testConclusion":"1",
"instruction":"1",
"qualityIndex":{},
"characterIndex":{},
"healthIndex":{},
"isComplete":false
}
```
##### output:
```
{
"code": 1,
"message": "创建成功",
"data": {
"socialcreditCode": "1",
"companyName": "1",
"checksocialcreditCode": "1",
"checkcompanyName": "1",
"checkgoodsCode": "2",
"reportNumber": "1",
"taskSource": "1",
"sampleName": "1",
"sampleNo": "1",
"taskType": "1",
"client": {},
"samplingInfo": {
"sampleMethod": "简单抽样",
"sampleDate": "2018-04-15"
},
"sampleInfo": {},
"sampleLog": "1",
"reportDate": "1",
"testStandard": "1",
"testResult": "1",
"testConclusion": "1",
"instruction": "1",
"qualityIndex": {},
"characterIndex": {},
"healthIndex": {},
"isComplete": false,
"createdAt": "2018-04-15T12:25:55.530Z",
"updatedAt": "2018-04-15T12:25:55.530Z",
"id": "5<PASSWORD>d3ba2ff<PASSWORD>b<PASSWORD>"
}
}
```
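According to the parameter table, once `isComplete` has been saved as `true` the report can no longer be edited. The documented semantics therefore suggest an update guard along these lines; this is a sketch only, not the controller code shown later in this repository:
```
// Sketch only: refuse to modify a QC report that was saved as complete.
QCInfo.findOne({ id: req.body.id }).exec((err, report) => {
  if (err) return res.send(Message.messages(0, '出错啦!', err));
  if (report && report.isComplete) {
    // isComplete === true: the report is frozen and may not be edited again.
    return res.send(Message.messages(0, '更新失败,质检信息已保存!', report));
  }
  QCInfo.update({ id: req.body.id }, req.body).exec((err, result) => {
    if (err) return res.send(Message.messages(0, '更新失败!', err));
    res.send(Message.messages(1, '更新成功', result));
  });
});
```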
<a id="/QCInfo/delete"></a>
- [x] [回到顶部](#top)
#### 2.删除质检信息
##### 接口地址
```
POST /QCInfo/delete
```
##### 接口作用:
```
质检企业删除货位质检信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|id|Y
##### input:
```
{
"id":"<KEY>"
}
```
##### output:
```
{
"code": 1,
"message": "删除成功",
"data": [
{
"socialcreditCode": "1",
"companyName": "1",
"checksocialcreditCode": "1",
"checkcompanyName": "1",
"checkgoodsCode": "1",
"reportNumber": "1",
"taskSource": "1",
"sampleName": "1",
"sampleNo": "1",
"taskType": "1",
"client": {},
"samplingInfo": {},
"sampleInfo": {},
"sampleLog": "1",
"reportDate": "1",
"testStandard": "1",
"testResult": "1",
"testConclusion": "1",
"instruction": "1",
"qualityIndex": {},
"characterIndex": {},
"healthIndex": {},
"isComplete": false,
"createdAt": "2018-04-15T12:02:29.699Z",
"updatedAt": "2018-04-15T12:02:29.699Z",
"id": "<KEY>"
}
]
}
```
<a id="/QCInfo/update"></a>
- [x] [回到顶部](#top)
#### 3.更新质检信息
##### 接口地址
```
POST /QCInfo/update
```
##### 接口作用:
```
质检企业更新货位质检信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
id|string|id|Y
##### input:
```
{
"id":"<KEY>",
"":""(更新的字段)
}
```
##### output:
```
{
"code": 1,
"message": "更新成功",
"data": [
{
"socialcreditCode": "1",
"companyName": "1",
"checksocialcreditCode": "1",
"checkcompanyName": "1",
"checkgoodsCode": "1",
"reportNumber": "1",
"taskSource": "1",
"sampleName": "1",
"sampleNo": "1",
"taskType": "1",
"client": {},
"samplingInfo": {},
"sampleInfo": {},
"sampleLog": "1",
"reportDate": "1",
"testStandard": "1",
"testResult": "1",
"testConclusion": "1",
"instruction": "1",
"qualityIndex": {},
"characterIndex": {},
"healthIndex": {},
"isComplete": false,
"createdAt": "2018-04-15T12:02:29.699Z",
"updatedAt": "2018-04-15T12:02:29.699Z",
"id": "<KEY>"
}
]
}
```
<a id="/QCInfo/find"></a>
- [x] [回到顶部](#top)
#### 4.查找质检信息
##### 接口地址
```
POST /QCInfo/find
```
##### 接口作用:
```
质检企业查找货位质检信息
```
接口参数
参数名 | 类型 |参数解释 | 是否必选(Y必选N可选)
---|---|---|---
socialcreditCode|string|社会信用代码|Y
##### input:
```
{
"socialcreditCode":"<KEY>351819"
}
```
##### output:
```
{
"code": 1,
"message": "查找成功",
"data": [
{
"socialcreditCode": "1",
"companyName": "1",
"checksocialcreditCode": "1",
"checkcompanyName": "1",
"checkgoodsCode": "1",
"reportNumber": "1",
"taskSource": "1",
"sampleName": "1",
"sampleNo": "1",
"taskType": "1",
"client": {},
"samplingInfo": {},
"sampleInfo": {},
"sampleLog": "1",
"reportDate": "1",
"testStandard": "1",
"testResult": "1",
"testConclusion": "1",
"instruction": "1",
"qualityIndex": {},
"characterIndex": {},
"healthIndex": {},
"isComplete": false,
"createdAt": "2018-04-15T12:02:29.699Z",
"updatedAt": "2018-04-15T12:02:29.699Z",
"id": "5ad33f55bdddc0230b351819"
}
]
}
```
### 八.监管企业
<a id="/supervise/find"></a>
- [x] [回到顶部](#top)
#### 查询所有企业
##### 接口地址:
```
POST /supervise/find
```
接口作用:
```
查询所有企业注册的信息
```
##### output:
```
{
"code": 1,
"message": "注册企业信息",
"data": {
"carriage": [
{
"socialcreditCode": "fuzhouwuliua123456",
"companyName": "福州物流企业A",
"organizationCode": "liua12345",
"password": "<PASSWORD>",
"companyType": "运输企业",
"companyLinkman": "王二",
"companyTele": "18811391890",
"address": "福建省福州市",
"lealPerson": "张三",
"legalID": "531819198706171829",
"legalTele": "13267890987",
"status": "待审核",
"createdAt": "2018-04-11T06:18:04.033Z",
"updatedAt": "2018-04-11T06:18:04.033Z",
"id": "<KEY>"
},
{
"socialcreditCode": "sanmingwuliuA12345",
"companyName": "三明物流企业A",
"organizationCode": "uliuA1234",
"password": "<PASSWORD>",
"companyType": "运输企业",
"companyLinkman": "李四",
"companyTele": "18811391890",
"address": "福建省三明市",
"lealPerson": "张三",
"legalID": "531819198706171829",
"legalTele": "13516789098",
"status": "待审核",
"createdAt": "2018-04-11T07:19:22.276Z",
"updatedAt": "2018-04-11T07:19:22.276Z",
"id": "5acdb6<KEY>3c"
},
{
"socialcreditCode": "fuzhouwuliuB123456",
"companyName": "福州物流企业B",
"organizationCode": "liuB12345",
"password": "<PASSWORD>",
"companyType": "运输企业",
"companyLinkman": "李四",
"companyTele": "18811391890",
"address": "福建省福州市",
"lealPerson": "张三",
"legalID": "531819198706171829",
"legalTele": "13516789098",
"status": "待审核",
"createdAt": "2018-04-11T07:55:07.103Z",
"updatedAt": "2018-04-11T07:55:07.103Z",
"id": "5acdbf5bbe54251c4cd02c8f"
},
{
"socialcreditCode": "xiamenwuliuA123456",
"companyName": "厦门物流企业A",
"organizationCode": "liuA12345",
"password": "<PASSWORD>",
"companyType": "运输企业",
"companyLinkman": "李四",
"companyTele": "18811391890",
"address": "福建省厦门市",
"lealPerson": "张三",
"legalID": "531819198706171829",
"legalTele": "13516789098",
"status": "待审核",
"createdAt": "2018-04-11T10:42:43.136Z",
"updatedAt": "2018-04-11T10:42:43.136Z",
"id": "5acde6a3be54251c4cd02c98"
},
{
"socialcreditCode": "xiamenyunshuB12345",
"companyName": "厦门运输企业B",
"organizationCode": "nshuB1234",
"password": "<PASSWORD>",
"companyType": "运输企业",
"companyLinkman": "李敬",
"companyTele": "15910765432",
"address": "福建省厦门市",
"lealPerson": "王磊",
"legalID": "531819198306171829",
"legalTele": "13817098765",
"status": "待审核",
"createdAt": "2018-04-12T12:48:14.802Z",
"updatedAt": "2018-04-12T12:48:14.802Z",
"id": "5acf558e02d3fd6a6f7afd5f"
},
{
"socialcreditCode": "quanzhouyunshuC123",
"companyName": "泉州运输企业A",
"organizationCode": "yunshuC12",
"password": "<PASSWORD>",
"companyType": "运输企业",
"companyLinkman": "李军",
"companyTele": "18617895431",
"address": "福建省泉州市",
"lealPerson": "冯清远",
"legalID": "531819198706171829",
"legalTele": "13910651342",
"status": "待审核",
"createdAt": "2018-04-16T11:55:35.906Z",
"updatedAt": "2018-04-16T11:55:35.906Z",
"id": "5ad48f371539e13aaed43453"
}
],
"processor": [
{
"socialcreditCode": "1",
"companyName": "百度1",
"organizationCode": "百度1",
"password": "<PASSWORD>",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "百度3",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "百度3",
"status": "待审核",
"createdAt": "2018-04-11T05:22:41.561Z",
"updatedAt": "2018-04-11T05:22:41.561Z",
"id": "5acd9ba1c8df950620674b2c"
},
{
"code": 1,
"message": "创建成功!",
"data": {
"socialcreditCode": "1",
"companyName": "美团",
"organizationCode": "美团",
"password": "<PASSWORD>",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "望京",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "美团",
"status": "待审核",
"createdAt": "2018-04-11T05:22:41.561Z",
"updatedAt": "2018-04-11T05:22:41.561Z",
"id": "5acd9ba12345650620674b2c"
},
"createdAt": "2018-04-11T05:24:32.867Z",
"updatedAt": "2018-04-11T05:24:32.867Z",
"id": "5acd9c10c8df950620674b2d"
},
{
"socialcreditCode": "sanmingjiagongA123",
"companyName": "福州加工企业A",
"organizationCode": "iagongA12",
"password": "123456",
"companyType": "小麦加工企业",
"companyLinkman": "张华",
"companyTele": "13567893456",
"address": "福建省三明市",
"lealPerson": "李静",
"legalID": "531819198706171829",
"legalTele": "18818906543",
"status": "待审核",
"createdAt": "2018-04-12T12:17:13.138Z",
"updatedAt": "2018-04-12T12:17:13.138Z",
"id": "5acf4e4902d3fd6a6f7afd54"
},
{
"socialcreditCode": "xiamenjiagongB1234",
"companyName": "厦门加工企业B",
"organizationCode": "agongB123",
"password": "<PASSWORD>",
"companyType": "小麦加工企业",
"companyLinkman": "李小花",
"companyTele": "15678900987",
"address": "福建省厦门市",
"lealPerson": "刘明",
"legalID": "531819198707231829",
"legalTele": "13267890987",
"status": "待审核",
"createdAt": "2018-04-12T12:46:41.037Z",
"updatedAt": "2018-04-12T12:46:41.037Z",
"id": "5acf553102d3fd6a6f7afd5e"
},
{
"socialcreditCode": "quanzhoujiagongB12",
"companyName": "泉州加工企业B",
"organizationCode": "jiagongB1",
"password": "<PASSWORD>",
"companyType": "大米加工企业",
"companyLinkman": "张云龙",
"companyTele": "13516741321",
"address": "福建省泉州市",
"lealPerson": "杨乐",
"legalID": "531819198706171829",
"legalTele": "18817895643",
"status": "待审核",
"createdAt": "2018-04-16T11:53:57.720Z",
"updatedAt": "2018-04-16T11:53:57.720Z",
"id": "5<PASSWORD>"
}
],
"storages": [
{
"socialcreditCode": "12",
"companyName": "百度2",
"organizationCode": "百度2",
"password": "123",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "百度3",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "百度3",
"status": "待审核",
"createdAt": "2018-04-10T12:47:25.727Z",
"updatedAt": "2018-04-10T12:47:25.727Z",
"id": "5accb25dc8df950620674b20"
},
{
"socialcreditCode": "1",
"companyName": "百度1",
"organizationCode": "百度1",
"password": "<PASSWORD>",
"companyType": "companyType",
"companyLinkman": "123343",
"companyTele": "organizationCode",
"address": "百度3",
"lealPerson": "123343",
"legalID": "organizationCode",
"legalTele": "百度3",
"status": "待审核",
"createdAt": "2018-04-11T05:20:18.759Z",
"updatedAt": "2018-04-11T05:20:18.759Z",
"id": "5acd9b12c8df950620<PASSWORD>b"
},
{
"socialcreditCode": "fuzhoucangchuA1234",
"companyName": "福州仓储企业A",
"organizationCode": "ngchuA123",
"password": "<PASSWORD>",
"companyType": "仓储企业",
"companyLinkman": "李四",
"companyTele": "18811391890",
"address": "福建省福州市",
"lealPerson": "张三",
"legalID": "531819198706171829",
"legalTele": "13516789098",
"status": "待审核",
"createdAt": "2018-04-11T07:46:32.347Z",
"updatedAt": "2018-04-11T07:46:32.347Z",
"id": "5<KEY>"
},
{
"socialcreditCode": "fuzhoucangchuB1234",
"companyName": "福州仓储企业B",
"organizationCode": "ngchuB123",
"password": "<PASSWORD>",
"companyType": "仓储企业",
"companyLinkman": "李四",
"companyTele": "18811391890",
"address": "福建省福州市",
"lealPerson": "张三",
"legalID": "531819198706171829",
"legalTele": "13516789098",
"status": "待审核",
"createdAt": "2018-04-11T07:53:48.650Z",
"updatedAt": "2018-04-11T07:53:48.650Z",
"id": "5acdbf0cbe54251c4cd02c8e"
},
{
"socialcreditCode": "xiamencangchuB4321",
"companyName": "厦门仓储企业B",
"organizationCode": "ngchuB432",
"password": "<PASSWORD>",
"companyType": "仓储企业",
"companyLinkman": "李先云",
"companyTele": "18811391890",
"address": "福建省厦门市",
"lealPerson": "张文文",
"legalID": "532189198505161890",
"legalTele": "18818906543",
"status": "待审核",
"createdAt": "2018-04-12T12:44:50.938Z",
"updatedAt": "2018-04-12T12:44:50.938Z",
"id": "5acf54c202d3fd6a6f7afd5d"
},
{
"socialcreditCode": "quanzhoucangchuA12",
"companyName": "泉州仓储企业A",
"organizationCode": "cangchuA1",
"password": "<PASSWORD>",
"companyType": "仓储企业",
"companyLinkman": "王勇",
"companyTele": "13267891456",
"address": "福建省泉州市",
"lealPerson": "李贤禹",
"legalID": "531819198706171829",
"legalTele": "15617891455",
"status": "待审核",
"createdAt": "2018-04-16T11:51:39.066Z",
"updatedAt": "2018-04-16T11:51:39.066Z",
"id": "5<PASSWORD>"
}
]
}
}
```<file_sep>/**
* StockIn.js
*
* @description :: TODO: You might write a short summary of how this model works and what it represents here.
* @docs :: http://sailsjs.org/documentation/concepts/models-and-orm/models
*/
module.exports = {
tableName: 'stockIn',
attributes: {
socialcreditCode: {
type: 'string'
},
companyName: {
type: 'string'
},
//入库识别码(组织机构代码,货位识别码,封藏时间)
stockinidentificationCode: {
type: 'string'
},
//入库单号(流水号)唯一
stockinNo: {
type: 'string'
},
variety: {
type: 'string'
},
//等级
grade: {
type: 'string'
},
production: {
type: 'string'
},
producingYear: {
type: 'string'
},
//入库数量
stockinCount: {
type: 'integer'
},
packing: {
type: 'string',
enum: ['包装', '散装']
},
//运单号
waybillNo: {
type: 'string'
},
//承运方的社会信用代码
carriage: {
type: 'string'
},
//承运方的名称
carriageName: {
type: 'string'
},
//运力类型
carriageType: {
type: 'string'
},
//车号/船号
carriageNo: {
type: 'string'
},
driverName: {
type: 'string'
},
driverTele: {
type: 'string'
},
stockinTime: {
type: 'string'
},
//上游企业社会信用代码
upstreamorganizationCode: {
type: 'string'
},
//上游企业名称
upstreamcompanyName: {
type: 'string'
},
contractNo: {
type: 'string'
},
//货位编码
goodsCode: {
type: 'string'
},
IPFS: {
type: 'string'
},
//企业推送或者自主录入
inType: {
type: 'string',
enum: ['待办', '自主']
},
//是否质检
QC: {
type: 'boolean'
},
}
};
<file_sep>/**
* SuperviseController
*
* @description :: Server-side logic for managing supervises
* @help :: See http://sailsjs.org/#!/documentation/concepts/Controllers
*/
module.exports = {
find: (req, res) => {
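// Aggregate every registered company: the three collections (Carriage,
// Processor, Storages) are queried one after another with async.waterfall
// and returned together as a single { carriage, processor, storages } payload.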
async.waterfall([(callback) => {
//物流
Carriage.find({}).exec((err, result) => {
if (err) return callback(err);
callback(null, result);
});
}, (carriage, callback) => {
Processor.find({}).exec((err, result) => {
if (err) return callback(err);
callback(null, carriage, result);
});
}, (carriage, processor, callback) => {
Storages.find({}).exec((err, result) => {
if (err) return callback(err);
callback(null, carriage, processor, result);
});
}], (err, carriage, processor, storages) => {
if (err) return res.send(Message.messages(0, '出错啦!', err));
res.send(Message.messages(1, '注册企业信息', { carriage, processor, storages }));
});
}
};
<file_sep>/**
* StockOut.js
*
* @description :: TODO: You might write a short summary of how this model works and what it represents here.
* @docs :: http://sailsjs.org/documentation/concepts/models-and-orm/models
*/
module.exports = {
tableName: 'stockOut',
attributes: {
socialcreditCode: {
type: 'string'
},
companyName: {
type: 'string'
},
stockoutNo: {
type: 'string'
},
variety: {
type: 'string'
},
grade: {
type: 'string'
},
production: {
type: 'string'
},
producingYear: {
type: 'string'
},
stockoutCount: {
type: 'integer'
},
packing: {
type: 'string',
enum: ['包装', '散装']
},
stockoutTime: {
type: 'string'
},
//运单号
waybillNo: {
type: 'string'
},
//承运方的社会信用代码
carriage: {
type: 'string'
},
//承运方的名称
carriageName: {
type: 'string'
},
//运力类型
carriageType: {
type: 'string'
},
//车号/船号
carriageNo: {
type: 'string'
},
driverName: {
type: 'string'
},
driverTele: {
type: 'string'
},
//下游社会信用代码
downstreamorganizationCode: {
type: 'string'
},
//下游企业名称
downstreamcompanyName: {
type: 'string'
},
contractNo: {
type: 'string'
},
goodsCode: {
type: 'string'
}
}
};
<file_sep>/**
* 常用工具类
*/
const crypto = require('crypto');
class AppUtil {
/**
* 非空验证
* @param args 传入需要验证的参数
*/
static emptyVaild(...args) {
let empty = false;
for (let e of args) {
if (e !== 0 && !e) {
empty = true;
break;
}
}
return empty;
}
/**
* 判断相等
* @param str 传入需要验证的2个参数
*/
static equalVaild(str1, str2) {
let equal = false;
if (str1 === str2) {
equal = true;
}
return equal;
}
/**
* 生成sha256加密
* @param arg 传入需要hash的参数
*/
static createSha256(arg) {
let sha256 = crypto.createHash('sha256');
return sha256.update(arg).digest('hex');
}
/**
* 取小数点后两位
* @param arg 传入需要的参数
*/
static decimalTwo(arg) {
return arg.toFixed(2);
}
}
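/**
 * Usage sketch (illustrative values only):
 * AppUtil.emptyVaild('a', '', 'b') // -> true, because '' counts as empty
 * AppUtil.equalVaild('abc', 'abc') // -> true
 * AppUtil.createSha256('123456') // -> hex-encoded SHA-256 digest
 * AppUtil.decimalTwo(3.14159) // -> '3.14' (string returned by toFixed)
 */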
module.exports = AppUtil;<file_sep>/**
* QCInfo.js
*
* @description :: TODO: You might write a short summary of how this model works and what it represents here.
* @docs :: http://sailsjs.org/documentation/concepts/models-and-orm/models
*/
module.exports = {
tableName: 'QCInfo',
attributes: {
//质检社会信用代码
socialcreditCode: {
type: 'string'
},
//质检企业名称
companyName: {
type: 'string'
},
//抽检企业社会信用代码
checksocialcreditCode: {
type: 'string'
},
//抽检企业名称
checkcompanyName: {
type: 'string'
},
//抽检货位编码
checkgoodsCode: {
type: 'string'
},
//报告编号
reportNumber: {
type: 'string'
},
//任务来源
taskSource: {
type: 'string'
},
//样品名称
sampleName: {
type: 'string'
},
//样品编号
sampleNo: {
type: 'string'
},
//任务类型
taskType: {
type: 'string'
},
//委托单位
client: {
},
//抽样情况
samplingInfo: {
},
//样品概况
sampleInfo: {
},
//抽样日志
sampleLog: {
type: 'string'
},
//报告日期
reportDate: {
type: 'string'
},
//检验依据
testStandard: {
type: 'string'
},
//检验结果
testResult: {
type: 'string'
},
//检验结论
testConclusion: {
type: 'string'
},
//说明
instruction: {
type: 'string'
},
//质量指标
qualityIndex: {
},
//品质指标
characterIndex: {
},
//卫生指标
healthIndex: {
},
isComplete: {
type: 'boolean'
}
}
};
<file_sep>/**
* QCInfoController
*
* @description :: Server-side logic for managing Qcinfoes
* @help :: See http://sailsjs.org/#!/documentation/concepts/Controllers
*/
module.exports = {
add: (req, res) => {
let params = req.body;
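// Only one QC record is allowed per inspected goods slot: the three fields
// below identify an existing record for the same company and slot, and the
// waterfall below rejects the insert when one is found ('该货位已抽检').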
let isExitParams = {
checksocialcreditCode: params.checksocialcreditCode,
checkcompanyName: params.checkcompanyName,
checkgoodsCode: params.checkgoodsCode
}
async.waterfall([(callback) => {
QCInfo.find(isExitParams).exec((err, result) => {
if (err) return callback(err);
if (result.length) {
callback(null, true);//即将插入的已经存在
} else {
callback(null, false);
}
});
}, (isExit, callback) => {
if (!isExit) {
QCInfo.create(params).exec((err, result) => {
if (err) return callback(err);
callback(null, isExit, result);
})
} else {
callback(null, isExit, []);
};
}], (err, isExit, result) => {
if (isExit) {
res.send(Message.messages(0, '创建失败,该货位已抽检!', result));
} else {
res.send(Message.messages(1, '创建成功', result));
}
});
},
delete: (req, res) => {
let params = req.body;
QCInfo.destroy(params).exec((err, result) => {
if (err) return res.send(Message.messages(0, '删除失败!', err));
if (!result.length) return res.send(Message.messages(0, '删除失败!', result));
res.send(Message.messages(1, '删除成功', result));
});
},
update: (req, res) => {
let params = { id: req.body.id };
let updateParams = req.body;
QCInfo.update(params, updateParams).exec((err, result) => {
if (err) return res.send(Message.messages(0, '更新失败!', err));
if (!result.length) return res.send(Message.messages(0, '更新失败!', result));
res.send(Message.messages(1, '更新成功', result));
});
},
find: (req, res) => {
let params = req.query;
QCInfo.find(params).exec((err, result) => {
if (err) return res.send(Message.messages(0, '查找失败!', err));
if (!result.length) return res.send(Message.messages(0, '查找失败!', result));
res.send(Message.messages(1, '查找成功', result));
});
}
};
<file_sep>/**
* Client.js
*
* @description :: TODO: You might write a short summary of how this model works and what it represents here.
* @docs :: http://sailsjs.org/documentation/concepts/models-and-orm/models
*/
module.exports = {
tableName: 'client',
attributes: {
socialcreditCode: {
type: 'string'
},
companysocialcreditCode:{
type: 'string'
},
companyName: {
type: 'string'
},
province: {
type: 'string'
},
city: {
type: 'string'
},
address: {
type: 'string'
},
lealPerson: {
type: 'string'
},
legalTele: {
type: 'string'
},
companyLinkman: {
type: 'string'
},
companyTele: {
type: 'string'
},
email: {
type: 'string'
},
fax: {
type: 'string'
}
}
};
<file_sep>/**
* CarriageInfoController
*
* @description :: Server-side logic for managing carriageinfoes
* @help :: See http://sailsjs.org/#!/documentation/concepts/Controllers
*/
module.exports = {
add: (req, res) => {
let params = req.body;
CarriageInfo.create(params).exec((err, result) => {
if (err) return res.send(Message.messages(0, '创建失败', err));
res.send(Message.messages(1, '创建成功', result));
})
},
delete: (req, res) => {
let params = req.body;
CarriageInfo.destroy(params).exec((err, result) => {
if (err) return res.send(Message.messages(0, '删除失败!', err));
if (result.length === 0) return res.send(Message.messages(0, '删除失败!', result));
res.send(Message.messages(1, '删除成功', result));
});
},
update: (req, res) => {
let params = { id: req.body.id };
let updateParams = req.body;
CarriageInfo.update(params, updateParams).exec((err, result) => {
if (err) return res.send(Message.messages(0, '更新失败!', err));
if (result.length === 0) return res.send(Message.messages(0, '更新失败!', result));
res.send(Message.messages(1, '更新成功', result));
});
},
find: (req, res) => {
let params = req.query;
CarriageInfo.find(params || {}).exec((err, result) => {
if (err) return res.send(Message.messages(0, '查找失败!', err));
if (result.length === 0) return res.send(Message.messages(0, '查找失败!', result));
res.send(Message.messages(1, '查找成功', result));
});
}
};
| c1a9966ecfb51885320a77cc378ff2f965b66cd8 | [
"JavaScript",
"Markdown"
] | 16 | JavaScript | Charlotte1018/mongo-API | 32792ad0bce9371bc8ed4e3f4b5e513344d06244 | 810f86d5e6ded7ab1a30dddb81372ecce74e7fe2 | |
refs/heads/master | <file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-05-03/principios-first.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-11/wifi-de-la-uclm-bajo-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-06-13/backup-para-mviles-siemens.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-10-21/cumpleaos-feliz.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-12-13/convertidor-de-video.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2012-01-07/wm8650.html
---
<file_sep>---
layout: post
title: "How to annotate/tag stdout and stderr outputs with bash"
author: david_villa
category: recipe
tags: bash shell
lang: en
comments: true
---
Let's create a test script `test-std.sh`:
<pre class="bash">
#!/bin/bash --
echo info
echo error >&2
</pre>
We will use `sed` to prepend some text to each output line. To perform the redirection we use a lesser-known feature called [process substitution](https://www.gnu.org/software/bash/manual/html_node/Process-Substitution.html).
<pre class="console">
$ ./test-std.sh 2> >(sed 's/^/err: /g') > >(sed 's/^/out: /g')
out: info
err: error
</pre>
And... it's possible to use a different color for each of them too.
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-13/compartir-la-conexin-a-internet-router-domstico-con-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-11-11/instalar-emacs.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-08-16/desactivacin-de-pitidos-varios.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2008-01-23/java-como-primer-lenguaje-mala-idea.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-06-14/nuevo-sistema-wifi-en-la-uclm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-21/mantener-ficheros-de-configuracin-con-subversion.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-10-10/steve-jobs-and-sus-isubnormales-diciendo-que-fu-un-genio.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-11-28/concentrador-openvpn-en-debian-gnu-linux-o-ubuntu.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-01-25/dvb-t-realtek-2831u-on-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-02-20/configurar-mltiples-interfaces-de-red.html
---
<file_sep>---
migrated: node/1354
layout: post
title: Instalación de un servidor Apache, PHP y MySQL en Debian
created: 1272971234
author: Trini
comments: true
category: recipe
---
<h2>Introduction</h2>
<!--break-->
When we want to build a dynamic, content-oriented website, we can use what is known as a content management system, or CMS. Drupal is a CMS that offers great modularity and a series of distinctive advantages. That is why this document focuses on the installation of this tool, to show that, despite what it may seem at first sight, its setup is quite simple.
<h2>Installing Drupal</h2>
To install this tool you first have to meet a few prerequisites, namely having the following packages installed:
<ul>
<li> apache2: an open source web server, developed within the HTTP Server project of the Apache Software Foundation.</li>
<li> php5: an interpreted programming language that will be used to build dynamic web pages. </li>
<li> mysql-server: the MySQL database server</li>
<li> php5-mysql: the MySQL module for php5</li>
</ul>
Once all these packages are available, you have to install them. The installation is carried out with the <em>aptitude</em> command, as follows:
<div class="console">
{% highlight console %}
aptitude install apache2 php5 mysql-server php5-mysql
{% endhighlight %}
</div>
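Once the installation finishes, a quick way to check that the whole stack works (assuming the default Debian document root, <tt>/var/www</tt>) would be something like this:
<div class="console">
{% highlight console %}
# /etc/init.d/apache2 status
# echo '<?php phpinfo(); ?>' > /var/www/info.php     # default Debian docroot assumed
# mysql -u root -p -e 'SELECT VERSION();'
{% endhighlight %}
</div>
Opening <tt>http://localhost/info.php</tt> in the browser should then show the PHP information page.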
<h2>References</h2>
<a href="http://drupal.org/">Official Drupal website</a>
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-28/magia-negra-con-scapy.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-07-02/usando-gphoto2-desde-consola.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-08-18/scapy-2-0-0-5-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-10-29/pgina-web-rplica-en-espaol-de-emms.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-11-19/unicode-utf-8-con-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2011-09-29/xpweek-mis-conclusiones.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-06-25/la-forma-ms-sencilla-de-usar-repositorios-git-es-mercurial.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-21/router-nat-proxy-firewall.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-09-13/lexmark-e250d-en-debian-y-gnu-linux-en-general.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-12-09/creacin-de-un-parser-con-flex-y-bison-en-c.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-02-24/problemas-con-la-red-en-java-openjdk-6-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-05-12/notificacin-de-eventos-con-pynotify.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-11-27/la-informtica-dejar-de-ser-una-ingeniera.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2009-02-26/install-party-v-4.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-02-25/exposicin-por-tierras-de-molinux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-05-17/cairo-tutorial-en-castellano.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-08-29/real-como-la-vida-misma.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-10-21/xplanet-como-salvapantallas-con-gnome-screensaver.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-05-18/grficas-con-gnuplot.html
---
<file_sep>---
migrated: node/50
layout: post
title: "¡Usuarios Ubuntu!, ¿quereis Howto's?"
created: 1129715028
author: cleto
comments: true
category: new
---
Well, here are your howtos! :-)
It is written for the Hoary release, but quite a few things do not depend on it being Hoary, or even on it being Ubuntu. Very nice and very complete. <a href="http://ubuntuguide.org/">HERE IT IS</a>.
Cheers!
<file_sep>---
migrated: node/1436
layout: post
title: Archivos nuevos con el mismo grupo que el directorio padre
created: 1291501744
author: luis_munoz
comments: true
---
ASSIGN A DEFAULT GROUP TO NEW FILES
<!--break-->
<a href="http://www.ssc.wisc.edu/sscc/ssccnews/linuxgroups.htm">http://www.ssc.wisc.edu/sscc/ssccnews/linuxgroups.htm</a>
<a href="http://es.wikipedia.org/wiki/Chmod">http://es.wikipedia.org/wiki/Chmod</a>
A SPECIFIC UMASK FOR NEW FILES
<a href="http://es.wikipedia.org/wiki/Umask">http://es.wikipedia.org/wiki/Umask</a>
<a href="http://www.escomposlinux.org/iarenaza/articulo-acls/acls-linux-samba.html">http://www.escomposlinux.org/iarenaza/articulo-acls/acls-linux-samba.html</a>
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2008-11-21/el-piratear-se-va-a-acabar.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-11-11/monitorizacin-de-los-sensores-de-tu-pc.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-04-18/adaptador-wifi-usb-zyair-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-02-17/crear-un-iconview.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-27/recuperar-grub-despus-de-instalar-windows.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-09-03/gnu-emacs-el-cliente-de-twitter-definitivo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-02-28/compilacin-cruzada-de-iceservices-para-arquitecturas-arm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-10-24/uboot-arm-levntate-y-anda.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-11-12/aprender-a-escribir-en-la-web.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-10-11/una-curiosidad-huevo-de-pascua.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-05-10/rabietas-de-patio-de-colegio.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-04-04/nueva-web-contra-el-canon-digital.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-02-09/tele-enseanza-en-terminales.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-04-19/televisin-digital-terrestre-tdt-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /script/2007-01-06/slax-qemu-y-cintas-de-video.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-03-30/freeband-guitarra-y-batera-ms-arcade-que-nunca.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-02-20/iii-jornadas-de-software-libre-en-albacete.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-04-10/subttulos-fuera-de-la-imagen-en-mplayer.html
---
<file_sep>---
migrated: node/193
layout: post
title: '"El reto de la semana", 2ª edición'
created: 1137530448
author: paco
comments: true
category: challenge
---
OK, time for the next challenge (even if it is within the same week).
<h2>Sub-challenge 1:</h2>
The easy one: write a program that takes a text string as an argument and detects whether that string is a palindrome or not.
<!--break-->
A palindrome is a text that reads the same forwards and backwards (e.g.: "Dábale arroz a la zorra el abad").
Note that accents and capital letters do not count.
<h2>Sub-challenge 2</h2>
Same program as before, but now it prints the longest palindrome that can be extracted from the beginning of the string.
Off you go, get to work.
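(If you want a quick way to check your results for sub-challenge 1, something like this can serve as a reference; the <tt>iconv</tt> transliteration to drop the accents is just one possible choice:)
<div class="console">
{% highlight console %}
$ s=$(echo "Dábale arroz a la zorra el abad" | iconv -f utf-8 -t ascii//TRANSLIT | tr -d ' ' | tr '[:upper:]' '[:lower:]')
$ [ "$s" = "$(echo "$s" | rev)" ] && echo palindrome || echo "not a palindrome"
palindrome
{% endhighlight %}
</div>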
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-07-21/gua-del-aspirante-a-mal-programador.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2008-03-28/kicad-resultado-final.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /humor/2009-01-17/soy-linux-y-mi-hermano-mi-perro-y-mi-coche-tambin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-08-04/gnu-linux-y-nintendo-ds-2-parte.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-03-01/nexentaos-esto-es-gnu-no-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-05-28/mini-tutorial-de-gnu-make.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2010-04-19/abierta-inscripcin-a-da-de-software-libre-en-la-esii.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-01-17/pygtk-tips-n-tricks.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-12-14/la-pgina.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2010-06-17/por-qu-el-programador-no-es-la-estrella.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-02-06/configurar-adaptador-inalmbrico-de-red-usb-con-ndiswrapper.html
---
<file_sep>---
migrated: node/790
layout: post
title: SQL in'y'ection
created: 1192056756
author: david_villa
comments: true
tags:
- ocio
---
<center>
<a href="http://xkcd.com/327/"><img src="http://imgs.xkcd.com/comics/exploits_of_a_mom.png"/></a>
</center>
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-02-03/solucin-al-problema-de-captura-por-firewire.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2008-07-23/el-put-canon.html
---
<file_sep>---
migrated: node/393
layout: post
title: Resucitando PentiumII
created: 1158319375
author: ixmith
comments: true
category: enquiry
---
Now that exams are over, I have had time to ponder many questions. Among the most mundane ones is what to do with my old PII (mmx :-P) at 333 MHz, with 128 MB of RAM and a 3.2 GB hard disk.
<!--break-->
At first I thought it would be quite handy to set it up nicely as a firewall at home for the other two computers we use to get online, and in the future extend it as an Apache server with an amateur-style website to upload my university stuff and have it at hand from anywhere. Apart from buying network cards, a crossover cable and so on, I need a distribution it can run comfortably. Some of you will tell me Debian Sarge in console mode (at least that is what I have in mind), but I would also like to be able to use some graphical environment, and I do not know whether it would go from "running", which was my intention, to "crawling"...
That is why I am asking for your advice, opinions, similar experiences, anecdotes and whatever else comes to mind :D
Regards
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-08-21/stdeb-o-cmo-crear-paquetes-debian-de-mdulos-python-como-churros.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-02-01/caducidad-de-las-claves-gpg-de-los-repositorios-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2009-12-03/pasito-a-pasito.html
---
<file_sep>jekyll website source for http://CRySoL.github.io
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-11-25/consulta-sobre-distro.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-07-11/cmo-instalar-opencv-en-ubuntu-9-04.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-10-11/abierta-inscripcin-al-vi-concurso-universitario-de-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-04-17/fotos-de-las-jornadas-de-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-02-24/la-esi-organizar-la-party-quijote-2006.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-02-03/enviar-correo-a-travs-de-gmail-con-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-17/arranque-remoto-de-un-ordenador-con-debian-usando-wake-on-lan.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2010-11-07/introduccin-a-la-programacin-en-emacs-lisp-de-chassell.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-07-08/carga-dinmica-de-contenido-html-con-xmlhttprequest.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-03-10/cambiar-accels-de-gtk-sobre-la-marcha.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-12-11/gnu-emacs-construir-un-major-mode-paso-a-paso.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-10-31/soy-nuevo-en-esta-comunidad-saludos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2007-10-18/necesito-ayuda-con-la-compilacion-en-emacs.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-04-01/dinerocracia.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-10-17/novato-en-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-11-27/un-10-para-la-install-party.html
---
<file_sep>---
migrated: node/965
layout: post
title: Patrones de diseño en Python
created: 1218306829
author: Vigilante
comments: true
tags:
- Python
---
Browsing the web I came across this pair of videos which I find very interesting, considering that around here there are many lovers of design patterns and of the Python programming language.
<!--break-->
The videos are from a Google TechTalks talk by <NAME> (the author of Python Cookbook and Python in a Nutshell).
<a href="http://video.google.com/googleplayer.swf?docId=-3035093035748181693">Part I</a>
<a href="http://video.google.com/googleplayer.swf?docId=-288473283307306160">Part II</a>
<file_sep>---
migrated: node/1490
layout: post
title: Antiprogramación
created: 1302168376
author: nacho
comments: true
tags:
- platanada
---
People often talk about how things should be done, but sometimes the <em>counterexamples</em> are not shown. Following a conversation on the <a href="/listas">mailing list</a>, it occurred to me that it could be useful to have a list of examples of bad programming practices, things to avoid, from rookie mistakes to more complex issues.
<!--break-->
I will start with a couple of simple examples that come to mind, <b>real</b> examples that I have seen, and I hope you join in and post the ones you remember. The biggest contributors here will be the lecturers, who grade our assignments and surely more than once bury their face in their hands (the cool kids call it a <em>facepalm</em> nowadays):
<h2>Multiple condition</h2>
This counterexample showed up in an assignment about a chess game. The goal was to check the coordinates of a piece:
<div>
{% highlight java %}
if (row==1 || row==2 || row==3 || row==4 || row==5 || row==6 || row==7 || row==8) {
if (col=='A' || col=='B' || col=='C' || col=='D' || col=='E' || col=='F' || col=='G' || col=='H' ){
// coordinates are valid, do stuff
} else {
// invalid column
}
} else {
// invalid row
}
{% endhighlight %}
</div>
A better solution would have been something like:
<div>
{% highlight java %}
if (row<1 || row>8 || col<'A' || col>'H'){
throw new InvalidCoordException(row, col);
}
// everything OK, do stuff
{% endhighlight %}
</div>
<h2>Inverted condition</h2>
In the same assignment I also found this:
<div>
{% highlight java %}
if (condición) {
} else {
// do stuff
}
{% endhighlight %}
</div>
I do not remember exactly what the condition was, but that was the idea. Obviously, it had to be replaced with:
<div>
{% highlight java %}
if(!condición){
// do stuff
}
{% endhighlight %}
</div>
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-03-08/la-docencia-en-secundara-no-ser-una-salida-profesional-para-los-titulados-universitarios-en-informtica.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-11-13/aboutmozilla.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-05-14/un-milln-de-gracias.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2007-04-03/el-sndrome-del-amigo-informtico.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-04-28/arreglar-emacs-sin-fuentes.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-02-27/grabar-cd-y-dvd-ms-rpido.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-03-20/intel-core-duo-34-fallos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-05-22/convertir-ficheros-ape-a-formato-wav-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-10-24/comienza-el-desarrollo-de-gnesis-3-0.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2012-02-19/pasta-con-pollo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-06-06/el-da-de-la-bestia.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-09-06/y-no-es-broma-el-plagio-se-paga.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-04-21/configuracin-de-claws-mail-para-gmail-con-imap.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-02-07/buenas-referencias-malas-terribles-referencias.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2008-11-17/de-ingenieros-informticos-y-otras-criaturas-fantsticas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-04-15/patentes-inconcebibles-y-basura-espacial.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /script/2011-03-17/un-pequeo-script-para-tener-un-jukebox-de-modarchive-org.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-02-17/gnu-linux-venezuela.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2005-12-06/pollo-a-la-cebolla.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-02-10/simulador-ns2-principios-bsicos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2013-02-14/debian-gnu-linux-install-party-v-7.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-05-19/clase-latex-para-escribir-el-pfc.html
---
<file_sep>#!/bin/bash --
# -*- mode:shell-script; coding:utf-8; tab-width:4 -*-
set -e
if [ ! -e _site ]; then
git clone https://github.com/CRySoL/CRySoL.github.io _site
fi
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-08-20/cambiar-el-timezone-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-01-25/dell-vende-porttiles-con-ubuntu-linux-de-serie-pero.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-02-07/updated-debian-packages-for-devkitpro.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-08-12/drupal-6-3.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-03-29/charlas-sobre-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-13/gnu-emacs-ortografa-al-vuelo-con-flyspell.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-04-15/cmo-hacer-una-metadistro-usb.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-01-26/nohands-convirtiendo-nuestro-pc-en-un-manos-libres-bluetooth.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-03-25/cmake-construir-una-librera-esttica-y-o-dinmica.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-03-15/correr-programas-dos-en-molinux-con-dosemu.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-06/mezclador-de-audio-por-software-con-alsa.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-11-15/aceleracin-de-video-por-hardware-en-powerpc.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-10-09/http-unidadlocal-com-nos-plagia-y-quiz-a-ti-tambin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-04-01/anti-hoygan-en-desarrollo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-09-20/empezando-con-mercurial.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-02-20/afbackup.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-10-09/instalar-un-sistema-bacula-en-gnu.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-12-16/bilo-y-nano-te-necesitan.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-02-05/salvar-y-recuperar-una-base-de-datos-mysql.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-03-09/energa-en-ns2.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /art/2007-02-28/arte.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-17/port-knocking-llama-antes-de-entrar.html
---
<file_sep>---
migrated: node/1346
layout: post
title: 'CMake: Enlazado de librerías'
created: 1269625517
author: cleto
comments: true
category: recipe
tags:
- Arco
- devel
---
<blockquote>This recipe explains how to link binaries against static and dynamic libraries.</blockquote>
<h2>Basic linking</h2>
By now we already know how to build binaries and libraries with CMake. Now suppose that the dummy library used in previous examples is installed in an accessible path and that the program hello.cpp uses the library dynamically. A possible implementation of this scenario would be the following:
<div>
{% highlight text %}
CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
PROJECT(hello)
# For the includes
FIND_PATH(dummy_path dummy.h)
IF(NOT dummy_path)
MESSAGE(FATAL_ERROR "** The dummy.h header is missing, dummy!")
ENDIF()
INCLUDE_DIRECTORIES(${dummy_path})
# Look for the .so
FIND_LIBRARY(dummy_lib dummy)
IF(NOT dummy_lib)
MESSAGE(FATAL_ERROR "** The dummy library is missing, dummy!")
ENDIF()
ADD_EXECUTABLE(hello hello.cpp)
TARGET_LINK_LIBRARIES(hello ${dummy_lib})
{% endhighlight %}
</div>
First, we check that the dummy library is reachable (both the header and the .so file). Using the TARGET_LINK_LIBRARIES command we tell CMake that the <tt>hello</tt> target must be linked against the library found in <tt>dummy_lib</tt>.
For a static library the code would be very similar, since CMake detects which kind of library it is and generates whatever is needed to link statically or dynamically depending on what it found.
<h2>FindPkgConfig</h2>
Many libraries used on systems such as GNU rely on the pkg-config tool, which helps to obtain the different variables needed to build against them (CFLAGS, LDFLAGS, etc.).
CMake provides a module called FindPkgConfig with useful commands for finding libraries that use pkg-config. Although it is included in the standard distribution, CMake does not recommend this module because there may be proprietary systems without pkg-config installed. But that does not worry us... right? :-)
Suppose the <tt>hello</tt> program uses glib. We could check that it is installed and link against it as follows:
<div>
{% highlight text %}
# Load the module
include(FindPkgConfig)
PKG_CHECK_MODULES(glib REQUIRED glib-2.0) # REQUIRED = it will fail if not found
# "glib_INCLUDE_DIRS" now holds the include directories (the compiler's -I flags)
INCLUDE_DIRECTORIES(${glib_INCLUDE_DIRS})
ADD_EXECUTABLE(hello hello.cpp)
# "glib_LIBRARIES" holds the library names (the compiler's -l flags)
TARGET_LINK_LIBRARIES(hello ${glib_LIBRARIES})
{% endhighlight %}
</div>
And how come glib_LIBRARIES and glib_INCLUDE_DIRS end up holding that data? CMake's Find* modules create variables that the user can rely on when the library or object being searched for is found. A good way to know which variables are available is to check the documentation. This information also appears in the header of each module (in Debian, <tt>/usr/share/cmake2.8/Modules/*.cmake</tt>).
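To try any of these examples, the usual out-of-source build is enough (the <tt>build</tt> directory name is just a convention):
<div class="console">
{% highlight console %}
$ mkdir build && cd build   # out-of-source build
$ cmake ..
$ make
{% endhighlight %}
</div>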
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-11-06/uimanager-con-actions-avanzadas-en-pygtk.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-21/svn-externals-dependencias-entre-repos-subversion.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-04-15/gnu-emacs-macros-de-teclado.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-02-28/gran-acogida-de-la-plataforma-de-formacin-de-la-junta-con-el-ceslcam.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-27/ejecutar-programas-x-en-otra-mquina-vindolos-en-local.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-02-25/manipulacin-de-ficheros-avi.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-04-09/gnu-emacs-the-kill-ring.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-06-22/bindings-python-de-libreras-c-con-boost-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-02-12/cmo-funciona-apt-dpkg.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-02-24/particin-cifrada-con-dm-crypt-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /art/2007-11-13/libro-de-pegatinas-sobre-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-04-02/ejecutar-un-comando-como-si-fueras-otro-usuario-sudo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2011-07-24/la-historia-de-opengl-vs-direct3d.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-02-27/mas-librerias-para-kicad.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2006-09-15/resucitando-pentiumii.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-02-12/preguntas-inteligentes-lectura-obligatoria.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-01-07/nuevo-window-xp-le-acercamos-al-holocausto-nuclear.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-09-23/mini-watchdog-en-c.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-12-02/manifiesto-en-defensa-de-los-derechos-fundamentales-en-internet.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-05-12/contenidos-libres.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-08-06/gracias-al-canon.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2008-12-01/los-ilegales-intentan-engaarte-no-te-dejes-manipular-para-que-nadie-te-time.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-06-01/trucos-y-cosas-gnome-y-cambio-de-ventanas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2010-07-01/procrastinacin-y-el-mal-de-la-computadora.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-12-19/encuentro-digital-con-david-bravo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-01-20/3-meses-de-estadsticas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-25/arranque-desde-red-sin-disco-duro.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-04-24/reproductores-gstreamer.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-07-01/apple-wireless-keyboard-con-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-25/montar-un-repositorio-estndar-de-paquetes-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /script/2011-07-05/nombres-de-teclas-en-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-10-29/visita-guiada-a-emacs-rplica-en-castellano.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-12-09/para-reir-un-poco.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2006-11-17/ensalada-de-queso-de-cabra-con-anchoas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-12-13/emulador-de-cisco-ios-dynamips-y-dynagen.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2012-12-25/virtual-machine-unattended-debian-installations-with-libvirt-and-d-i-preseeding.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2006-02-17/por-qu-no-me-va-el-compilador-de-c-gcc-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-07-05/inotify-acciones-disparadas-por-cambios-en-el-sistema-de-ficheros.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-05-05/plugin-de-bsqueda-para-crysol.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-05/configuracin-y-disfrute-de-un-sai-nut-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-24/lantronix-xport.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-03-10/mdem-comtrend-ct-350-en-gnu-linux-con-el-driver-ueagle-atm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-19/recuperar-fotos-y-videos-borrados-de-una-cmara-digital.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-10-19/ati-y-libgl-so-1.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-01-09/subir-gnesis-a-www-espaciolinux-com.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-15/mdem-comtrend-ct-351-con-adsl-de-telefnica-en-debian-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-01-25/inyeccin-de-trfico-en-chipsets-atheros.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-10-26/grub-perdido.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2012-01-17/ver-eventos-deportivos-usando-sopcast-y-xbmc.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-08-21/el-fin-del-ethereal.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-02-01/cambiar-contrasea-en-particin-cifrada.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /offer/2007-11-14/bolsa-de-trabajo-ceslcam.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-05-10/libremeeting-en-miraflores-de-la-sierra-madrid.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-05-08/presentacin-del-club-net.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2011-07-05/ga-canon-y-presuncin-de-culpabilidad.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-11-10/el-supremo-de-ee-uu-decide-qu-invenciones-son-patentables.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-08-19/microsoft-somos-todos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-12-24/servir-un-repositorio-subversion-con-apache-2-0.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-08-05/la-esi-de-ciudad-real-distribuir-gnesis-a-sus-alumnos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-12-16/samsung-syncmaster-203b-con-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-10-11/como-aprovechar-la-ram-al-mximo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /challenge/2008-09-26/reto-de-la-semana-display-7-segmentos-oblcuo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-11-16/widows-vista-rtm-leaked-y-activado.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-06-04/windows-vista-ideal-si-tu-porttil-va-sobrado-de-batera.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-02-18/m-libera-la-especificacin-del-formato-de-los-ficheros-de-office.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-04-11/gaim-cambia-de-nombre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-09-29/probando-os.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2013-03-19/adios-google-reader-hola-tiny-tiny-rss.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-09-29/eduroam-en-android-con-certificado-usando-un-qr-code.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2010-03-24/si-es-legal-es-legal.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-09-10/configurar-paneles-de-gnome-con-doble-monitor.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-12/oscilador-interno-del-pic16f690.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-12-10/analizador-lxico-sintctico-y-semntico-con-jflex-y-cup.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2011-06-10/crysol-en-baquia-tv.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-03-05/comienzan-los-preparativos-para-la-fase-final-del-i-concurso-de-sl-de-c-lm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-05-04/instalacin-de-un-servidor-apache-php-y-mysql-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-01-26/easygit-git-para-gente-normal.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-27/tcpstat-estadsticas-de-la-red.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-07-06/instalar-debian-gnu-hurd-bajo-qemu.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2006-10-29/redes-neuronales-eso-es-todo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-17/conectar-por-ssh-a-travs-de-un-proxy-http.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-04-03/utilizar-un-mando-wii-con-tu-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-11-16/al-parecer-utilizar-gnu-linux-es-delito-en-la-universidad-de-boston.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-05-06/el-software-libre-favorece-a-la-empresa-www-expansion-com.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-07-31/crear-un-instalador-auto-extraible-para-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-11-04/control-de-acceso-con-pam.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2007-05-02/fluendo-busca-gente.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-04-08/cintas-de-backup-y-cargadores-mt-y-mtx.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-03-17/patrn-threadpool-en-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-02-28/gstreamer-whats-new.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2008-11-21/ayuda-con-mail-sendmail.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-09-27/acceso-a-red-cmpus-con-firefox-y-greasemonkey.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-05-25/nueva-versin-de-uclmwifi-1-5-5.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-19/arreglar-fuentes-pequeas-en-aplicaciones-gtk.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /script/2007-01-04/poner-etiquetas-a-los-discos-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-10-15/software-libre-y-recetas-de-cocina.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-22/recuperacin-de-un-sistema-completo-mondo-y-bacula.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-06-18/qu-hay-detrs-de-opera-unite.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2008-01-16/sesin-tcnica-sun-opensource-technologies-en-la-esi.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-05-14/gnu-emacs-mejorando-tabbar.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-01-29/redmine-pam-authentication-plugin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-05-04/proxy-socks-con-ssh-ms-fcil-imposible.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-02-19/unos-tests-de-logo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-07/cups-o-dnde-est-mi-cola.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2008-06-20/yo-plagio.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /challenge/2006-08-13/el-reto-de-la-semana-5-edicin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-07-24/linus-tolvards-eres-mu-tonto-tontismo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2010-11-11/manual-de-auctex-para-gnu-emacs.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-20/empezar-en-zope-plone.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2005-10-30/tarta-de-manzana.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-11-10/introduccin-a-los-hilos-con-la-librera-glib.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-12-07/conectar-a-internet-por-medio-de-un-mvil-3g-editado.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2007-12-07/entrevista-a-ana-mara-mndez-apemit.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-11-06/creative-zen-con-debian-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-10-25/grupo-de-usuarios-de-gnu-linux-de-boston-usa.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-02-16/gaim-2-0.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-05-20/canon-ixus-55-con-gphoto2-en-debian-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-03-04/quin-teme-al-lobo-feroz.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-04-21/bogoutil-completa-bogofilter.html
---
<file_sep>---
migrated: node/1380
layout: post
title: 'FoxG20 '
created: 1279288389
author: icaro
comments: true
tags:
- embedded
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-03-26/cmake-compilando-aplicaciones-zeroc-ice.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-10-09/hacer-un-makefile-para-paquete-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-09-26/buenas-a-tods.html
---
<file_sep>---
migrated: node/1438
layout: post
title: Kit de desarrollo libre para PS3
created: 1291925549
author: int-0
comments: true
category: recipe
tags:
- embedded
---
<blockquote>Hello friends! If you are the happy owner of a PS3 with firmware 3.50 or earlier and you have some <em>jailbreaker</em> device at hand, you will know that running home-made programs, or <em>homebrew</em>, is within your reach. But of course, that alone is nothing for this site... we are more into <em>developing</em> those home-made programs. Several leaked official <em>SDKs</em> float around the internet, but you should not use them (nor, therefore, publish applications compiled with those kits). Using them is illegal because you do not hold the license, so if you find one it is fine as study material, but that is as far as its practical usefulness goes. In this recipe we explain how to compile our own legal development kit to build legal home-made applications that will run on <em>jailbroken</em> consoles.</blockquote>
<h2>What we need</h2>
Well, our distro has to provide the following packages:
<div class="console">
{% highlight console %}
# aptitude install git libelf-dev python zlib-dev autotools autoconf automake bison flex gcc make autotools patch subversion wget texinfo
{% endhighlight %}
</div>
Unfortunately I may have forgotten some; if so, please let me know.
Now we need a place to put everything, namely <tt>/usr/local/</tt>. There we will create a directory and give write permission to the <em>staff</em> group (which we will belong to), so that we do not have to do things as <em>root</em>:
<div class="console">
{% highlight console %}
# cd /usr/local
# mkdir ps3dev
# chown root:staff ps3dev
# chmod 775 ps3dev
{% endhighlight %}
</div>
Good, now we will adjust the environment variables so that everything stays nicely organised (besides running these lines, add them to your <tt>~/.bashrc</tt> or similar) (do this already as your regular user):
<div class="console">
{% highlight console %}
$ export PS3DEV=/usr/local/ps3dev
$ export PATH=$PATH:$PS3DEV/bin
$ export PATH=$PATH:$PS3DEV/ppu/bin
$ export PATH=$PATH:$PS3DEV/spu/bin
{% endhighlight %}
</div>
Fine, with this we can compile and install the <em>toolchain</em>, but we will also need the <em>SDK</em> to be able to create our PS3 executables. We make room for the <em>SDK</em> as follows:
<div class="console">
{% highlight console %}
$ cd $PS3DEV
$ mkdir psl1ght
{% endhighlight %}
</div>
And just as before, we create the environment variable (put this one in your <tt>~/.bashrc</tt> as well):
<div class="console">
{% highlight console %}
$ export PSL1GHT=$PS3DEV/psl1ght
{% endhighlight %}
</div>
OK, everything is ready... now let's download and compile...
<h2>Compiling and installing the <em>toolchain</em></h2>
Well, I have my <tt>~/repos</tt> directory where I keep all this stuff; you can put it wherever you like... if you do it like me:
<div class="console">
{% highlight console %}
$ cd ~/repos
{% endhighlight %}
</div>
And now, download:
<div class="console">
{% highlight console %}
$ git clone https://github.com/ooPo/ps3toolchain.git
{% endhighlight %}
</div>
When this finishes: compile, patch and install, that is:
<div class="console">
{% highlight console %}
$ cd ps3toolchain
$ ./toolchain.sh
{% endhighlight %}
</div>
And this will take foreveeeer... (well, depending on your system...). The point is that it has to build two <em>toolchains</em>: one for the <em>PowerPC</em> (known as the <em>PPU</em>) and another for the <em>Synergistic Processors</em> (known as the <em>SPUs</em>).
<blockquote>Warning: since some of the URLs used by the script may be <em>offline</em>, the following change is needed in these scripts: <tt>scripts/002-gcc-4.5.1-PPU-stage1.sh</tt>, <tt>scripts/004-gcc-4.5.1-PPU-stage2.sh</tt>, <tt>scripts/007-gcc-4.5.1-SPU-stage1.sh</tt> and <tt>scripts/009-gcc-4.5.1-SPU-stage2.sh</tt>
The line:
<pre>wget --continue http://www.mpfr.org/mpfr-2.4.2/mpfr-2.4.2.tar.bz2 || { exit 1; }</pre>
must be replaced with:
<pre>wget --continue http://ftp.gnu.org/gnu/mpfr/mpfr-2.4.2.tar.bz2 || { exit 1; }</pre>
</blockquote>
If everything went well you should be able to do something like:
<div class="console">
{% highlight console %}
$ ppu-gcc --version
ppu-gcc (GCC) 4.5.1
Copyright (C) 2010 Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
{% endhighlight %}
</div>
<div class="console">
{% highlight console %}
$ spu-gcc --version
spu-gcc (GCC) 4.5.1
Copyright (C) 2010 Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
{% endhighlight %}
</div>
<h2>Compiling and installing the <em>SDK</em>: <em>PSL1GHT</em></h2>
Well, as before, if you keep your repos in <tt>~/repos</tt> do the following; otherwise, do it wherever you like:
<div class="console">
{% highlight console %}
$ cd ~/repos
{% endhighlight %}
</div>
We download the <em>SDK</em>:
<div class="console">
{% highlight console %}
$ git clone https://github.com/HACKERCHANNEL/PSL1GHT.git
{% endhighlight %}
</div>
When this finishes: compile, patch and install, that is:
<div class="console">
{% highlight console %}
$ cd PSL1GHT
$ make
$ make install
{% endhighlight %}
</div>
As its name suggests... it is quite light, so it will not take long... after this you will be ready to create your PS3 programs... buuut...
<h2>Some useful libraries</h2>
If we want to install some libraries ported to the PS3, such as <em>zlib</em>, <em>libpng</em>, <em>libjpeg</em>, <em>freetype</em>, <em>pixman</em>, <em>libogg</em>, <em>libvorbis</em>, <em>libzip</em> or even <em>cairo</em>, we do the following:
<div class="console">
{% highlight console %}
$ cd ~/repos
{% endhighlight %}
</div>
We download the scripts by <em>ooPo</em>, who is a really nice guy:
<div class="console">
{% highlight console %}
$ git clone https://github.com/ooPo/ps3libraries.git
{% endhighlight %}
</div>
And compile and install, that is:
<div class="console">
{% highlight console %}
$ cd ps3libraries
$ ./libraries.sh
{% endhighlight %}
</div>
We already have quite a few things to start programming a <em>hello world</em> and a bit more... but as I said before, there are many useful libraries out there that we can install as we need them.
<h2><em>vpcomp:</em> vertex <em>program</em> compiler</h2>
First of all: what on earth is this? Roughly the following: it turns out that in order to move vertices and all that stuff, the NVIDIA folks created a kind of "virtual machine" which interprets a series of <em>opcodes</em> to work with scalar and vector data. On the PS3 this virtual machine can be interpreted by an <em>SPU</em>, but of course, someone has to compile those <em>vertex programs</em> into these SPU opcodes, and that someone is: <b>vpcomp</b>
<h3>Not-so-free requirements</h3>
Well, this needs the <em>NVidia Cg Toolkit</em>, a compiler/interpreter of a sort of assembly-like language for <em>OpenGL</em>, which we can download <em>free of charge</em> from the <a href="http://developer.nvidia.com/page/cg_main.html">NVIDIA Cg website</a>. Download the <b>tgz</b> for your architecture (x86 or x86/64).
Now comes an ugly hack: we are not going to install this package in our system (well, if you want to, go ahead; it is also available as a Debian package). Since we only need one library, we will unpack the tgz and copy just what we need into <tt>/usr/local</tt> (we assume we are in the directory where we downloaded the file):
<div class="console">
{% highlight console %}
$ unp Cg-3.0_November2010_x86.tgz
$ cp usr/lib/libCg.so /usr/local/lib/
$ cp -r usr/lib/include/Cg /usr/local/include/
$ sudo ldconfig
{% endhighlight %}
</div>
The last line refreshes the library cache, to make sure the library is "available".
<h3>Compiling and installing</h3>
Right, we can now go back to the <em>PSL1GHT</em> repository to build the <em>compiler</em>...
<div class="console">
{% highlight console %}
$ cd PSL1GHT/psl1ght/tools/vpcomp
$ make
{% endhighlight %}
</div>
Sadly, the <tt>Makefile</tt> has no <em>install</em> target, so we do it by hand:
<div class="console">
{% highlight console %}
$ cp vpcomp $PSL1GHT/bin/
{% endhighlight %}
</div>
And our vertex compiler is ready... ;)
<h2><em>Tiny3D:</em> something like <em>OpenGL</em> on the PS3</h2>
Yes, the <em>RSX</em> devours <em>OpenGL</em> like nobody's business... but to exploit that (for now) we would have to resort to the <em>official SDK</em>, so for the time being we will make do with the magnificent <em>Tiny3D</em> by <em>Hermes</em>.
<h3>Requirements</h3>
All the previous steps, including <em>vpcomp</em>.
<h3>Compiling and installing</h3>
So, from our repositories directory:
<div class="console">
{% highlight console %}
$ git clone https://github.com/hermesEOL/tiny3d.git
$ cd tiny3d
$ make all
{% endhighlight %}
</div>
If there are no errors, we are ready to use the library. In <tt>tiny3d/docs</tt> you will find a small document about the library and the PS3's <em>RSX</em>.
<h2><em>ps3soundlib:</em> easy audio for our programs</h2>
Also thanks to <em>Hermes</em> we have this library, which lets us use 8 and 16 bit samples in mono and stereo, play samples in a <em>loop</em>, just once, or even call a <em>callback</em> when they finish. In short... more useful stuff.
<h3>Requirements</h3>
The steps to get the <em>toolchain</em> and <em>PSL1GHT</em>; that is enough...
<h3>Compiling and installing</h3>
From our repositories directory:
<div class="console">
{% highlight console %}
$ git clone https://github.com/hermesEOL/ps3soundlib.git
$ cd ps3soundlib
$ make
$ make install
{% endhighlight %}
</div>
As before, if we got no errors, the library will be ready to use in our <em>homebrews</em>.
<h2>The icing on the cake: the <em>SDL</em> library</h2>
<blockquote>Before anyone gets the wrong idea: <em>SDL</em> was officially ported to the PS3 a while ago, so: <em>what is all this for?</em>. Easy: that port is actually for the <em>OtherOS</em>, that is, whatever <em>GNU/Linux</em> distribution you may have running on a PS3 whose firmware still offers that option, and it lets you create applications for <em>that</em> distribution. This <em>port</em>, instead, lets you create applications that use <em>SDL</em> <em>natively</em> on the PS3. That is, no <em>OtherOS</em> or anything like it: you compile, you get a <tt>PKG</tt> you can install, and off you go... ;)</blockquote>
<h3>Requirements</h3>
The steps to get the <em>toolchain</em>, <em>PSL1GHT</em> and the <em>ps3libraries</em> (you know: <em>Some useful libraries</em>).
<h3>Compiling and installing</h3>
As always, from our repositories directory:
<div class="console">
{% highlight console %}
$ git clone https://github.com/cebash/SDL_PSL1GHT.git
$ cd SDL_PSL1GHT
$ ./script.sh
$ make
$ make install
{% endhighlight %}
</div>
And if we got no errors we will have <em>libSDL</em> ready to use in our <em>homebrew</em> applications.
And that is it... I think with this we can start writing our home-made applications. In any case, there are more libraries and utilities out there that I will keep adding to this recipe as I try them, OK?
<h2>To finish: the <tt>Makefile</tt></h2>
Here is a typical <tt>Makefile</tt> that creates the automatic <em>targets</em> to generate the <em>PKG</em> files containing our applications, which we can install on the console to run/distribute our creations.
<pre>.SUFFIXES:
ifeq ($(strip $(PSL1GHT)),)
$(error "PSL1GHT must be set in the environment.")
endif
include $(PSL1GHT)/Makefile.base
TARGET := $(notdir $(CURDIR))
BUILD := build
SOURCE := source
INCLUDE := include
DATA := data
LIBS := -lzip -lz -lgcm_sys -lreality -lsysutil -lio -ljpgdec -lnet -lsysmodule
TITLE := Hello World - PSL1GHT
APPID := TEST00003
CONTENTID := UP0001-$(APPID)_00-0000000000000000
CFLAGS += -g -O2 -Wall --std=gnu99
CXXFLAGS += -g -O2 -Wall
ifneq ($(BUILD),$(notdir $(CURDIR)))
export OUTPUT := $(CURDIR)/$(TARGET)
export VPATH := $(foreach dir,$(SOURCE),$(CURDIR)/$(dir)) \
$(foreach dir,$(DATA),$(CURDIR)/$(dir))
export BUILDDIR := $(CURDIR)/$(BUILD)
export DEPSDIR := $(BUILDDIR)
CFILES := $(foreach dir,$(SOURCE),$(notdir $(wildcard $(dir)/*.c)))
CXXFILES := $(foreach dir,$(SOURCE),$(notdir $(wildcard $(dir)/*.cpp)))
SFILES := $(foreach dir,$(SOURCE),$(notdir $(wildcard $(dir)/*.S)))
BINFILES := $(foreach dir,$(DATA),$(notdir $(wildcard $(dir)/*.bin)))
export OFILES := $(CFILES:.c=.o) \
$(CXXFILES:.cpp=.o) \
$(SFILES:.S=.o) \
$(BINFILES:.bin=.bin.o)
export BINFILES := $(BINFILES:.bin=.bin.h)
export INCLUDES := $(foreach dir,$(INCLUDE),-I$(CURDIR)/$(dir)) \
-I$(CURDIR)/$(BUILD)
.PHONY: $(BUILD) clean pkg run
$(BUILD):
@[ -d $@ ] || mkdir -p $@
@make --no-print-directory -C $(BUILD) -f $(CURDIR)/Makefile
clean:
@echo Clean...
@rm -rf $(BUILD) $(OUTPUT).elf $(OUTPUT).self $(OUTPUT).a $(OUTPUT).pkg
pkg: $(BUILD)
@echo Creating PKG...
@mkdir -p $(BUILD)/pkg
@mkdir -p $(BUILD)/pkg/USRDIR
@cp $(ICON0) $(BUILD)/pkg/
@$(FSELF) -n $(BUILD)/$(TARGET).elf $(BUILD)/pkg/USRDIR/EBOOT.BIN
@$(SFO) --title "$(TITLE)" --appid "$(APPID)" -f $(SFOXML) $(BUILD)/pkg/PARAM.SFO
@$(PKG) --contentid $(CONTENTID) $(BUILD)/pkg/ $(OUTPUT).pkg
run: $(BUILD)
@$(PS3LOADAPP) $(OUTPUT).self
else
DEPENDS := $(OFILES:.o=.d)
$(OUTPUT).self: $(OUTPUT).elf
$(OUTPUT).elf: $(OFILES)
$(OFILES): $(BINFILES)
-include $(DEPENDS)
endif</pre>
It is taken from the <em>Hello world</em> example of PSL1GHT.
<h2> Ps3sdk Builder</h2>
Well, here you can find a project of my own, in an ultra-beta quick & dirty state, which nevertheless installs (tested) the <em>toolchain</em>, <em>psl1ght</em>, the <em>ps3libraries</em> and <em>sdl_psl1ght</em>, setting up the environment variables and all that stuff. <b>Careful: it does not check that you have the required packages mentioned in this recipe!</b>
<a href="http://github.com/int-0/Ps3sdk-Builder">Ps3sdk-Builder at github.com</a>
Enjoy! :D
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-08/configurar-el-superservidor-inetd.html
---
<file_sep>---
migrated: node/963
layout: post
title: 'Truco tonto: engañar a wget'
created: 1217975009
author: int-0
comments: true
category: recipe
tags:
- networking
---
<blockquote>Hi folks! Maybe at some point you have tried to download a complete site with <em>wget -r</em> and, for no apparent reason, it has refused to carry out the task... perhaps this <em>cheap trick</em> can help.</blockquote>
<!--break-->
<h2>Who needs this trick</h2>
Let's see... suppose you want to download a <b>whole</b> site with <em>wget</em>. The site detects that you are connecting with a <em>strange</em> browser and denies you the pages. You change <em>wget</em>'s <em>user agent</em> with the appropriate option... and it does not work either!... but this time you got one more file: <b>robots.txt</b>. This is a file that servers send to web <em>robots</em> (<em>spiders</em>) with information about what they may and may not download (for example, to prevent a <em>spider</em> from endlessly sending <em>php</em> requests).
Since this <tt>robots.txt</tt> is supposed to be honoured, <em>wget</em> is committed to obeying it and offers no option to ignore it... (the <em>man</em> page basically tells you that it is a commitment of the developers), so if the page you want to download sends a <tt>robots.txt</tt> like the following:
<pre>User-Agent: *
Disallow: /</pre>
Well, now we are in trouble, because our <em>wget</em> is going to refuse to download anything...
<h2>The cheap trick</h2>
OK... suppose we want to fully download the domain <em><a href="http://www.listillos.com">www.listillos.com</a></em> and, on trying it, we got a directory <tt>www.listillos.com</tt> containing the dreaded <tt>robots.txt</tt> file mentioned above. Well, if we give <em>wget</em> the <b>-c</b> (<em>continue</em>) option, when it tries to download <tt>robots.txt</tt> it will not do so, because it already downloaded one (and that is the one it will use), so we only have to modify the file into something like:
<pre>User-Agent: *
Allow: *
Disallow: /nada_de_nada_hombre_que_las_cosas_son_pa_compartirlas</pre>
The last line is important, because if we only change the <em>Disallow</em> to <em>Allow</em>, our <tt>robots.txt</tt> will be smaller than the one on the server and then <em>wget</em> will download it again. With that useless final line we make sure <em>wget</em> does not fetch the real file... so... which one does it use? Ours :P.
As you can see, the trick is so silly that I am almost embarrassed to post it, but it may help someone... :). In any case, the <em>wget</em> authors could fix this quite easily: by downloading <tt>robots.txt</tt> and processing it directly in memory, with no need to write it to disk...
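Putting it all together, a session would look more or less like this (using the made-up domain from above; the user agent string is just an example):
<div class="console">
{% highlight console %}
$ wget -r -U "Mozilla/5.0" http://www.listillos.com      # first attempt: only robots.txt shows up
$ editor www.listillos.com/robots.txt                    # replace its content with the permissive version above
$ wget -r -c -U "Mozilla/5.0" http://www.listillos.com   # -c keeps our local robots.txt and mirrors the site
{% endhighlight %}
</div>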
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-30/instalar-gnesis-en-un-usb-desde-windows-con-perdn.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-06-01/gnu-linux-como-espantapjaros.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-02-05/programacin-de-usb-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-11-06/implementacin-de-cifrado-rc4-en-awk.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-13/configuracin-manual-de-una-conexin-de-red-en-gnu-linux-con-net-tools.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-08/tes-emacs-server.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-01-15/nokia-6300-como-mdem-para-debian-con-simyo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-07-24/entorno-de-desarrollo-para-el-chumby.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-11-30/gnu-emacs-reemplazar-texto-en-mltiples-ficheros.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-12-25/manipulacin-de-ficheros-postscript.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /challenge/2006-05-29/el-reto-de-la-semana-3-edicin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-27/pygtk-y-glade-guis-instantneas.html
---
<file_sep><div class="sidebar span3">
{% include custom/logo %}
{% include sidebar/for_posts %}
</div>
<div class="node span9">
<div class="page-header">
<h1>{{ page.title }} {% if page.tagline %}<small>{{page.tagline}}</small>{% endif %}</h1>
</div>
<div class="row-fluid post-full">
<div class="span12">
<div class="content">
{{ content }}
</div>
{% for link_hash in page.links %}
{% for link in link_hash %}
{{ link[0] }}
{% endfor %}
{% endfor %}
<hr/>
{% include custom/post_embedded_paginator %}
<hr/>
{% include JB/comments %}
</div>
</div>
</div>
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2010-11-22/curso-de-introduccin-a-gnu-linux-en-la-esi-de-ciudad-real.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-26/hacer-paquetes-deb-a-partir-de-rpm-y-tar-gz.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-10-24/el-colmo-del-colmo-especulando-con-el-contenido-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-18/configurar-eclipse.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-12-01/proponiendo-actividades.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-10-25/zeroc-icestorm-gestionando-eventos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2013-02-08/configurar-apt-para-que-no-descargue-traducciones.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-05-05/comparaciones-odiosas-git-contra-el-mundo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2010-12-28/tomates-piratas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-08-21/utilizar-htc-magic-g2-como-mdem-en-debian-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-12-29/solucin-al-problema-con-los-overlays-en-monitores-secundarios.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2011-05-09/liberado-el-cdigo-de-adventure-game-studio-ags.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-12-02/error-de-pmount-en-ubuntu-5-10.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-04-25/nintendo-wii-y-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-08-26/administrar-scripts-de-arranque.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-09-16/recuperar-iconos-perdidos-en-gnome.html
---
<file_sep>---
migrated: node/93
layout: post
title: Keep it up, guys
created: 1131707974
author: sqater07
comments: true
category: tale
---
Keep it up, guys, with the party.
I am a former student of Computer Science (Management).
It's great to see how active you are.
Cheers from <a href="http://www.lacoctelera.com/quijote/">quijote</a>
If I could, I would install and try Molinux in depth.
I think that for wireless you should include some extra .deb so that it gets configured beforehand.
Good luck.
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-10-11/crear-un-paquete-debian-binario-sencillito.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-05-21/ice-en-la-fonera-ice-e-en-openwrt.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-07-17/la-realidad-amazon-supera-a-la-ficcin-derecho-a-leer.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-11-28/tontuna-sobre-gates.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-09-11/micro-servidor-dns-con-scapy.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-04-10/edicin-de-imgenes-en-consola-con-imagemagick.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2008-03-19/m-utiliza-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-02-21/tabletas-wacom-en-x-org.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-23/sacando-el-jugo-a-las-tarjetas-atheros-madwifi.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-06-27/metaclase-para-invocacin-automtica-del-constructor-de-la-superclase.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-02-20/tes-emacs-bonito.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-10-10/editar-men-de-gnome-y-solucin-al-men-debian-desaparecido.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-08-04/metareceta-creacin-de-paquetes-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-09/servidor-tftp-con-inetd-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2008-10-01/el-inesperado-valor-docente-del-absentismo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-12-20/increble-pero-cierto.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2007-01-21/wuming-la-guerrilla-cultural.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-02-06/respuesta-de-m-a-los-porttiles-a-100-para-el-tercer-munndo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-07-26/intro-psp-pseudo-demoscene-a-k-a-respuesta-a-nuestro-int-0.html
---
<file_sep>---
migrated: node/655
layout: post
title: RTP streaming using GStreamer
created: 1177438611
author: diego
comments: true
category: recipe
tags:
- gstreamer
---
<blockquote class="head">
We will see how to do streaming using the RTP (Real-time Transport Protocol) protocol with GStreamer pipelines. We will transmit both audio and video through the pipelines.
</blockquote>
<h2>Introduction</h2>
You need to have the gstreamer0.10-tools package installed, plus the required plugins: gstreamer0.10-plugins-base, gstreamer0.10-farsight, gstreamer0.10-plugins-good, gstreamer0.10-plugins-ugly, gstreamer0.10-alsa, gstreamer0.10-ffmpeg and gstreamer0.10-x.
RTP support in GStreamer is currently partial; even so, it is already possible to run some pipelines that transmit video and audio using RTP on top of UDP.
RTP is made up of two protocols: RTP and RTCP. RTP is in charge of transmitting the media stream data, and RTCP handles flow control. In the udpsrc and udpsink elements of the pipeline you must specify an even port, which will be used by RTP; RTCP will use the next one, an odd port. An optional host can also be given to choose the interface through which the packets are sent.
<h2>MPEG4 video</h2>
Run the following pipeline (the sender) with the verbose option:
<pre class="console">$ gst-launch-0.10 -v filesrc location=~/streams/sincity.mp4 ! ffdemux_mov_mp4_m4a_3gp_3g2_mj2 ! rtpmp4vpay ! udpsink port=5000
Setting pipeline to PAUSED ...
Pipeline is PREROLLING ...
/pipeline0/rtpmp4vpay0.src: caps = application/x-rtp, media=(string)video, payload=(int)96, clock-rate=(int)90000, encoding-name=(string)MP4V-ES, ssrc=(guint)2642861434, clock-base=(guint)3234275225, seqnum-base=(guint)52397, profile-level-id=(string)0, config=(string)000001200086c5d4c307d314043c1463000001b25876694430303334
/pipeline0/rtpmp4vpay0.sink: caps = video/mpeg, width=(int)640, height=(int)480, framerate=(fraction)30000/1001, systemstream=(boolean)false, mpegversion=(int)4, codec_data=(buffer)000001200086c5d4c307d314043c1463000001b25876694430303334
/pipeline0/rtpmp4vpay0.src: caps = application/x-rtp, media=(string)video, payload=(int)96, clock-rate=(int)90000, encoding-name=(string)MP4V-ES, ssrc=(guint)2642861434, clock-base=(guint)3234275225, seqnum-base=(guint)52397, profile-level-id=(string)3, config=(string)000001b003000001b50900000100000001200086c5d4c307d314043c1463000001b25876694430303334
/pipeline0/udpsink0.sink: caps = application/x-rtp, media=(string)video, payload=(int)96, clock-rate=(int)90000, encoding-name=(string)MP4V-ES, ssrc=(guint)2642861434, clock-base=(guint)3234275225, seqnum-base=(guint)52397, profile-level-id=(string)3, config=(string)000001b003000001b50900000100000001200086c5d4c307d314043c1463000001b25876694430303334
Pipeline is PREROLLED ...
Setting pipeline to PLAYING ...
New clock: GstSystemClock</pre>
Pay attention to the Capabilities (caps) of udpsink: that is the type of the data that will be sent over the network. Copy the caps of the udpsink element. Note that the caps change every time the sender is run.
Next, launch the receiver, specifying those caps in the udpsrc
<pre class="console">$ gst-launch-0.10 udpsrc port=5000 caps ="application/x-rtp, media=(string)video, payload=(int)96, clock-rate=(int)90000, encoding-name=(string)MP4V-ES, ssrc=(guint)2642861434, clock-base=(guint)3234275225, seqnum-base=(guint)52397, profile-level-id=(string)3, config=(string)000001b003000001b50900000100000001200086c5d4c307d314043c1463000001b25876694430303334" ! rtpmp4vdepay ! ffdec_mpeg4 ! xvimagesink sync=false</pre>
On the receiver, the "sync=false" option in the final sinks (xvimagesink, alsasink, etc.) is important, since there is no RTP session manager implemented yet to handle synchronization in the pipeline.
In udpsrc you can specify a URI of the form "udp://host:port" telling it where to take the data from.
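For instance, the receiver above could also be written like this (a sketch; "SENDER_CAPS" stands for the caps copied from the sender):
<pre class="console">$ gst-launch-0.10 udpsrc uri=udp://127.0.0.1:5000 caps="SENDER_CAPS" ! rtpmp4vdepay ! ffdec_mpeg4 ! xvimagesink sync=false</pre>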
<h2>Vorbis audio</h2>
The procedure is the same. First run the sender:
<pre class="console">$ gst-launch-0.10 -v audiotestsrc ! audioconvert ! vorbisenc ! rtpvorbispay ! udpsink port=5000</pre>
Now copy the caps into the receiver pipeline and run the receiver.
<pre class="console">$ gst-launch-0.10 udpsrc port=5000 caps="CAPS_DEL_EMISOR" ! rtpvorbisdepay ! vorbisdec ! audioconvert ! alsasink sync=false</pre>
In theory, all this could be used to broadcast our desktop live to many machines via multicast; useful, for example, so that students can watch the teacher's screen on their own computers.
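As a rough idea of what that could look like, the sender might be something along these lines (an untested sketch: it assumes the ximagesrc and ffenc_mpeg4 elements are available, and the multicast address is just an example):
<pre class="console">$ gst-launch-0.10 -v ximagesrc ! ffmpegcolorspace ! videoscale ! ffenc_mpeg4 ! rtpmp4vpay ! udpsink host=224.1.1.1 port=5000</pre>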
There is also support for transmitting Theora and H.263 video, but it is not sufficiently tested yet.
<h2>References</h2>
<a href="http://gstreamer.freedesktop.org/documentation/rtp.html">GStreamer: RTP support</a>
<a href="http://gstreamer.freedesktop.org/src/gst-plugins-good/gst-plugins-good-0.10.5.tar.gz">Source code of the Good plugins, where RTP lives, version 0.10.5</a>
<a href="http://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-good-plugins/html/gst-plugins-good-plugins-udpsrc.html">The udpsrc element</a>
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-01-03/configuracin-bsica-para-jed.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-04-24/el-software-es-un-producto.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-02-20/crear-y-editar-huellas-para-componentes-con-pcb.html
---
<file_sep>---
migrated: node/262
layout: post
title: Goals of the website
created: 1142614101
author: admin
comments: true
category: new
---
<blockquote>One of CRySoL's goals is the defense of the freedom of software and of other cultural works. Any other display of freedom advocacy is literally out of place and should be made in other forums devoted to such matters. Please: whoever wants to write, keep in mind what the <a href="/?q=quees">goal of this website</a> is.
</blockquote>
This remark is not aimed at most of you, since we have existed for less than a year and there has been no incident worth mentioning. However, today I had to ban two articles and one user because of their content. It does not matter whether I agree or disagree with it. What matters is that such content has no place on this website.
Please: if anyone sees off-topic or out-of-tone content, let the administrators know so it can be removed immediately. Remember that we are using the University's services and we need their goodwill. We do not want the site to be shut down because of a single troublemaker.
Thank you very much.
-- MagMax, one of the admins.
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2011-09-09/una-curiosidad-que-me-pas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-03-27/montar-y-desmontar-una-psp.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-08-12/sincronizacin-del-calendario-de-gnome-con-google-calendar.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-08-26/hasta-nunca-ati.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-08-29/asignatura-sobre-software-libre-en-la-universidad-de-sevilla.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2012-10-03/distribuyendo-programas-python-en-el-pypi-python-package-index.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-04-03/neutralidad-tecnolgica.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2007-04-21/ayuda-audio-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2007-05-20/cmo-realizar-bsquedas-con-el-emule-descargar-desde-otro-ordenador.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2012-09-05/ghdl-vhpidirect-o-como-crear-compilar-y-ejecutar-un-programa-vhdl-con-llamadas-a-cdigo-c.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-08-24/ows-osd-workspace-switcher.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-09-03/bilbliografa-sobre-mtodos-giles.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-03-31/ati-radeon-9600-en-debian-gnu-linux-con-el-driver-privativo-fglrx.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-28/synergy-una-consola-para-dominarlas-todas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2009-04-15/fase-final-concurso-univ-software-libre-de-clm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-01-13/acer.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-02-04/ejecutar-acciones-disparadas-por-eventos-en-repositorios-mercurial-hooks.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-11-15/acerca-de-novell-y-microsoft.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2005-12-06/berenjenas-rellenas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2008-03-25/se-buscan-colaboradores-para-talleres-en-fase-final-del-i-concurso-sl-de-c-lm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-03-06/gnu-emacs-enviar-emails.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2008-05-13/unas-reflexiones-sobre-sintactic-sugar-y-el-goto.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-10-13/cambia-el-aspecto-de-google.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-05/instalacin-de-tinyos-1-x-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2012-01-29/espaguetti-con-brcoli.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-12-14/planificacin-manual-de-cpus-con-taskset.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-27/instalar-el-driver-privativo-de-nvidia-en-debian-gnu-linux.html
---
<file_sep>---
migrated: node/1498
layout: post
title: Help
created: 1303686431
author: Marot77
comments: true
---
Hi crysoleros! Before starting I want to congratulate all the members for your wonderful organization on software freedom day, when R. Stallman attended. I could not attend the previous ones for lack of transport and because of my studies, but here it is, the long-awaited photo <a href="http://arco.esi.uclm.es/~cleto.martin/fotos_jornadas/IMG_4103_900x677.html">http://arco.esi.uclm.es/~cleto.martin/fotos_jornadas/IMG_4103_900x677.html</a>, thank you very much for publishing it. Well then, to the point;
<!--break-->
I have a Livebox V1.1 ST router that I found at my town's recycling point (I usually go there to put together servers to drive purely from the terminal and learn the command line). I tried it out and it seems to work perfectly. It has a USB port, so I have thought about plugging a USB modem into it and having the router provide the wireless signal from that USB modem; this router has a USB2 port for which I have a cable. The router clearly runs linux, so I have thought about getting in via SSH or telnet (I do not know whether that can be done from GNU/Linux) and configuring it somehow... etc. Any idea or anything along those lines? Regards.
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-02-24/patentes-esta-vez-paga-microoft-1-5-millones-de-millones-de-dlares.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-03-06/nautilus-con-samba.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-12-04/un-googol-para-dominarlos-a-todos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-12-30/python-3-0-a-k-a-python-3000-para-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-07-14/jccm-quiere-windows-7.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-27/abrir-una-sesin-x-en-otra-mquina.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-10-30/frases-clebres-perdidas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-08-27/imprmelo-todo-con-gnu-a2ps.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2005-12-15/prohiben-el-software-libre-en-francia.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-02-06/ya-est-disponible-en-la-plataforma-de-formacin-del-ceslcam-el-curso-molinux-3-2.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2007-03-13/i-jornadas-sobre-piratera-en-la-uclm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2006-06-06/pluin-de-tomcat-en-eclipse.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-18/mantener-un-paquete-debian-con-svn-buildpackage.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-02-09/fallos-en-el-repositorio-de-mesa-a-da-9-de-febrero-de-2007-drm_vblank_secondary.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-11-03/escribir-caracteres-unicode-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-10-29/para-los-que-tengan-amd64.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-05-02/bindings-python-de-libreras-c-con-sip.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-04-17/latex-listados-de-cdigo-cmodos-y-resultones-con-listings.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2012-04-16/gnu-emacs-pestaas-realmente-tiles-cambiando-con-m-num.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-09-06/obtener-la-ip-con-scapy.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-09-13/ssh-y-scp-para-acceder-a-equipos-remotos-sin-escribir-la-clave-public-key-authentication.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-09-19/i-concurso-universitario-de-software-libre-de-c-lm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2007-08-25/grandes-inventos-de-microsoft-el-ratn-y-la-interfaz-grfica.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2012-05-23/configurar-las-impresoras-de-la-esi-en-tu-debian-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2011-05-27/la-arquitectura-de-aplicaciones-software.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-10-23/manipulacin-de-documentos-pdf-desde-python-con-pypdf.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2011-01-24/actualizado-el-manual-de-gnu-emacs-a-22-2.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-12-06/problemas-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-02-10/router-chupachups-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2006-07-21/tutorial-para-montar-un-hub-de-direct-connect.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2010-10-20/arco-devel-speedbar.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-09-26/logging-con-colorcitos-en-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-03-03/por-lo-menos-hay-alguno.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-16/compartir-ficheros-con-shfs.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-14/descargar-las-iso-de-los-dvd-de-debian-con-jigdo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-02-05/save-and-restore-a-mysql-data-base.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2011-02-20/campaa-para-la-candidatura-de-la-comunidad-del-software-libre-a-los-premios-prncipe-de-asturias.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2007-02-01/eth2_rename-el-problema-de-udev.html
---
<file_sep>---
migrated: node/180
layout: post
title: Installing NINO on Debian GNU/Linux
created: 1137003786
author: KO_mOd_O
comments: true
category: recipe
tags:
- security
- networking
- Debian
---
<blockquote class="head">
This recipe explains the installation and initial configuration of NINO (NINO Is Not Openview) on Debian GNU/Linux. Specifically, the installation was done on Debian Sid with Linux kernel 2.6.14.
NINO is a web-based network management solution that uses SNMP and WMI to monitor hosts and network devices.
For more information visit the official <a href="http://nino.sourceforge.net/nino/index.html">NINO</a> page.
</blockquote>
<!--break-->
<h1> 0.- Requirements. </h1>
<ul>
<li>Perl 5.8 or higher.</li>
<li>MySQL (client and server).</li>
<li>Net-SNMP.</li>
<li>Apache and the Perl modules for Apache.</li>
<li>Perl DBD/DBI, Time::HiRes and Net-SNMP modules.</li>
</ul>
<h1> 1.- Installation. </h1>
Using a package manager such as apt:
<div class="console">
{% highlight console %}
# apt-get update
# apt-get install perl mysql-client mysql-server
# apt-get install snmp snmpd
# apt-get install apache2 apache-perl libapache2-mod-perl2
# apt-get install libdbd-mysql-perl
# apt-get install libsnmp-perl
{% endhighlight %}
</div>
<h1> 2.- Configure the database. </h1>
For this we need a user already created. In this example, as the superuser, we create a user with all privileges that can connect both locally and remotely:
<div class="console">
{% highlight console %}
mysql> GRANT ALL PRIVILEGES ON *.* TO 'usuario'@'localhost'
-> IDENTIFIED BY 'password' WITH GRANT OPTION;
mysql> GRANT ALL PRIVILEGES ON *.* TO 'usuario'@'%'
-> IDENTIFIED BY 'password' WITH GRANT OPTION;
{% endhighlight %}
</div>
<h1> 3.- Install NINO. </h1>
We can get the source code <a href="http://prdownloads.sourceforge.net/nino/nino-4.1.8.tar.bz2?download">here</a>.
<div class="console">
{% highlight console %}
# tar xjvf nino-4.1.8.tar.bz2
# mv nino-4.1.8/var/nino /var
# cd /var/nino/scripts
# ./install
{% endhighlight %}
</div>
Note that before running the NINO install script you must edit line 27 of it, removing the -v option, so that it looks like this:
<div class="console">
{% highlight console %}
HTTP_VERSION=` ${HTTPD} | grep "version" `
{% endhighlight %}
</div>
<h1> 4.- Restart the services. </h1>
<div class="console">
{% highlight console %}
# /etc/init.d/mysql restart
# /etc/init.d/apache2 restart
{% endhighlight %}
</div>
<h1> 5.- Initial NINO configuration. </h1>
In a web browser:
<div class="console">
{% highlight console %}
http://localhost/nino/install.html
{% endhighlight %}
</div>
The default user and password are admin, admin.
At this point we must provide the basic configuration parameters, such as the e-mail address where events will be notified, where the database to use lives (for example, localhost), the database user and the database password.
After clicking Submit, a window will confirm that the tables have been created. Finally, click the Next button.
<h1> 6.- Start the NINO services. </h1>
<div class="console">
{% highlight console %}
# /etc/init.d/nino start
{% endhighlight %}
</div>
<h1> 7.- Enjoy NINO. </h1>
In a web browser:
<div class="console">
{% highlight console %}
http://localhost/nino/main.html
{% endhighlight %}
</div>
<file_sep>---
migrated: node/214
layout: post
title: Ralink RT2500 WiFi on GNU/Linux
created: 1138886434
author: javieralso
comments: true
category: recipe
---
<blockquote>How to compile and install the drivers for Ralink RT2500 WiFi cards on GNU/Linux
</blockquote>
<h2>Introduction</h2>
WiFi cards with the Ralink RT2500 chipset are more and more common. They come integrated in many laptops and as PCMCIA cards, for example. Unfortunately, Linux kernels do not ship with support for this chipset, so you have to add it yourself, although the task is much easier than it may seem.
<h2>First things first: do you have an RT2500?</h2>
Well, for those less versed in this topic, the way to find out which chipset your wireless interface (or any other piece of hardware in your machine) uses is the <tt>lspci</tt> command.
When you run this command you will see a list of all the PCI devices in your system (if you have a PCMCIA card inserted in your laptop, it will show up too). Now you only have to look for your network device and, if you see that its chipset is an RT2500, you can carry on.
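For example, something like this should be enough to spot it (the exact description string varies from card to card):
<pre class="console">
$ lspci | grep -i ralink
</pre>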
<h2>What we need</h2>
You need the driver sources. They can be obtained in several ways:
<ul>
<li>Debian package:
<pre class="console">
# apt-get -f install rt2500-source
</pre>
</li>
<li>
Download the driver's <a href="http://sourceforge.net/projects/rt2400">source code</a> directly from the internet. This way you can get the latest available version.</li>
</ul>
You also need the headers of the kernel you are running. On Debian:
<pre class="console">
# apt-get -f install linux-headers-$(uname -r)
</pre>
Now you can continue with the compilation.
<h2>Compilation and installation</h2>
First of all we must create a symlink called <em>linux</em> pointing to the directory where our headers live, so that all the needed includes are found at compile time:
<pre class="console">
$ cd /usr/src/
$ ln -s kernel-headers-2.6.8-1-686 linux
</pre>
Another important thing: you must compile this driver with gcc version 3.4, otherwise problems are very likely to show up. If you do not have it installed:
<pre class="console">
# apt-get -f install gcc-3.4
</pre>
Now you are ready to unpack the sources and compile them. If you downloaded them with apt, they will be at <tt>/usr/src/rt2500.tar.gz</tt>. Unpack and compile:
<pre class="console">
$ tar -zxvf rt2500.tar.gz
$ cd modules
$ make CC=gcc-3.4
</pre>
Once everything has compiled without problems, you can proceed with the installation:
<pre class="console">
$ make install
</pre>
The installation will have created the file <tt>/etc/modprobe.conf</tt>. Inside it there will be something like '<em>alias ra0 rt2500</em>'. You have to copy that line into the <tt>/etc/modules.conf</tt> file and then delete <tt>/etc/modprobe.conf</tt>.
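If you prefer to do it from the shell, something like this does the trick (a sketch that simply automates the two steps above):
<pre class="console">
# grep "^alias ra0" /etc/modprobe.conf >> /etc/modules.conf
# rm /etc/modprobe.conf
</pre>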
With this, the driver is supposed to be installed. Now all that is left is loading the module:
<pre class="console">
# modprobe rt2500
</pre>
If that does not work (it should), you can try this instead (from the directory where you compiled the driver):
<pre class="console">
# insmod rt2500.ko
</pre>
and with this the driver should be properly installed and your network device detected as <tt>ra0</tt>.
Sometimes (especially with the newest Linux versions) the <tt>depmod</tt> command does not seem to work well, so after the module is loaded it is advisable to run:
<pre class="console">
# update-modules
</pre>
With this you will have correctly updated the module list, and the one for your WiFi card will be loaded at boot time.
<h2>Links</h2>
<ul>
<li>
<a href="http://sourceforge.net/projects/rt2400">RT2400/2500 for GNU/Linux</a>
</li>
<li>
<a href="http://rt2x00.serialmonkey.com/wiki/index.php/Main_Page">Project site at serialmonkey.com</a>
</li>
<li>
<a href="http://www.esdebian.org/forum/viewtopic.php?forum=7&showtopic=61164">dichosa wireless ralink rt2500</a>
</li>
</ul>
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-10-15/contenedores-y-downcasting-en-c.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-11-28/el-sistema-de-3-avisos-funciona-en-uk.html
---
<file_sep>---
migrated: node/666
layout: post
title: Fluendo is looking for people
created: 1178086479
author: fsancho
comments: true
category: tale
tags:
- gstreamer
---
This is just a note for those who might be interested. <a href="http://www.fluendo.com/">Fluendo</a>, a company based in Barcelona devoted to the development of and consulting on multimedia systems for Unix and GNU/Linux, and a significant contributor to GStreamer development, is looking for people. Today I found <a href="http://elisa.fluendo.com/2007/04/24/were-hiring/">the offer</a> on the blog of <a href="http://elisa.fluendo.com/">Elisa</a>, Fluendo's media center.
First of all, English is required, since it is not a Spanish company but a European one, and work is done in English. The rest you can see on the page itself.
<!--break-->
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-07-20/debian-xen-3-0-en-5-minutos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2009-04-28/stallman-devuelve-el-dinero-de-los-pisos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-15/compartir-la-conexin-mediante-un-mdem-adsl-comtrend-ct-351.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2010-10-07/el-juego-hexglass.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-08-25/gnu-emacs-modo-para-programacin-en-c.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-03-04/plugin-opacity-en-xgl-compiz.html
---
<file_sep>---
migrated: node/769
layout: post
title: Molinux at the UCLM
created: 1189075105
author: ottoreuss
comments: true
category: new
tags:
- Molinux
---
I guess you already know this, but I read it today and I am posting it here in case someone does not. It seems that the Regional Ministry of Industry and Information Society will provide, during the 2007/2008 academic year, advanced training on Molinux, Castilla-La Mancha's free software distribution, to the students of the Escuela Politécnica de Cuenca and the Escuela Superior de Informática de Ciudad Real.
<!--break-->
You can read the full piece here: <a href="http://www.lanzadigital.com/diariolanza/pb/periodico/periodicodetalle.asp?REG=12169&sec=CIUDADREAL">http://www.lanzadigital.com/diariolanza/pb/periodico/periodicodetalle.asp?REG=12169&sec=CIUDADREAL</a>
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-12-11/sincronizacin-remota-en-4-pasos-con-rsync.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-10-27/servir-un-repo-mercurial-por-http-solo-lectura.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-02-16/crear-nuevos-elementos-para-gstreamer.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-12-28/richard-stallman-socio-del-betis-broma-del-28-diciembre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-04-13/latex-ayuda-en-lnea-en-gnu-emacs.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-11-11/animo-chicos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-11-04/para-que-dices-que-serva-esto.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-02-16/curiosidades.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-01-09/utilidades-para-medir-el-ancho-de-banda-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2006-09-16/comprar-un-porttil.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2006-10-28/lasaa-a-la-boloesa.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-07-07/gnu-emacs-cursor-en-forma-de-lnea-vertical-delgada.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2007-07-10/ayuda-con-visin-por-computador.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-03-28/pena-de-muerte-para-los-delitos-contra-la-propiedad-intelectual.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-18/empezar-con-zeroc-ice-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-02-27/armonizando.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-01-11/qu-es-eso-de-rest.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-04-11/escribir-en-chino-en-gnu-linux.html
---
<file_sep>---
migrated: node/1460
layout: post
title: Tele-teaching on terminals
created: 1297259678
author: int-0
comments: true
category: recipe
tags:
- shell
---
<blockquote>Surely at some point you have had to explain to somebody (via pidgin or over the phone) how to do this or that in their terminal, pasting the little commands for them to run and so on... well, as always, there is an easier and more fun way to do it.</blockquote>
<!--break-->
<h2>What we need</h2>
You probably already have it installed, and if not, just install it:
<div class="console">
{% highlight console %}
# aptitude install bsdutils
{% endhighlight %}
</div>
This little package contains some interesting tools, among them the one we will use: <em>script</em>. This tool is meant for tele-teaching (or learning), although it could have many other uses (as you will see).
<h2>To the point</h2>
Suppose our student <em>tobias</em> needs a little push. We connect (via <em>ssh</em>, for example) to the machine where he is working and find out how many terminals he has (to know which one we will work on):
<div class="console">
{% highlight console %}
$ who
tobias tty7 2011-02-09 09:10 (:0)
tobias pts/0 2011-02-09 09:11 (:0.0)
tobias pts/1 2011-02-09 10:01 (:0.0)
tobias pts/2 2011-02-09 10:49 (:0.0)
{% endhighlight %}
</div>
Suppose we want to "connect" to his terminal 0 (we must be that very user or good old <em>root</em>):
<div class="console">
{% highlight console %}
# script -f /dev/pts/0
{% endhighlight %}
</div>
The student <em>tobias</em> will see the following on his screen:
<div class="console">
{% highlight console %}
$ Script iniciado (mié 09 feb 2011 12:38:30 CET)
$
{% endhighlight %}
</div>
And we will enter a new <em>shell</em>... <b>his</b> <em>shell</em>... ;) . Now we can explain to him live which commands he has to run, etc. When we are done:
<div class="console">
{% highlight console %}
$ exit
Script terminado; el fichero es /dev/pts/0
{% endhighlight %}
</div>
And our teaching will be over... :)
<h2>More goodies?</h2>
Of course: <em>script</em> can also "record" sessions and play them back later... although I leave all that to you :P (you only have to take a look at its man page).
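Just as a teaser, recording and replaying a session could look roughly like this (a sketch; scriptreplay ships in the same package):
<div class="console">
{% highlight console %}
$ script -t 2> timing.log session.log
... work normally and type "exit" when done ...
$ scriptreplay timing.log session.log
{% endhighlight %}
</div>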
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-27/instalar-debian-sarge-en-un-porttil.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-10-17/programacin-de-tareas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-03-10/que-te-cambies-ya-coiiio.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-02-22/configurar-altavoces-auriculares-bluetooth-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-11-08/averiguar-la-ip-de-un-chisme-que-la-consigue-por-dhcp.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-12-11/video-streaming-en-flash-con-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-04-27/aadir-soporte-multimedia-en-openoffice-org.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-03-01/firemacs-el-iceweasel-intuitivo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-09-13/downloading-films-is-stealing.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-06-26/configuracin-manual-de-una-conexin-de-red-en-debian-con-iproute2.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-10-01/java-ice-netbeans-eclipse-y-otras-malas-hierbas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-02-04/redcampus-desde-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2011-06-18/programadores-de-poca-fe.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /humor/2008-08-20/primeras-reacciones-al-banner-anti-software-privativo-de-crysol.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-04-12/microsoft-paga-por-publicitarse-bajo-la-palabra-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-04-24/streaming-rtp-usando-gstreamer.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-08-11/concurso-universitario-de-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2010-09-02/microsoft-patenta-el-apagado-del-sistema-operativo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-04-27/tu-gnu-linux-siempre-en-hora-y-con-tiempo-contnuo-con-ntp.html
---
<file_sep>---
migrated: node/127
layout: post
title: Nautilus also has "Open terminal here"!
created: 1133168714
author: int-0
comments: true
category: recipe
---
<blockquote>This recipe is dedicated to all those people who go around saying that their KDE or their file-roller is better than Nautilus for the simple reason that Nautilus does not have the "Open terminal here" option... well then, look for new arguments, because that one stopped being valid a long time ago... You should know that you can add to Nautilus all the functionality you want in a very simple way: Nautilus-scripts (or G-scripts).</blockquote>
<h1>Nautilus-scripting</h1>
Nautilus lets the user create their own scripts in any language, be it python, perl, bash-script, etc. To do so, the only thing we have to do is copy them into the following directory:
<div class="console">
{% highlight console %}
~/.gnome2/nautilus-scripts
{% endhighlight %}
</div>
The first time we drop a script in there we must restart nautilus for the new entry to appear in the context menu; after that, it is updated dynamically with the scripts and directories we create in that folder.
<h1>Open terminal here</h1>
Moreover, nautilus sets some environment variables to make writing useful scripts even easier. One of them gives us the list of selected files; if we select a folder and open a shell there, we have what we want... in bash-script this is extremely easy: we create the file "terminal aqui" with the following content:
<pre>
#!/bin/bash
cd $NAUTILUS_SCRIPT_SELECTED_FILE_PATHS
gnome-terminal
</pre>
We change its permissions and move it to the nautilus scripts folder:
<div class="console">
{% highlight console %}
$ chmod +x "terminal aqui"
$ mv "terminal aqui" ~/.gnome2/nautilus-scripts/
{% endhighlight %}
</div>
We restart nautilus (this does not look very elegant...):
<div class="console">
{% highlight console %}
$ killall nautilus
{% endhighlight %}
</div>
If we now right-click on a folder, a new option will appear in the context menu and, inside it, the "terminal aqui" entry. You can create subfolders to make subsections, etc. There are plenty of <a href="http://g-scripts.sourceforge.net/">useful scripts</a> out there (although some of them are a bit outdated by now...)
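As another tiny example of what those environment variables give you (a sketch; it assumes zenity is installed and, being lazy, it will choke on paths with spaces), a script that pops up the total size of whatever is selected:
<pre>
#!/bin/bash
# Show the total size of the current selection in a dialog
# (note: paths with spaces would need extra quoting)
total=$(du -chs $NAUTILUS_SCRIPT_SELECTED_FILE_PATHS | tail -n 1 | cut -f1)
zenity --info --text="Total size: $total"
</pre>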
<h1>References</h1>
<ul>
<li><a href="http://g-scripts.sourceforge.net/">http://g-scripts.sourceforge.net/</a></li>
</ul>
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-02-23/empezando-con-systemc.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-03-15/dot-net-club-dont-set-club.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-02-18/dependency-injection-and-mocking-classes-using-c-and-google-mock-library.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2005-10-26/crema-de-calabacn.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-04-14/abierto-plazo-de-inscripcin-fase-final-concurso-software-libre-de-clm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-07-26/zeroc-icee-para-el-chumby.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-12-03/en-mi-tintero-personal.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-02-26/tes-eliminar-ficheros-basura-generados-por-emacs.html
---
<file_sep>---
migrated: node/574
layout: post
title: Wacom tablets in X.Org
created: 1172063119
author: int-0
comments: true
category: recipe
tags:
- drivers
---
<blockquote>Hi! Since in GNU we have superb programs such as <em>Inkscape</em> or <em>The Gimp</em>, wanting a nice graphics tablet to work with graphics in a more comfortable and easy way is more and more common. If you have thought about buying one and are afraid of the support linux may give you, I recommend the <em>Wacom</em> ones: they have great support (it seems wacom collaborates with the driver developer). </blockquote>
<h2>Installing the required software</h2>
The drivers support both linux 2.4.x and 2.6.x; we will explain how to install and configure them on a 2.6 series kernel. On Debian (how could it be otherwise):
<div class="console">
{% highlight console %}
$ m-a ai wacom
{% endhighlight %}
</div>
And we already have the kernel modules; now we must install the X.Org driver:
<div class="console">
{% highlight console %}
$ apt-get install xserver-xorg-input-wacom
{% endhighlight %}
</div>
It can also be useful to have:
<div class="console">
{% highlight console %}
$ apt-get install wacom-tools
{% endhighlight %}
</div>
Good, everything is ready to configure it, let's get to it...
<h2>Configuring X.Org</h2>
<blockquote>If there is still anyone out there using XFree86, this is fully compatible... ;)</blockquote>
Wacom tablets have several physical pointing devices: the pen, a mouse, etc. They are able to detect which device we are using, that is: whether we are using the pen or the mouse (in my case). In the X.Org configuration file we will configure the different types the driver supports, and we can have specific settings for each one; this way, for example, the pointer will behave one way if we use the pen and another way if we are on the mouse (this is great!). So here goes my snippet for the <tt>/etc/X11/xorg.conf</tt> file:
<pre>## Wacom Volito
#
Section "InputDevice"
Driver "wacom"
Identifier "stylus"
Option "Device" "/dev/input/wacom"
Option "Type" "stylus"
Option "USB" "on"
Option "Threshold" "15"
#Option "Mode" "Relative"
Option "Vendor" "WACOM"
EndSection
Section "InputDevice"
Driver "wacom"
Identifier "eraser"
Option "Device" "/dev/input/wacom"
Option "Type" "eraser"
Option "USB" "on"
Option "Threshold" "15"
#Option "Mode" "Relative"
Option "Vendor" "WACOM"
EndSection
Section "InputDevice"
Driver "wacom"
Identifier "cursor"
Option "Device" "/dev/input/wacom"
Option "Type" "cursor"
Option "USB" "on"
Option "Threshold" "15"
#Option "Mode" "Relative"
Option "Vendor" "WACOM"
EndSection</pre>
...and we will add, inside the <em>Section "ServerLayout"</em> of the same file:
<pre> InputDevice "stylus" "SendCoreEvents"
InputDevice "eraser" "SendCoreEvents"
InputDevice "cursor" "SendCoreEvents"</pre>
In the device options you can control EVERYTHING, there are lots of parameters (check the manual). The main ones are <em>Threshold</em> (sensitivity) and <em>Mode</em>, which can be <em>Relative</em> or <em>Absolute</em> (the default). Absolute mode means that the corners of the tablet are the corners of the screen: sweeping the tablet sweeps the screen. Relative mode is meant for working with greater precision, but it is less comfortable and we will also need a mouse.
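By the way, the <em>xsetwacom</em> tool from wacom-tools lets you play with these parameters at runtime, without restarting X (a sketch; the exact parameter names depend on the driver version):
<div class="console">
{% highlight console %}
$ xsetwacom list
$ xsetwacom set stylus Mode Relative
{% endhighlight %}
</div>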
<h3>The /dev/input/wacom device</h3>
If you install on Debian (and I guess on others it is the same) a <tt>/dev/input/wacom</tt> device will appear, which is a symlink to <tt>/dev/input/eventX</tt>, X being a number from 1 upwards. This is so because there must be a <em>udev</em> rule like this one (file <tt>/etc/udev/rules.d/10_wacom.rules</tt>):
<pre>KERNEL="event*", SYSFS{idVendor}="056a", NAME="input/%k", SYMLINK="input/wacom%e"</pre>
With that file in place and after running what is shown in the <em>Erratic pointer behaviour</em> section, that symlink should appear. In any case, if it does not show up, this is my quick-and-dirty-solution<em>-o-matic-2000</em>:
Using <em>wacdump</em> we will find out which device our wacom is:
<div class="console">
{% highlight console %}
$ wacdump /dev/input/event2
{% endhighlight %}
</div>
If <em>that thing</em> is indeed a wacom, something like this will show up:
<pre>MODEL=Wacom Volito ROM=1.4-1
CLS=USB VNDR=Wacom DEV=Volito SUB=MODEL-VOL</pre>
Press Ctrl+C (or Ctrl+X+Alt+Esc+X+F+win+5+Ctrl+Esc+U if it were <em>emacs</em>) to exit and create the link:
<div class="console">
{% highlight console %}
$ ln -s /dev/input/event2 /dev/input/wacom
{% endhighlight %}
</div>
Now, if we restart X.Org, the tablet will be working... almost...
<h2>Erratic pointer behaviour</h2>
The most likely thing is that you move the pen a little, the pointer goes crazy, clicks start firing everywhere, your session ends up "half dead" and you send me an e-mail calling me names... well, here is the problem: when you plug in the tablet, the <em>usbmouse</em> module claims the new hardware and handles it itself... WRONG! We must get the <em>wacom</em> module loaded first and then whatever has to come next. You can also configure <em>udev</em> for this... but since I have not fought with that either, here goes my quick-and-dirty solution:
<div class="console">
{% highlight console %}
$ /etc/init.d/udev stop
$ rmmod wacom usbmouse ehci_hcd ohci_hcd usbhid
$ modprobe wacom
$ /etc/init.d/udev start
{% endhighlight %}
</div>
...and now we can restart X.Org and enjoy our tablet! :)
<h2>Appendix A: installation and compilation <em>from scratch</em></h2>
Well then... we like living on the edge, don't we? Let's go to the <a href="http://linuxwacom.sourceforge.net/">Linux Wacom</a> project page, click on <em>Project Status</em> on the development version (currently 0.7.7-4), save the file to disk and unpack it:
<div class="console">
{% highlight console %}
# unp linuxwacom-0.7.7-4.tar.bz2
# cd linuxwacom-0.7.7-4
{% endhighlight %}
</div>
Let's see what we will get out of this:
<div class="console">
{% highlight console %}
# ./configure
...noise with no apparent meaning to us...
----------------------------------------
BUILD ENVIRONMENT:
architecture - i486-linux-gnu
linux kernel - yes 2.6.18
module versioning - yes -DCONFIG_MODVERSIONS -DMODVERSIONS -include /lib/modules/2.6.18-3-686/build/include/linux/modversions.h
kernel source - yes /lib/modules/2.6.18-3-686/build
Xorg SDK - yes /usr/include/xorg
XSERVER64 - no
dlloader - yes
XLib - yes /usr/lib
TCL - yes /usr/include/tcl8.4/
TK - yes /usr/include/tcl8.4/
ncurses - yes
BUILD OPTIONS:
wacom.o - no
wacdump - yes
xidump - yes
libwacomcfg - yes
libwacomxi - yes
xsetwacom - yes
hid.o - no
usbmouse.o - no
evdev.o - no
mousedev.o - no
input.o - no
tabletdev.o - no
wacom_drv.so - yes /usr/lib/xorg/modules/input
wacom_drv.o - no
----------------------------------------
{% endhighlight %}
</div>
Here it tells us which pieces it is going to build and which ones it will completely skip. We are mostly interested in it building <em>wacom.o</em> (the kernel module) and <em>wacom_drv.so</em> (the X.Org driver). If they do not show up as <em>yes</em> it can be for two reasons: they are not built by default (run <em>./configure --help</em> and the needed flags will be shown) or you are missing the X development libraries or the kernel headers. The remaining items are more or less useful modules whose purpose is out of the scope of this recipe...
Once we have run <em>configure</em> and everything is to our liking:
<div class="console">
{% highlight console %}
$ make
# make install
{% endhighlight %}
</div>
And if none of this fails... we can start with the recipe as if nothing had happened... :)
<h2>Links</h2>
<a href="http://linuxwacom.sourceforge.net/">Linux Wacom</a> project page
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-05-28/mini-tutorial-de-gnu-gdb.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-08-01/m-surface-no-esto-no-es-de-m.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-05-19/megabox-linux-media-center.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-12-05/parted-y-python-pyparted.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-06-21/bill-gates-se-retira-oh-que-pena-ms-grande.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-02-22/avances-en-gnesis-3-0.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-10-31/servicios-de-gmx-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-02-05/aprende-linux-con-el-ministerio.html
---
<file_sep>---
layout: post
title: "Testing deterministic behavior with doublex"
category: recipe
tags: Python doublex agile
author: david_villa
lang: en
---
Sometimes you need to test that some procedure is deterministic, that
is, it is done exactly the same way again and again (builder or
factory method patterns are examples of that). The production code performs exactly the same invocations with the same
arguments. In these situations we are not checking the "right" behavior, but just that the invocation sequence does not change.
You may use a doublex Mock to test that. The first execution "trains" the
Mock; the second one verifies that the behaviour is the same.
<!--break-->
An example:
{% gist 5728565 %}
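In case the gist is unavailable, the idea looks roughly like this (a sketch: `build_thing` is a made-up production function):

{% highlight python %}
from hamcrest import assert_that
from doublex import Mock, verify


def build_thing(collaborator):
    # made-up production code under test: it must always issue the same calls
    collaborator.connect("db")
    collaborator.insert(42)


def test_build_thing_is_deterministic():
    with Mock() as collaborator:
        build_thing(collaborator)  # first run records the expected invocation sequence

    build_thing(collaborator)      # second run must replay exactly the same calls
    assert_that(collaborator, verify())
{% endhighlight %}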
## References
* "python-doublex":https://bitbucket.org/DavidVilla/python-doublex
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-01-26/reconfigurando-el-xport-al-vuelo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-03-05/manipulacin-de-dvd-video.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-05-07/nueva-versin-de-uclm-wifi.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-09-05/utilizar-un-servidor-ftp-remoto-como-backup.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-09-06/molinux-en-la-uclm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2011-01-19/donald-knuth-galardonado-con-el-premio-fronteras-de-la-fundacin-bbva.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-04-01/openwrt-en-la-fonera.html
---
<file_sep>---
migrated: node/281
layout: post
title: Script to listen to radio stations over the internet
created: 1144700740
author: aledelgal
comments: true
category: script
---
<blockquote>
This tiny, tiny recipe is about how to write a script to listen to our favourite radio stations over the internet without having to keep looking up which .asx address each station had.
</blockquote>
<!--break-->
<h1>Introduction</h1>
This script is very silly; I am posting it here just so nobody has to bother writing it, or in case it had not occurred to someone, since I do find it somewhat useful: now with a single command we have access to all the stations we want. It is fully customizable, you only have to add the stations you like. <a href="/node/158#comment-666">Here</a> there are many more for whoever wants to add them to their own personalized script; mine is just a suggestion.
<h1>Let's see that script...</h1>
<pre>
#!/bin/bash
echo ""
echo "Script para seleccionar qué radio queremos escuchar"
echo""
echo " 1 -> Los 40 principales"
echo " 2 -> Cadena dial"
echo " 3 -> Europa FM"
echo " 4 -> Cadena Ser"
echo " 5 -> Onda Cero"
echo " 6 -> Kiss FM"
echo " 7 -> RNE 1"
echo " 8 -> COPE"
echo " 9 -> RNE clásica"
echo " 10 -> m80 Radio"
echo " 11 -> Salir"
echo ""
echo "Seleccione una de las radios "
read radio
if [ "$radio" = "1" ]; then
mplayer -playlist http://www.los40.com/nuevo_player/40Principales.asx
elif [ "$radio" = "2" ]; then
mplayer -playlist http://www.los40.com/nuevo_player/dial.asx
elif [ "$radio" = "3" ]; then
mplayer -playlist http://www.ondacero.es/europafm.asx
elif [ "$radio" = "4" ]; then
mplayer -playlist http://www.cadenaser.com/player/SER-TIC.asx
elif [ "$radio" = "5" ]; then
mplayer -playlist mms://www.ondacero.es/live.asx
elif [ "$radio" = "6" ]; then
mplayer -playlist http://pointers.audiovideoweb.com/asxfiles-live/ny60winlive7001.asx
elif [ "$radio" = "7" ]; then
mplayer -playlist http://www.rtve.es/rne/audio/r1live.asx
elif [ "$radio" = "8" ]; then
mplayer -playlist mms://live.cope.edgestreams.net/reflector:34744
elif [ "$radio" = "9" ]; then
mplayer -playlist http://www.rtve.es/rne/audio/RNEclasica.asx
elif [ "$radio" = "10" ]; then
mplayer -playlist http://www.los40.com/nuevo_player/m80.asx
elif [ "$radio" = "11" ]; then
exit
fi
#FIN
</pre>
We save it with whatever name we like, for example radios.sh
We give it execution permissions
<div class="console">
{% highlight console %}
$ chmod +x radios.sh
{% endhighlight %}
</div>
And if we want to have it always available, without having to go to the directory where it is stored, we move it to /usr/local/bin
<div class="console">
{% highlight console %}
# mv radios.sh /usr/local/bin
{% endhighlight %}
</div>
Now we just have to run it and we have aaaall the stations one key press away
<div class="console">
{% highlight console %}
$ radios.sh
{% endhighlight %}
</div>
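By the way, if the if/elif chain grows too much for your taste, a more compact variant could use bash's built-in select (a sketch with just a couple of the stations above):
<pre>
#!/bin/bash
# Pick a station from a numbered menu and play it with mplayer
PS3="Select a station: "
select radio in "Los 40 principales" "RNE 1" "Quit"; do
    case $radio in
        "Los 40 principales") mplayer -playlist http://www.los40.com/nuevo_player/40Principales.asx ;;
        "RNE 1") mplayer -playlist http://www.rtve.es/rne/audio/r1live.asx ;;
        "Quit") break ;;
    esac
done
</pre>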
<h1>References</h1>
<ul>
<li> <a href="http://www.ubuntu-es.org">Ubuntu website in Spanish</a> </li>
</ul>
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-08-31/python-para-aprender-a-programar.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-10-16/ampliado-plazo-de-inscripcin-al-iii-concurso-univ-de-software-libre-de-clm.html
---
<file_sep>---
migrated: node/988
layout: post
title: Using Pidgin to join a Jabber room
created: 1219232446
author: nacho
comments: true
category: recipe
tags:
- ocio
---
<blockquote>If you have a jabber account (or gtalk, which is the same thing) you can use Pidgin to join chat rooms in pure IRC style. In this recipe I explain how to join the room of your favourite portal: CRySoL</blockquote>
<h2>Joining a chat</h2>
In Pidgin's main window, click on <strong>Buddies --> Join a chat</strong>
<center>
<img src="/assets/files/salacrysol1.png"/>
</center>
If you know the room details, you can fill them in directly, as shown in the image. If not, you can look for a room in the list of available rooms by clicking on <strong>Room List</strong>.
<center>
<img src="/assets/files/salacrysol2.png"/>
<img src="/assets/files/salacrysol3.png"/>
</center>
<h2>Adding the room as a contact</h2>
The room can also be added as if it were just another jabber contact. Once inside the room, click on <strong>Conversation --> Add</strong>. This way you can always have it at hand, just a double-click away.
<center>
<img src="/assets/files/salacrysol4.png"/>
</center>
To make it connect automatically when you open pidgin, open CRySoL's context menu in the buddy list and tick the auto-join option.
<file_sep>source "http://rubygems.org"
gem "jekyll"
gem "jekyll-paginate"
gem "jekyll-gist"
gem "jekyll-textile-converter"
gem "pygments.rb"
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-17/redes-inalmbricas-multisalto-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-10-07/poner-una-imagen-de-fondo-en-grub.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-05-10/creacin-de-mdulos-e-interfaces-en-nesc-para-tinyos-2.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-12-11/lo-nuevo-de-emacs22.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-04-17/material-para-el-taller-de-videojuegos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /challenge/2008-09-11/mini-reto-estpido-again-and-again.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-07-07/error-para-entrar-a-la-sesion-de-ubuntu-5-10.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-04-12/gnu-emacs-acelerando-el-arranque-con-xresources.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-03-02/notificacin-por-e-mail-de-cambios-en-repositorios-mercurial.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-03-21/descargar-videos-de-youtube-para-verlos-offline.html
---
<file_sep>---
migrated: node/1457
layout: post
title: Se acabó el IPv4
created: 1296832672
author: nacho
comments: true
category: new
tags:
- networking
---
Agotado. Extinto. <a href="http://alt1040.com/2011/02/ya-no-hay-direcciones-ip">No quedan más direcciones IPv4 disponibles</a>. La <acronym title="Internet Assigned Numbers Authority">IANA</acronym> ha asignado el último bloque de direcciones que le quedaba libre.
<!--break-->
Esta carencia de direcciones se venía previendo desde hace muchos años, y el <a href="http://tools.ietf.org/html/rfc1883">IPv6 lleva en marcha desde mediados de los '90</a>. Sin embargo, no se ha terminado de implantar (no sé siquiera si se ha empezado), y ya se está hablando de un <a href="http://es.wikipedia.org/wiki/IPv6#Motivaci.C3.B3n_y_or.C3.ADgenes_de_los_IP">IPv8</a>.
¿Creéis que los sistemas están <a href="/node/1335">preparados para afrontar IPv6</a>? ¿Será un cambio suave (fácil) o nos dará dolores y quebraderos de cabeza? ¿Por qué no se ha puesto ya en explotación masivamente?
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-15/empaquetar-un-script-python-para-debian-y-mantenerlo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-08-13/hasta-luego-sco.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-19/recuperar-un-disco-duro-con-dd_rhelp-o-intentarlo-al-menos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-03-21/nds-y-fonera-por-fin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-06-28/auctex-para-torpes.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-04-06/recuperar-la-clave-de-administrador-de-drupal.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-11-01/sacando-jugo-a-exif-en-consola-y-en-nautilus.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-07/grabar-cds-y-dvds-fcilmente-desde-consola.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-10-24/doble-monitor-con-ati-radeon-hd-3870.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-07-23/preinscripcin-iii-edicin-curso-java-ceslcam.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-02/buscar-fcilmente-en-el-histrico-de-gnu-bash.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-02-25/gnu-is-not-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-02-12/tes-emacs-code-browser-ecb.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-01-26/software-libre-copyleft-y-canon-digital.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-12-05/optimizar-firefox.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-07-23/en-busca-de-la-solucin-definitiva-para-la-autenticacin.html
---
<file_sep>---
migrated: node/1024
layout: post
title: 'udev: Configurando el acceso al USB sin ser root'
created: 1224067192
author: javieralso
comments: true
category: recipe
tags:
- embedded
- drivers
---
<blockquote>Cómo escribir reglas para <a href="http://www.kernel.org/pub/linux/utils/kernel/hotplug/udev.html">udev</a> permitiendo a dispositivos <b>USB</b> ser usados sin necesidad de ser <b>root</b></blockquote>
<h2>Introducción</h2>
A veces sucede que quieres utilizar un dispositivo USB un poco "exótico" y resulta que no puedes utilizarlo sin ser root. Un ejemplo de ello puede ser <a href="http://dfu-programmer.sourceforge.net/">dfu-programmer</a> o <a href="http://groups.google.com/group/pickit-devel/browse_thread/thread/f3478463cf071464">pk2</a>, que son aplicaciones no muy estándar y por lo tanto no se les ha dado el soporte necesario, haciendo que haya que invocarlas como <b>root</b> si queremos utilizarlas.
La solución para ésto pasa por crear unas reglas para <b>udev</b> que permitan que éstas aplicaciones accedan al puerto USB de nuestro PC en modo normal.
<h2>Un ejemplo: dfu-programmer</h2>
Ilustraré el proceso con una aplicación de ejemplo: <b>dfu-programmer</b>. Se trata de un cliente para el bootloader USB instalado en los micros de <a href="http://www.atmel.com">Atmel</a> con soporte para dicha interfaz.
Para añadir las reglas, abrimos (más bien creamos) el archivo que contendrá dichas reglas:
<div class="console">
{% highlight console %}
<EMAIL>@rigoberto:~$ sudo emacs /etc/udev/rules.d/99-dfu-programmer.rules
{% endhighlight %}
</div>
En este archivo, deberemos escribir algo tal que así:
<div>
{% highlight text %}
SUBSYSTEM=="usb", ACTION=="add", SYSFS{idVendor}=="03eb", SYSFS{idProduct}=="2ffb", MODE="660", GROUP="plugdev", SYMLINK+="at90usb-%k"
BUS=="usb", ACTION=="add", SYSFS{idVendor}=="03eb", SYSFS{idProduct}=="2ffb", MODE="660", GROUP="plugdev"
{% endhighlight %}
</div>
Básicamente lo que se hace es indicar a <em>udev</em> cómo tratar el dispositivo <em>USB</em> que usa <em>dfu-programmer</em>, identificándolo por el <em>vendorID</em> y el <em>productID</em> que presenta al conectarse al puerto. También se indica con qué permisos se crea el nodo de dispositivo y a qué grupo pertenece, de forma que cualquier usuario de ese grupo pueda usarlo.
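Para que la regla surta efecto tu usuario debe pertenecer además al grupo <em>plugdev</em> y hay que recargar las reglas de udev. Estos dos pasos no aparecen en la receta original (sustituye <em>tu_usuario</em> por el tuyo y reconecta el dispositivo después):
<div class="console">
{% highlight console %}
$ sudo adduser tu_usuario plugdev
$ sudo /etc/init.d/udev reload
{% endhighlight %}
</div>
Recuerda que el cambio de grupo no se aplica hasta que vuelvas a iniciar sesión.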
<h2>Referencias</h2>
<ul>
<li><a href="http://reactivated.net/writing_udev_rules.html">Writing udev rules</a></li>
</ul>
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-02-20/filtrar-spam-con-sylpheed-y-bogofilter.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-11-18/cree-que-la-administracin-debera-usar-software-libre-en-vez-de-programas-comerciales.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-04/cmo-escribir-una-receta-de-cocina.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2010-09-09/quinto-aniversario.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-02-27/creacin-ultra-rpida-de-libreras-para-kicad.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-01-21/instalar-debian-gnu-linux-en-el-sony-vaio-vgn-sz4xn.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-24/eduroam-1-0-conectar-a-la-nueva-wifi-de-la-universidad.html
---
<file_sep>#!/bin/bash --
# -*- mode:shell-script; coding:utf-8; tab-width:4 -*-
set -e
if [ -d _migrate ]; then
rake migrate_on
fi
if ! [ -d _site ]; then
# git clone <EMAIL>:CRySoL/CRySoL.github.io.git _site
git clone https://github.com/CRySoL/CRySoL.github.io.git _site
fi
git --git-dir=_site/.git --work-tree=$(pwd)/_site pull
jekyll build
echo "$(date)<br/>" >> history.html
# git commit -a -m "automatic compilation"
# git push
touch _site/.nojekyll
echo "pushing _site"
cd _site
rm -rf files node
ln -s p/node node
ln -s assets/files files
git add --all
git commit -a -m "automatic compilation"
git push
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2007-03-21/propuesta-de-creacin-de-sl.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-03-26/pysoya-men-de-ejemplo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-09-07/x-org-con-el-stylus-de-la-tablet-hp-compaq-tc1100-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-27/ah-va-la-virgen-metaclases-con-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2012-02-09/gnu-emacs-24-how-to-install-on-debian-ubuntu.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-03-02/la-in-justicia-francesa.html
---
<file_sep>---
migrated: node/812
layout: post
title: 'Parted y Python: pyparted'
created: 1196886015
author: int-0
comments: true
category: recipe
tags:
- Python
---
<blockquote>Supongo que muchos de vosotros conoceréis <em>libparted</em>, la librería que hay debajo de los programas de particionado típicos de <em>GNU</em>. Muchos de vosotros conocéis también <em>Python</em>, si juntamos estas dos cosas tenemos que hacer programitas que manejen el particionado del disco va a estar al alcance de cualquiera. Llegados a este punto, no sobra decir que: <b>NO me responsabilizo de los daños que pueda causar el uso de esta receta</b>, yo lo he probado con un disco flash y me ha funcionado sin problemas. Quien quiera garantías... que las pague :P.</blockquote>
<!--break-->
<h2>Instalación</h2>
Pues lamentablemente hay que compilar e instalar... no es muy complicado, pero bueno... empecemos por descargarnos los fuentes del módulo de <em>python</em>, los encontramos en <a href="http://dcantrel.fedorapeople.org/pyparted/">http://dcantrel.fedorapeople.org/pyparted/</a>. Hay que tener en cuenta una cosa: a fecha de hoy (4 de diciembre de 2007) en <em>Debian Unstable</em> la versión de <em>libparted-dev</em> empaquetada es la antiquísima <b>1.7</b>, lo cual nos obliga a usar una versión del módulo antigua: la <b>1.8.5</b>. Esto es un poco rollo, pero por lo menos no tenemos que instalar y configurar <em>libparted</em> entera. No he probado versiones más nuevas del módulo de <em>python</em>, salvo la versión <em>1.8.9</em>, que compilaba pero luego no funcionaba.
Descargados los fuentes nos tenemos que instalar ciertas cosillas:
<div class="console">
{% highlight console %}
# apt-get install libparted-dev uuid-dev
{% endhighlight %}
</div>
Si ahora intentáis compilar no os va a funcionar porque el módulo está pensado para el <em>libparted</em> de <em>fedora</em>, que tiene algunos "maquillajes" que, se supone, vienen en los <em>libparted</em> más nuevos (realmente la <em>1.7</em> de <em>Debian</em> es <em>antiquísima</em>), nosotros cambiaremos lo siguiente (abrid como admins el archivo <tt>/usr/include/parted/disk.h</tt>):
<pre>typedef enum {
PED_PARTITION_NORMAL = 0x00,
PED_PARTITION_LOGICAL = 0x01,
PED_PARTITION_EXTENDED = 0x02,
PED_PARTITION_FREESPACE = 0x04,
PED_PARTITION_METADATA = 0x08
} PedPartitionType;</pre>
Lo cambiaremos por:
<pre>typedef enum {
PED_PARTITION_NORMAL = 0x00,
PED_PARTITION_LOGICAL = 0x01,
PED_PARTITION_EXTENDED = 0x02,
PED_PARTITION_FREESPACE = 0x04,
PED_PARTITION_METADATA = 0x08,
PED_PARTITION_PROTECTED = 0x10
} PedPartitionType;</pre>
...y ahora sí, a compilar:
<div class="console">
{% highlight console %}
~$ unp pyparted-1.8.1.tar.gz
~$ cd pyparted-1.8.1
~/pyparted-1.8.1$ ./configure
~/pyparted-1.8.1$ make && sudo make install
{% endhighlight %}
</div>
<h2>Ejemplos de uso</h2>
Para probar el módulo vamos a usar <em>ipython</em>, si no lo tenéis ya sabéis: <em>apt-get</em> o <em>aptitude</em>. Y una cosa más: hay que ejecutarlo en modo <em>superusuario</em> porque si no no podréis abrir los dispositivos de escritura. En los ejemplos vamos a usar un <em>pendrive</em> direccionado en <tt>/dev/sda</tt>. <b>TENED CUIDADO POR SI VUESTRO DISCO PRINCIPAL ES SATA QUE ENTONCES SERÁ /dev/sda</b>. Si queréis hacer las pruebas con un <em>pendrive</em> comprobad con <em>dmesg</em> el nombre del dispositivo. Y una cosa más: si <em>gnome</em> os automonta la unidad, debéis desmontarla primero.
Al tajo, ejecutamos <em>ipython</em>:
<div class="console">
{% highlight console %}
$ sudo ipython
Total number of aliases: 15
Python 2.4.4 (#2, Aug 16 2007, 02:03:40)
Type "copyright", "credits" or "license" for more information.
IPython 0.8.1 -- An enhanced Interactive Python.
? -> Introduction to IPython's features.
%magic -> Information about IPython's 'magic' % functions.
help -> Python's own help system.
object? -> Details about 'object'. ?object also works, ?? prints more.
In [1]:
{% endhighlight %}
</div>
y ahora abrimos la unidad, para ello debemos crear un <em>PedDevice</em> que gestione el dispositivo y con el <em>PedDevice</em> podremos crear un <em>PedDisk</em> que es el objeto que nos permitirá gestionar las particiones, lo haremos todo de una:
<div class="console">
{% highlight console %}
In [1]:import parted
In [2]:myDisk = parted.PedDisk.new(parted.PedDevice.get('/dev/sda'))
{% endhighlight %}
</div>
Para acceder al dispositivo (por ejemplo para obligar a realizar un <em>open()</em>, un <em>close()</em> o un <em>sync()</em>) podemos usar el atributo de clase <em>dev</em>:
<div class="console">
{% highlight console %}
In [3]:myDisk.dev.open()
{% endhighlight %}
</div>
A lo que vamos: leer la información de las particiones. Podemos ver cuántas particiones tenemos y luego recorrer una a una la lista. Hasta lo que creo, no hay un iterador <em>bonico</em> para ello, supongo que en versiones más nuevas ya estará añadido.
<div class="console">
{% highlight console %}
In [4]:myDisk.get_last_partition_num()
Out[4]:1
In [5]:partition = myDisk.get_partition(1)
In [6]:partition.type_name
Out[6]:'primary'
In [7]:geometria = partition.geom
In [8]:geom2 = geometria.duplicate()
In [9]:print geometria.start, geometria.end
19 63359
{% endhighlight %}
</div>
Ahora vamos a hacer cosas <em>divertidas</em>, <b>OJO QUE ESTO ES PELIGROSO</b>, empezamos borrando TODAS las particiones:
<div class="console">
{% highlight console %}
In [10]:myDisk.delete_all()
In [11]:print myDisk.get_partition(1).geom.start
---------------------------------------------------------------------------
parted.error Traceback (most recent call last)
/home/tobias/pyparted-1.7.3/<ipython console>
error: partition not found
{% endhighlight %}
</div>
Ahora, para crear una partición, necesitamos: un <em>tipo</em> (primaria, lógica, extendida, etc.), un sector inicial y una longitud. Esta partición va dentro de unos límites, que pueden ser el disco físico o una partición extendida; esos límites los definimos al añadir la partición al disco, pero no al crearla:
<div class="console">
{% highlight console %}
In [12]:myPartition=myDisk.partition_new(parted.PARTITION_PRIMARY,None,1,4000)
{% endhighlight %}
</div>
El <em>None</em> es el <em>filesystem</em> que llevará la partición, podemos establecerlo en la creación, o a <em>posteriori</em> (como el resto de parámetros):
<div class="console">
{% highlight console %}
In [13]:myFs = parted.file_system_type_get('fat32')
In [14]:myPartition.set_system(myFs)
{% endhighlight %}
</div>
Ahora añadimos la partición al disco, dentro de unos <em>límites</em>, es este caso es todo el disco físico:
<div class="console">
{% highlight console %}
In [14]:entireDisk = parted.PedDevice.get('/dev/sda').constraint_any()
In [15]:myDisk.add_partition(myPartition, entireDisk)
In [16]:myDisk.commit()
Out[16]:1
{% endhighlight %}
</div>
Ese <em>1</em> indica que todo ha ido bien, ahora podemos comprobar con <em>fdisk</em> los destrozos causados :P...
Usando el <em>completion</em> de <em>ipython</em> podemos encontrar muchos métodos interesantes para cálculo de sectores físicos/lógicos, maximización de particiones, etc. De todas formas, para haceros una idea, os puede ser útil la <a href="http://www.gnu.org/software/parted/api/index.html">API de libparted</a>. Por último, sólo aclarar que esta receta no va sobre un programa para crear/borrar particiones (para eso tenemos <em>gparted</em>); aquí sólo hemos explicado un poquillo cómo usar los métodos del módulo <em>pyparted</em> por si queréis hacer programillas y tal... ;)
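A modo de resumen, dejo un pequeño script de ejemplo (un boceto orientativo que usa la misma API 1.8 de arriba y, como el resto de la receta, sin garantía alguna) que se limita a listar las particiones de un dispositivo:
<div>
{% highlight python %}
#!/usr/bin/env python
# Boceto: listar las particiones de un dispositivo con pyparted 1.8.
# Ejecutar como root y con el dispositivo desmontado.
import sys
import parted

device = '/dev/sda'
if len(sys.argv) > 1:
    device = sys.argv[1]

disk = parted.PedDisk.new(parted.PedDevice.get(device))
for n in range(1, disk.get_last_partition_num() + 1):
    try:
        part = disk.get_partition(n)
    except parted.error:
        continue  # numero de particion sin usar
    print '%s%d: %s (sectores %d - %d)' % (device, n, part.type_name,
                                           part.geom.start, part.geom.end)
{% endhighlight %}
</div>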
<h2>Enlaces</h2>
Ya están por la receta... :P
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2007-05-21/quijote-informacin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2007-02-26/lentejas-libres.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-07-20/escner-epson-v10-en-debian.html
---
<file_sep>---
migrated: node/1138
layout: post
title: Configurar HTC Magic (G2) con Android en Debian GNU/Linux
created: 1248435217
author: cleto
comments: true
category: recipe
tags:
- drivers
- Debian
---
<blockquote>Esta receta explica cómo configurar tu máquina Debian para poder empezar a desarrollar programas con tu flamante móvil HTC Magic (G2).</blockquote>
<h2>Introducción</h2>
Existe un SDK con multitud de herramientas para desarrollar sobre teléfonos Android (como es el caso del HTC Magic que, a día de hoy, Vodafone vende en exclusividad). Sin embargo, para sacarle todo el partido al entorno de desarrollo es necesario tener configurado correctamente tu ordenador para que al enchufar el móvil vía USB puedas utilizar las herramientas del SDK. En esta receta se explica precisamente esto último sobre un sistema Debian GNU/Linux.
<h2>Requisitos</h2>
<ul>
<li><a href="http://developer.android.com/sdk/1.5_r3/index.html">SDK de Android v1.5</a></li>
<li>Ordenador con Debian GNU/Linux sid.</li>
<li>Móvil HTC Magic.</li>
<li>Cable USB del móvil.</li>
</ul>
<h2>Configuración</h2>
<p>
<h3>En el PC</h3>
Para evitar problemas, asegúrate de que el teléfono <b>NO</b> se encuentra enchufado al ordenador. A continuación, creamos el grupo 'androiddev':
<div class="console">
{% highlight console %}
$ sudo addgroup --system androiddev
{% endhighlight %}
</div>
Posteriormente, crea el fichero /etc/udev/rules.d/50-android.rules con el siguiente contenido:
<pre>
SUBSYSTEMS=="usb", ATTRS{idVendor}=="0bb4", ATTRS{idProduct}=="0c02", GROUP="androiddev", SYMLINK+="android%n"
</pre>
Le damos permisos de lectura y ejecución y recargamos las reglas de udev:
<div class="console">
{% highlight console %}
$ sudo chmod a+rx /etc/udev/rules.d/50-android.rules
$ sudo /etc/init.d/udev reload
{% endhighlight %}
</div>
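Además, tu usuario debería pertenecer al grupo <em>androiddev</em> que acabamos de crear (la regla lo asume, aunque este paso no figura arriba; sustituye <em>tu_usuario</em> por el tuyo y vuelve a iniciar sesión después):
<div class="console">
{% highlight console %}
$ sudo adduser tu_usuario androiddev
{% endhighlight %}
</div>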
<h3>En el móvil</h3>
Primero, asegúrate de que el teléfono tiene el modo USB de depuración <b>activado</b>. Esto se encuentra en Ajustes->Aplicaciones->Desarrollo.
Una vez comprobado, conecta el teléfono al PC.
<h2>Probando que todo funciona...</h2>
Descomprime el SDK que te has descargado en el directorio que prefieras. Dentro del árbol de directorios que se ha generado hay uno llamado "tools". Dentro de él, hacemos:
<div class="console">
{% highlight console %}
$ ./adb start-server
$ ./adb shell
{% endhighlight %}
</div>
Si todo ha ido bien, tendrás una bonita shell dentro de tu móvil. Listemos los paquetes Android (ficheros .apk) que tenemos:
<div class="console">
{% highlight console %}
$ ls /system/app/
{% endhighlight %}
</div>
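Si la shell no llega a abrirse, un par de órdenes de adb ayudan a diagnosticar el problema (no forman parte de la prueba original):
<div class="console">
{% highlight console %}
$ ./adb devices
$ ./adb logcat
{% endhighlight %}
</div>
La primera debería listar el teléfono en estado "device"; la segunda muestra el log del sistema en tiempo real.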
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-02-09/el-porqu-del-cracking.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-07-03/pequeo-faq-de-mailman.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-03-03/hoy-no-hay-party.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-03-14/conoces-fon.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-05-15/ajustes-al-tinyos-2.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-11/instalacin-de-nino-en-debian-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-06-20/manual-bsico-de-iproute2.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2011-03-08/agilismo-dejar-de-hacer-el-gili.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-04-04/wii-mando-con-libwiimote-en-gnu-linux-incluye-video-demo-p.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-07-25/quin-dice-que-no-hay-arte-en-la-programacin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-09-20/hercules-webcam-deluxe-bajo-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-03-02/gnesis-3-0-frincon-minimal-pre-release-liberada.html
---
<file_sep>---
migrated: node/360
layout: post
title: Arreglar gnome-terminal que no muestra tildes ni eñes
created: 1153985554
author: magmax
comments: true
category: recipe
tags:
- gnome
---
<blockquote>Desde hace algún tiempo (acabo de descubrir que muy posiblemente desde GNOME 2.14), el terminal de GNOME me muestra las tildes, eñes y otros caracteres como caritas. Aunque al principio resulta gracioso, terminas cansándote :-D. Aquí lo arreglaremos de la manera más sencilla.
</blockquote>
<h1>Arreglarlo</h1>
Para arreglarlo basta con editar el fichero <tt>~/.dmrc</tt> y añadir al final la línea <tt>Language=es_ES.iso88591</tt>. Si lo preferís, basta con ejecutar lo siguiente:
<div class="console">
{% highlight console %}
$ echo "Language=es_ES.iso88591" >> .dmrc
{% endhighlight %}
</div>
<h1>¿Qué estaba pasando?</h1>
Lo que pasa es que en los GNOMEs anteriores se tomaban las locales del sistema, pero parece que en las últimas no lo hace muy bien. Por ello, al fallar la carga, tomaba las locales por defecto, es decir, ANSI_X3.4-1968 que no tiene tildes y demás.
Yo sabía que se podía cambiar en el menú del gnome-terminal, en "terminal"->"Establecer codificación de caracteres", pero es un rollo tener que hacerlo para cada pestaña del gnome-términal. Así, sólo lo hacía cuando tenía que leer algo un poco grande :-D
Editando el fichero .dmrc le decimos a GDM cuál es el "encoding" que queremos utilizar, pudiendo elegir entre los que configuramos con el paquete locales. Para cambiar los "encodings" compilados en "locales", en Debian se hace así (los chicos de UBUNTU no sé cómo leches lo harán):
<div class="console">
{% highlight console %}
# dpkg-reconfigure locales
{% endhighlight %}
</div>
Y para saber cuáles son los disponibles (qué cadenitas poner en "Language=XXXXXXX"), basta con hacer:
<div class="console">
{% highlight console %}
$ locale -a
{% endhighlight %}
</div>
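Y para comprobar qué codificación está usando realmente tu sesión una vez reiniciada (tras el cambio debería aparecer algo como ISO-8859-1):
<div class="console">
{% highlight console %}
$ locale charmap
{% endhighlight %}
</div>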
<h1>Enlaces</h1>
Llevaba bastante tiempo buscándolo y al final lo encontré en el <a href="http://www.davidpashley.com/cgi/pyblosxom.cgi/computing/gnome-terminal.html">blog de <NAME></a>. Este documento es, básicamente, una transcripción de lo que se comenta en el enlace.
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-05-17/traffic-shaping-y-qos-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-12-05/glib-io-channels-con-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /challenge/2006-03-07/el-reto-de-la-semana-3.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-08-04/citas-de-dijkstra.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-10-24/blender-efecto-de-resplandor-glow.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-11-27/bolsa-de-trabajo-de-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-05-28/grficas-con-python-y-gnuplot.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-02-14/buensimo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2010-01-18/cuando-lo-decamos-nos-tachaban-de-locos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-04-29/dos-crysoleros-ganan-en-el-concurso-de-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-19/dashboard-process-ayuda-para-psp.html
---
<file_sep>console.log('This would be the main JS file.');
function show_comments() {
document.getElementById("disqus_thread").style.display = 'block';
document.getElementById("show_comments").style.display = 'none';
}
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-07-21/software-para-electrnicos-kicad.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-03-18/easycap-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-05-18/acelerando-ssh.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-11-16/blender-herramientas-de-seleccin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-18/conectar-a-la-nueva-wifi-de-la-uclm-aka-eduroam-con-network-manager.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-01-06/gnome-vfs-con-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-02-21/entrevista-a-alexey-leonidovich-pazhitnov.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-05-25/python-en-android.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-03-23/manos-libres-bluetooth-con-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2008-02-14/kicad-conclusiones.html
---
<file_sep>---
migrated: node/765
layout: post
title: 'Grandes inventos de Microsoft: El ratón y la interfaz gráfica.'
created: 1188052321
author: magmax
comments: true
category: tale
tags:
- ocio
---
<blockquote>
Cualquiera que lea el título puede caer en la desgracia de creérselo. Sin embargo, son afirmaciones que cualquiera que no sepa un poquitín de historia cree. He aquí una pequeña historieta con lo que me ha pasado a mí para descubrir al inventor del ratón.
</blockquote>
<!--break-->
<h1>Fue Xerox</h1>
Hace tiempo vi una película llamada "Los piratas de Silicon Valley" (que, como todos sabemos, es "el Valle de la Silicona" debido a las cantidades de Silicona que fabrican :P). Os destriparé un trozo por si no la habéis visto: <NAME> logra meter a todo el departamento de desarrollo de Apple en la empresa Xerox, y allí les enseñan sus tesoros más preciados: el ratón y la interfaz gráfica. Rápidamente, Apple se pone a plagiar estos inventos, contándole a su aliado, <NAME>, de qué van. Unos días antes del lanzamiento del nuevo Mac (Lisa, me parece recordar), Microsoft saca un sistema operativo con entorno gráfico y... sí, y ratón.
Desde entonces pensé que el inventor del ratón era Xerox. No me planteé contrastar la información ni nada por el estilo. Sin embargo, las <a href="http://mnm.uib.es/gallir/posts/2007/08/11/1142">10 razones de que no eres tan buen informático como piensas</a> me hicieron pensar, y decidí buscar a aquéllos de la segunda de las razones que no conocía.
<h1><NAME></h1>
Inventor estadounidense que participó en el antecesor de Internet, llamado Arpanet. Es el típico tío que se adelanta a su tiempo, y sus ideas no se aceptaron, por lo que en 1978 le retiraron todas las subvenciones y su laboratorio se deshizo. La mayor parte de este laboratorio pasó a formar parte de la plantilla de Xerox (qué curioso, ¿no?). Se le atribuyen inventos como el ratón, la interfaz gráfica, las ventanas múltiples y el software multiusuario. Obra y milagros en la <a href="http://es.wikipedia.org/wiki/Douglas_Engelbart">wikipedia</a>.
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-01-27/autenticacin-pam-en-drupal.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-02-15/usar-un-repositorio-cvs.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-01-05/debian-en-cualquier-parte-con-un-cd-y-un-disco-usb.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-11-14/bolsa-de-trabajo-ceslcam.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-01-24/router-casero-con-debian-en-el-fit-pc-1-0.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2008-11-19/ayuda-con-molinux-acceso-a-programas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-07-13/software-libre-para-la-docencia.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-02-22/instalar-torrenflux-b4rt-en-debian.html
---
<file_sep>---
migrated: node/692
layout: post
title: Marcar y clasificar tráfico con iptables y tc
created: 1179942837
author: felix
comments: true
category: recipe
tags:
- networking
- Arco
---
<blockquote>
Siguiendo el hilo de la receta anterior, vamos a ver cómo podemos clasificar el tráfico que generamos para meterlos en las colas que vamos a crear y así clasificar el tráfico. Para ello, aunque existen multitud de formas de clasificar el tráfico vamos a utilizar el campo TOS (type of service) del protocolo IP.
</blockquote>
<h2>Marcando paquetes</h2>
Bueno, está claro que si queremos establecer clases de tráfico podemos hacer dos cosas: una, marcar el tráfico en el origen y limitarnos a clasificarlo en el nodo que hace de enlace con, por ejemplo, Internet; y otra, no marcar en el origen y filtrarlo (además de clasificarlo) todo en el nodo que hace de enlace.
Si nos imaginamos un nodo con dos interfaces, una conectada a Internet y otra a una red ethernet a través de la cual se conectan otros nodos, podemos establecer reglas de marcado en esos nodos mientras que en el enlace simplemente clasificamos el tráfico que le llega en el sistema de colas.
El problema de tener que marcar en el origen es que pueden existir nodos que no marcan sus paquetes o que los marcan mal para obtener el ancho de banda asociado a un tráfico de prioridad más alta. Vamos a considerar que todos los nodos son buenos y marcan sus paquetes.
Para marcar un paquete debemos usar iptables. Si observamos una regla de iptables para marcar paquetes:
<div class="console">
{% highlight console %}
#iptables -t mangle -I POSTROUTING 1 -p tcp -s 192.168.127.12 -j TOS --set-tos 16
{% endhighlight %}
</div>
Vemos que lo primero que nos encontramos es el "-t mangle", que indica la tabla donde vamos a operar. Por la tabla mangle pasan todos los paquetes, por lo que no se nos escapa ninguno; si quieres ver una descripción de las tablas y su uso, mira <a href="http://es.wikipedia.org/wiki/Netfilter/iptables">aquí</a>.
Lo siguiente que viene es "-I POSTROUTING", que nos indica dónde vamos a aplicar la clasificación; como hemos dicho antes, estamos marcando el tráfico que el nodo genera, por lo que lo marcamos después del enrutado de los paquetes. En este punto hay que hacer una pequeña parada: el sitio ideal para marcar un paquete originado en el propio sistema es OUTPUT, ya que por ahí pasa todo el tráfico generado en el nodo; no obstante, a mí me interesa hacerlo en POSTROUTING porque quiero marcar también tráfico que pasa por mi sistema y que no está marcado (cosas del curro :-)).
El 1 que viene a continuación es el número de la regla que vas a crear; acuérdate de esto si quieres borrarla luego.
A continuación vienen las condiciones que debe cumplir el paquete para marcarlo; en nuestro caso buscamos todo el tráfico tcp que viene de la dirección 192.168.127.12: "-p tcp -s 192.168.127.12". Y por último especificamos qué queremos hacer con esos paquetes: en nuestro caso vamos a poner el valor 16 en el campo TOS.
Si queremos ver cómo queda la regla después de ejecutarla en nuestro sistema:
<div class="console">
{% highlight console %}
# iptables -L -t mangle
Chain PREROUTING (policy ACCEPT)
target prot opt source destination
Chain INPUT (policy ACCEPT)
target prot opt source destination
Chain FORWARD (policy ACCEPT)
target prot opt source destination
Chain OUTPUT (policy ACCEPT)
target prot opt source destination
Chain POSTROUTING (policy ACCEPT)
target prot opt source destination
TOS tcp -- 192.168.127.12 anywhere TOS set Minimize-Delay
{% endhighlight %}
</div>
Para borrar todas las reglas de marcado, en el caso de la tabla mangle:
<div class="console">
{% highlight console %}
#iptables -F -t mangle
{% endhighlight %}
</div>
Obviamente podemos especificar el marcado de los paquetes como queramos y atendiendo a multitud de parámetros. Veamos algunos ejemplos:
Especificar que todo el tráfico de salida de un host que sale del puerto 12345 se marque con el valor 4 en el campo TOS:
<div class="console">
{% highlight console %}
#iptables -t mangle -I OUTPUT -p tcp --sport 12345 -s localhost -j TOS --set-tos 4
{% endhighlight %}
</div>
En general, después de la opción -p puedes poner cualquier tipo de protocolo (si está incluido en /etc/protocols). También existe la posibilidad de marcar los paquetes que van a un destino específico (con -d), que entraron por una interfaz concreta, que van a salir por otra, etc. Además existen módulos asociados a protocolos que enriquecen las posibilidades; vamos, que estudiando un poco seguro que puedes marcar y filtrar lo que quieras.
Por ejemplo, un paquete muy majo es <a href="http://l7-filter.sourceforge.net/">éste</a>, que te permite filtrar de forma cómoda por protocolos de la capa de aplicación y habilitar reglas como ésta:
<div class="console">
{% highlight console %}
#iptables -t mangle -A FORWARD -p tcp -m ipp2p --kazaa ... acción
{% endhighlight %}
</div>
Vamos, te marca los paquetes de Kazaa.
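Como apunte que no aparece en la receta original: si no quieres tocar el campo TOS del paquete, iptables también permite ponerle una marca interna con el target MARK, que luego se clasifica en tc con el filtro fw. Un boceto (el puerto 22 es sólo un ejemplo):
<div class="console">
{% highlight console %}
#iptables -t mangle -A POSTROUTING -p tcp --dport 22 -j MARK --set-mark 4
#tc filter add dev eth0 parent 1:0 protocol ip prio 6 handle 4 fw flowid 1:4
{% endhighlight %}
</div>
La marca, a diferencia del TOS, no viaja en el paquete: sólo existe dentro del kernel del propio nodo, así que esta variante sirve cuando el marcado y la clasificación se hacen en la misma máquina.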
<h2> Creando colas </h2>
Como vimos en la receta anterior, para crear colas, usamos la herramienta tc.
Por ejemplo si queremos establecer cuatro colas, cada una para un tipo de tráfico distinto, y que tengan el mismo ancho de banda, una estructura como esta sería apropiada:
<div class="console">
{% highlight console %}
#tc qdisc add dev eth0 root handle 1: htb default 1
#tc class add dev eth0 parent 1: classid 1:1 htb rate 3125kbps
#tc class add dev eth0 parent 1: classid 1:2 htb rate 3125kbps
#tc class add dev eth0 parent 1: classid 1:3 htb rate 3125kbps
#tc class add dev eth0 parent 1: classid 1:4 htb rate 3125kbps
{% endhighlight %}
</div>
Tal y como hemos creado las estructuras, el ancho de banda queda dividido en partes fijas. Es necesario resaltar que, para que las clases compartieran el ancho de banda, deberíamos crear una clase hija de root de la cual «colgar» la clasificación (más abajo hay un boceto de cómo quedaría). De esta forma, si una clase de tráfico no ocupa su ancho de banda, el que sobra se reparte entre las otras clases de tráfico.
Nuestras colas, quedan, por lo tanto:
<div class="console">
{% highlight console %}
#tc qdisc show dev eth0
class htb 1:1 root prio 0 rate 25000Kbit ceil 25000Kbit burst 14096b cburst 14096b
class htb 1:2 root prio 0 rate 25000Kbit ceil 25000Kbit burst 14096b cburst 14096b
class htb 1:3 root prio 0 rate 25000Kbit ceil 25000Kbit burst 14096b cburst 14096b
class htb 1:4 root prio 0 rate 25000Kbit ceil 25000Kbit burst 14096b cburst 14096b
qdisc htb 1: r2q 10 default 1 direct_packets_stat 0
{% endhighlight %}
</div>
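Como se comentaba antes, si quisiéramos que las clases compartieran el ancho de banda sobrante habría que colgarlas de una clase intermedia y usar el parámetro ceil. Un boceto con los mismos números de ejemplo (los filtros del apartado siguiente apuntarían entonces a 1:10, 1:20, etc.):
<div class="console">
{% highlight console %}
#tc qdisc add dev eth0 root handle 1: htb default 10
#tc class add dev eth0 parent 1: classid 1:1 htb rate 12500kbps
#tc class add dev eth0 parent 1:1 classid 1:10 htb rate 3125kbps ceil 12500kbps
#tc class add dev eth0 parent 1:1 classid 1:20 htb rate 3125kbps ceil 12500kbps
#tc class add dev eth0 parent 1:1 classid 1:30 htb rate 3125kbps ceil 12500kbps
#tc class add dev eth0 parent 1:1 classid 1:40 htb rate 3125kbps ceil 12500kbps
{% endhighlight %}
</div>
Así cada clase tiene garantizados 3125kbps, pero puede llegar hasta el total (ceil) si las demás no lo están usando.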
<h2>Clasificando paquetes</h2>
Una vez que tenemos el tráfico marcado y las clases de tráfico creadas, debemos establecer las reglas para encolar el tráfico en su clase correspondiente, dentro de la clasificación de colas preestablecida.
Para ello, de nuevo utilizamos la herramienta tc:
<div class="console">
{% highlight console %}
#tc filter add dev eth0 parent 1:0 protocol ip prio 10 u32 match ip tos 0x10 0xff flowid 1:4
#tc filter add dev eth0 parent 1:0 protocol ip prio 9 u32 match ip tos 0x08 0xff flowid 1:3
#tc filter add dev eth0 parent 1:0 protocol ip prio 8 u32 match ip tos 0x04 0xff flowid 1:2
#tc filter add dev eth0 parent 1:0 protocol ip prio 7 u32 match ip tos 0x02 0xff flowid 1:1
{% endhighlight %}
</div>
Con estas instrucciones le indicamos que vamos a crear un filtro "tc filter" y asociarlo a la interfaz eth0 "add dev eth0" y a la clase root "parent 1:0" (la qdisc que hemos creado antes). Este filtro es sobre el protocolo ip "protocol ip" y le fijamos una prioridad para indicar en qué orden se aplican los filtros "prio 10" (cuanto menor es el número, antes se evalúa el filtro). A continuación vienen las condiciones que debe cumplir nuestro paquete (con el filtro u32); en nuestro caso, como hemos utilizado el campo TOS, lo especificamos con "match ip tos 0x10 0xff" y, por último, el flujo al que pertenece si cumple los criterios: "flowid 1:4".
Si queremos ver que todo se ha configurado bien:
<div class="console">
{% highlight console %}
# tc filter show dev eth0
filter parent 1: protocol ip pref 7 u32
filter parent 1: protocol ip pref 7 u32 fh 803: ht divisor 1
filter parent 1: protocol ip pref 7 u32 fh fc00:e968:6179::de52:7100 order 2048 key ht 803 bkt 0 flowid 1:1
match 00020000/00ff0000 at 0
filter parent 1: protocol ip pref 8 u32
filter parent 1: protocol ip pref 8 u32 fh 802: ht divisor 1
filter parent 1: protocol ip pref 8 u32 fh fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b order 2048 key ht 802 bkt 0 flowid 1:2
match 00040000/00ff0000 at 0
filter parent 1: protocol ip pref 9 u32
filter parent 1: protocol ip pref 9 u32 fh 801: ht divisor 1
filter parent 1: protocol ip pref 9 u32 fh fc00:e968:6179::de52:7100 order 2048 key ht 801 bkt 0 flowid 1:3
match 00080000/00ff0000 at 0
filter parent 1: protocol ip pref 10 u32
filter parent 1: protocol ip pref 10 u32 fh 800: ht divisor 1
filter parent 1: protocol ip pref 10 u32 fh fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b order 2048 key ht 800 bkt 0 flowid 1:4
match 00100000/00ff0000 at 0
#
{% endhighlight %}
</div>
<h2>Comprobando que todo funciona</h2>
Vale, y todo esto, ¿cómo comprobamos que funciona? Bueno, hay un script en perl que te permite visualizar las clases de tráfico que hay en tu sistema, el dispositivo y el tráfico que entra por cada clase en tiempo real:
<div class="console">
{% highlight console %}
#./monitor_tc_top.pl
18:49:55 up 1 day, 2:09, 5 users, load average: 0.20, 0.17, 0.17
Interval Cumulated Total
Dev Classid Tokens Ctokens Rate Speed Send Send
-------------------------------------------------------------------------
eth0 1:1 4558 4558 24.47KB 147B/s 103.17KB 333.33KB
eth0 1:2 4620 4620 0B 0B/s 0B 0B
eth0 1:3 4620 4620 0B 0B/s 0B 0B
eth0 1:4 4620 4620 0B 0B/s 0B 0B
{% endhighlight %}
</div>
Puedes bajártelo de <a href="http://www.docum.org/docum.org/monitor/"> aquí</a>. El script no está mantenido, pero de momento funciona bastante bien. Si observáis la ejecución del comando, sólo va tráfico a la clase 1, que es la de por defecto; se puede generar tráfico con una herramienta (tipo scapy) para comprobar qué paquetes van a cada una de las colas.
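Por ejemplo, con scapy se pueden fabricar unos cuantos paquetes con el TOS que queramos y observar en el monitor cómo se reparten entre las colas. Un boceto (la dirección de destino es inventada, hay que ejecutarlo como root y en versiones antiguas de scapy el import era "from scapy import *"):
<div>
{% highlight python %}
#!/usr/bin/env python
# Boceto: genera 100 paquetes TCP con TOS=0x10 para probar la clase 1:4
from scapy.all import IP, TCP, send

send(IP(dst="192.168.1.100", tos=0x10) / TCP(dport=80), count=100)
{% endhighlight %}
</div>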
<file_sep>---
layout: refresh
refresh_to_post_id: /script/2007-07-14/python-lirc.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-07-22/transformar-un-servidor-fisico-linux-en-servidor-virtual.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-12-30/montar-una-particin-de-un-disco-vdi-de-virtualbox.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2010-09-15/el-programa-no-ejecuta-el-archivo-de-video.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /script/2006-07-25/convertir-a-comic.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-02-15/instalacin-de-moin-wiki-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-11-28/no-dejes-en-el-equipo-tus-contraseas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2011-03-23/entrevista-a-richard-m-stallman-en-baqua-tv.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-06/lg-l204-wt-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-10/netcat-la-navaja-suiza-de-tcp-ip.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2010-05-27/juicio-en-luxembugo-contra-el-canon.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-03-26/cmake-enlazado-de-libreras.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-07-05/desnudando-a-la-nds-lite.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-09-03/la-forma-cannica-ortodoxa.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-03-02/la-que-nos-espera.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-01-19/pap-de-donde-vienen-las-placas-base.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-11-28/creacin-de-un-plugin-de-munin-para-mldonkey.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-12-25/hp-deskjet-720-en-debian-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-07-12/enlaces-tiles-para-chumby.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2010-06-30/escndalo-dell-simplemente-no-compres-dell.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-12-17/escuchar-los-40-principales-por-internet-con-mplayer.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-09-20/lego-mindstorms-nxt-bluetooth.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-11-30/problemas-con-particiones.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-01-26/yago-un-robotillo-libre-basado-en-avr.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-01-05/usabilidad-que-no-entiendo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-06-20/el-formato-de-word-es-bueno-pero-no-para-ti.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-08-28/aire-fresco-en-el-entretenimiento.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-07-14/instalar-gnu-linuxcell-sdk-2-1-en-playstation-3.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-04-25/atributos-con-tipado-esttico-en-python-usando-un-descriptor.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-08-19/linux-vserver-en-5-minutos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-02-18/migrar-el-directorio-home.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-12-03/500-euros-para-el-mejor-software-libre-de-2009.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-10-20/cumpleaos-feliz.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-26/presumiendo-de-aceleracin-3d-en-gnu-linux-con-3ddesktop.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-12-03/tslib-librera-para-la-pantalla-tctil-del-chumby.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-11-07/cmo-ser-un-gnesis-maintainer.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-02-26/problemas-con-adsl-telefnica-y-debian-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-01-12/puertos-en-el-pic16f690.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-05-02/personalizar-los-plugins-de-bsqueda-de-firefox.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-11-02/wii-mote-ese-extrao-mando-a-distancia.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-08-23/m-invita-a-firefox-a-sus-instalaciones.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-05-29/personalizar-gdm3.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-06-14/radio-por-internet.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-06/duke-nukem-3d-en-gnu-linux-come-get-some.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-12-04/nuevo-sistema-antipiratera-de-vista-sp1-dar-la-brasa.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-06-02/ver-dvds-en-ubuntu-o-debian-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-10-22/blender-efecto-de-profundidad-de-campo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-06-04/es-como-lego.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-11-14/jornadas-sobre-el-uso-del-software-libre-y-la-incorporacin-de-las-tic-en-las-empresas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-03-25/programacin-de-shaders-glsl-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-25/instalar-programas-con-stow.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-08-29/atheist.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-04-10/el-sistema-operativo-ncleo-2-6-12.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /art/2007-02-26/wallpaper-gnesis-3-0.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /art/2007-03-11/imagen-para-el-trmino-cocina.html
---
<file_sep>---
migrated: node/1692
layout: post
title: Desactivar traducciones de índices de repositorios Debian
created: 1354294546
author: david_villa
comments: true
category: recipe
tags:
- Debian
---
Te suena haber visto últimamente cosas como ésta al hacer @apt-get update@:
<div class="console">
{% highlight console %}
{% endhighlight %}
</div>
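La forma habitual de desactivar esas descargas (no sé si era exactamente la que proponía el autor) es decirle a apt que no baje ningún índice de traducción con la opción <tt>Acquire::Languages</tt>:
<div class="console">
{% highlight console %}
$ echo 'Acquire::Languages "none";' | sudo tee /etc/apt/apt.conf.d/99translations
$ sudo apt-get update
{% endhighlight %}
</div>
Si se quiere conservar sólo el español, se puede poner <tt>Acquire::Languages "es";</tt> en su lugar.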
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-12-27/accept-un-decorador-para-type-checking-verstil-en-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-12-06/mantener-limpia-tu-debian.html
---
<file_sep>---
migrated: node/1076
layout: post
title: 'Gimp: recortar una imagen en línea de órdenes'
created: 1231312811
author: magmax
comments: true
category: script
tags:
- graphics
---
<blockquote>A menudo he necesitado este script y por fin me he puesto a implementarlo. Consiste en un script-fu que se le puede pasar a gimp para recortar una imagen de forma automática. Contaré los pasos que he seguido y al final hay un resumen para 'ir más rápido'.</blockquote>
<h2>Ingredientes</h2>
No os lo vais a creer: gimp.
<div class="console">
{% highlight console %}
# apt-get install gimp
{% endhighlight %}
</div>
Seguro que en Mindows es más fácil de instalar.
<h2>Mi primer script</h2>
Lo primero que he necesitado ha sido una manera de ejecutar un script. Para ello, he abierto mi imagencilla con Gimp de la forma habitual:
<div class="console">
{% highlight console %}
$ gimp tab1.png
{% endhighlight %}
</div>
Después hay que irse a la ventana principal de Gimp, donde están las herramientas, y acceder a "Exts"->"Script-fu"->"Consola de Script-fu". Se nos abrirá una ventana bastante fea. Antes de avanzar, pulsamos el botón "examinar". Se nos abrirá la ventana de ayuda que podemos tener danzando por ahí mientras nos damos cabezazos contra la pared con nuestro script.
Vamos con mi "hola mundo". En esta ocasión, consistió en seleccionar toda la imagen :-D
<pre>
(gimp-selection-all 1)
</pre>
Ahora lo explico: estoy llamando a la función "gimp-selection-all" con el parámetro "1", que es el identificador de la imagen (como la acabamos de abrir, debería ser el 1 :D). Para comprobar el resultado, vamos a la ventana de la imagen y debería estar seleccionado todo.
<h2>El Script</h2>
Ahora que sabemos usar perfectamente script-fu, vamos con el script que recorta una imagen:
<div>
{% highlight text %}
(let* \
( \
( img (gimp-file-load 0 "/ruta_hasta_la_imagen/tab1.png" "tab1.png") ) \
( drw (gimp-image-get-active-drawable (car img)) ) \
) \
(plug-in-autocrop 0 (car img) (car drw) )\
(plug-in-colortoalpha 1 (car img) (car drw) '(0 0 0) ) \
(gimp-file-save 1 (car img) (car drw ) "/ruta_hasta_la_imagen/ejemplo.png" "ejemplo.png") \
)
{% endhighlight %}
</div>
Lo he separado en líneas para que lo veáis bonito :D. La barra invertida: "\" sólo sirve para indicar que va todo en la misma línea.
Ahora explico: "let*" me permite definir variables. Así defino dos: una es "img" y otra "drw". Casi todas las funciones requieren estos parámetros, así que me los guardo "para luego" y así es más fácil todo. Al declarar "img" le asigno el valor devuelto por la función "gimp-file-load", que me ha cargado la imagen, pasándole un "0" ("no interactivo"), la ruta hasta la imagen y "el nombre dado por el usuario", que creo que es el nombre de la imagen sin el path.
Igualmente, a "drw" le asigno el resultado de "gimp-image-get-active-drawable", donde " (car img)" es una llamada a una función que me da el primer elemento del vector "img" (no sé explicarlo mejor. No he conseguido quitarme de encima el "car" ése).
Con eso ya tengo las dos variables locales que, como locales que son, sólo existirán hasta que cierre el "let*" (por eso lo cierro al final).
El primer argumento del resto de funciones siempre es el modo interactivo, el problema es que de homogeneidad nada: unas funciones toman el 0 como "interactivo" y otras como "no-interactivo". En el ejemplo todas están a "no-interactivo".
Por orden, hago: cargar la imagen (como hemos visto), recortar la imagen, transformar el color negro (el '(0 0 0) del script) a alfa (éste es un bonus track) y guardar la imagen.
No me enrollo más.
<h2>Batch mode </h2>
Claro, nada de esto tiene gracia si tengo que estar abriendo el gimp para cada una de mis 159 imágenes que quiero procesar... Así que a currarme la línea de órdenes:
<div class="console">
{% highlight console %}
$ gimp -i -b '(let* ( ( img (gimp-file-load 0 "/ruta_hasta_la_imagen/tab1.png" "tab1.png") ) ( drw (gimp-image-get-active-drawable (car img)) ) ) (plug-in-autocrop 0 (car img) (car drw) ) (plug-in-colortoalpha 1 (car img) (car drw) '(0 0 0) ) (gimp-file-save 1 (car img) (car drw ) "/ruta_hasta_la_imagen/ejemplo.png" "ejemplo.png") ) (gimp-quit 0)'
{% endhighlight %}
</div>
A gimp tengo que pasarle la orden "-i" (no interactivo) y "-b" (ejecútame este batch) con la ristra que hemos comentado antes. Tan sólo le he añadido un detallito, que es la orden "gimp-quit" para que no se quede en "modo gimp".
Hay una forma de hacer que me procese todas las imágenes de golpe... Pero eso lo dejaré para otra receta, ya que yo voy a utilizar un precioso Makefile que me va a ir transformando todas las imágenes y me las va a actualizar cuando las modifique (mientras que con gimp tendría que volver a procesar todas), así que lo dejaré en el tintero.
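Mientras llega esa otra receta, un apaño rápido (boceto no probado, que además sobreescribe cada fichero, así que mejor probarlo sobre una copia) sería un simple bucle de shell reutilizando la misma orden:
<div>
{% highlight bash %}
#!/bin/bash
# Boceto: autocrop de todos los PNG del directorio actual con gimp.
# OJO: sobreescribe cada fichero; trabajar sobre una copia.
for f in *.png; do
    gimp -i -b "(let* ((img (gimp-file-load 0 \"$f\" \"$f\")) \
                       (drw (gimp-image-get-active-drawable (car img)))) \
                  (plug-in-autocrop 0 (car img) (car drw)) \
                  (gimp-file-save 1 (car img) (car drw) \"$f\" \"$f\")) \
                (gimp-quit 0)"
done
{% endhighlight %}
</div>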
<h2>Y fin</h2>
No sé si esto resultará útil para alguien, pero puedo asegurar que para mí sí, y como no sabía dónde ponerlo y el script-fu es infernal, lo he plantao aquí, que hacía mucho que no daba señales de vida.
<h2>Referencias</h2>
Pues un poco de aquí y otro de allá:
<ul>
<li><a href="http://www.javielinux.com/programacion_python.php">javielinux</a></li>
<li><a href="http://www.linbox.com/ucome.rvt/any/doc_distrib/gimp-1.1.18/manual/manual/GUM/write_scriptfu3.html">Manual de gimp </a></li>
<li><a href="http://www.gimp.org/tutorials/Basic_Batch/">Tutorial de gimp</a></li>
<li><a href="http://www.gimp.org/docs/scheme_plugin/index.html">Documentación de gimp</a></li>
<li><a href="http://gimp-plug-ins.sourceforge.net/doc/Writing/html/plug-in.html">Plugins de gimp</a></li>
</ul>
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-06-02/seguridad-wifi-con-tarjetas-atheros.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2006-10-30/lasaa-con-championes-y-berenjena.html
---
<file_sep>---
migrated: node/1069
layout: post
title: La consejería de educación y ciencia de CLM apuesta por Windows
created: 1229681899
author: ricardo
comments: true
category: new
tags:
- Molinux
---
Éste es el título de un <a href=" http://www.ste-clm.com/modules/news/article.php?storyid=630">artículo</a> que aparece en la web del STE de Castilla-La Mancha.
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-03-22/hacer-backup-de-tu-correo-en-gmail.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2006-07-08/iptables-en-fc5.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-01-18/como-funciona-lo-del-vmware-que-esta-instalado-en-los-equipos-de-la-esi.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-12-20/el-ceslcam-regala-un-pendrive-de-4gb.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-04-30/se-puede-hacer-dinero-con-el-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2007-02-21/sobre-los-gtk-radiobutton.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2010-10-29/alitas-de-muerte-a-k-a-alitas-al-horno.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-09-04/comparar-ficheros.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-10-09/mailman-and-exim4.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-08-10/gestionando-preferencias-con-gconf-y-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-02/compartir-una-impresora-cups-automgicamente.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-15/ethernet-bridging-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-05-12/otra-de-linus.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /script/2007-03-14/generate_m3u.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-01-27/no-habr-sistema-de-3-amenazas-a-internautas-en-uk.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2013-06-26/pollo-a-la-pia-con-queso.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-05-14/webcams-usb-y-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-02-01/wormux-el-worms-para-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-02/instalar-gnesis-en-un-usb-desde-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-04-07/fuse-y-python-crea-tu-propio-sistema-de-ficheros-fcilmente.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2011-05-13/install-party-y-videoforum-sobre-software-libre-en-daimiel-17-de-mayo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-21/impresora-hp-laserjet-1000-usb.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-12-25/patrn-flyweight-en-python-como-metaclase.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-11-13/demo-del-nuevo-instalador-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-19/crear-un-sitio-web-ssl-con-apache2.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2008-08-14/lo-nico-seguro-es-que-no-lo-es.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-08-01/publicar-los-homes-con-apache2.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2005-10-19/usuarios-ubuntu-quereis-howtos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-11-27/ms-de-20-estudiantes-de-la-uclm-desarrollarn-proyectos-en-sl.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-03-18/abierto-el-plazo-de-inscripcin-para-asistir-a-la-fase-final-del-i-concurso-universitario-de-sl-de-c-lm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-10-19/renombrar-mltiples-ficheros-usando-mmv.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-15/compartir-ficheros-con-sshfs.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-19/bluetooth-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2006-10-27/ayuda-con-mono.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-21/aceleracin-3d-con-dri-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-02-17/el-meta-amigo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-04-26/configuring-igmp-in-a-lan-to-control-iptv-multicast-flows-over-cisco-catalyst-3550-12t.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-09-21/abierta-inscripcin-concurso-universitario-de-software-libre-de-clm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-12-15/zeroc-ice-desarrollo-de-plugins.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-04-12/gua-de-referencia-para-cmaras-axis.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-01-22/stallman-en-la-esi.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2013-02-27/instalar-un-mdulo-python-en-un-virtualenv.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2012-02-06/97-things-every-programmer-should-know.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-04-13/si-eres-legal-eres-legal.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-06-16/problemon-en-el-pc.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-02-05/el-santsimo-credo-del-informtico.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-11-08/nueva-versin-de-molinux-adarga-4-0.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-04-24/eaglemode-mi-pc-a-vista-de-pjaro.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-11-18/introduccin-a-unicode-y-utf-8.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2011-07-29/y-si-contrataran-a-los-conductores-igual-que-a-los-programadores.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-08-20/usar-pidgin-para-unirte-a-una-sala-jabber.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-10-10/gtk-uimanager.html
---
<file_sep>---
migrated: node/470
layout: post
title: convertidor de video
created: 1166030897
author: kryle
comments: true
---
alguien por ahi me puede recomendar un buen convertidor de video? especificamente que convierta de AVI a MPG.
y si tiene tiempo limitado, les agradeceria que me indiquen como crackearlo. Gracias!!
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-06/la-tierra-como-fondo-de-escritorio-en-gnome-con-xplanet.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-04-20/una-de-listos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-25/buscar-ficheros-en-el-repositorio-debian-apt-file.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-01-14/microsoft-patenta-el-fat.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-04-05/instalar-x-wrt-en-la-fonera.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-11-24/gnu-emacs-editar-archivos-remotos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-10-31/introduccin-a-la-programacin-en-emacs-lisp-de-chassell
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2005-11-29/conexiones-wifi-que-tambin-pueden-ser-libres.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2008-12-04/el-robo-del-milenio-cmo-internet-lleg-a-ser-libre-y-porqu-es-importante.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-25/dos-monitores-dual-head-con-xrandr.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-12-24/mdem-comtrend-ct-350-con-el-driver-ueagle-atm-remake.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-10-01/imagen-opie-para-tu-friendlyarm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-02-13/manipulacin-de-ficheros-mp3.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-04-09/gnu-emacs-todos-los-comandos-que-necesitas-y-nunca-recuerdas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2006-07-31/usuario-de-yafray-necesitamos-tu-ayuda.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-05-16/mini-referencia-de-sql-con-mysql.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-10-18/algunos-que-otros-problemillas-con-la-web-de-cisco.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-03-25/cmake-compilar-un-hola-mundo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /challenge/2010-05-22/reto-de-la-semana-containers-de-la-stl.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-08-14/10-seales-de-que-no-eres-tan-gnu-ista-como-crees.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-11-17/arco-devel-zoom-para-gnu-emacs.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2005-12-06/arroz-con-pollo-al-curry.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-02-24/navegacin-annima-mediante-tor-y-privoxy.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-04-26/zeroc-ice-parseando-un-fichero-slice.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2011-06-01/intel-hd-graphic-urgente.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-09-29/cambio-en-la-configuracin-de-sudo.html
---
<file_sep># -*- mode: ruby -*-
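# Minimal test environment: a VirtualBox VM running the "deb/jessie-i386" box
# with 2 GB of RAM, provisioned by the Ansible playbook "playbook.yml" that is
# expected to live next to this Vagrantfile.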
Vagrant.configure("2") do |config|
config.vm.box = "deb/jessie-i386"
config.vm.provider :virtualbox do |vb|
vb.memory = 2048
end
config.vm.provision "ansible" do |ansible|
ansible.playbook = "playbook.yml"
end
end
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-08-01/puesta-en-marcha-de-un-servidor-dhcp.html
---
<file_sep>---
migrated: node/1452
layout: post
title: Actualizado el Manual de GNU Emacs a 22.2
created: 1295887711
author: suso
comments: true
category: new
tags:
- emacs
---
It is my immense pleasure to announce the update of the translation
<!--break-->
of the GNU Emacs Manual to its Sixteenth Edition, corresponding to
GNU Emacs 22.2. It is still in the alpha phase, although we have fixed
many typographical errors and made progress on beta-phase tasks.
It has been arduous and thankless work, since that edition introduced
substantial structural changes in the original manual that delayed the
update enormously. A good part of those changes were not especially
relevant from the point of view of the Spanish edition, so we have had
to take decisions that considerably affect the maintenance of the
Spanish manual.
On top of that come the countless "stylistic touch-ups" the English
manual went through, most of them with no effect on the final
translation, but extremely hard to track during the update.
All of this is explained on the manual's Spanish web page. We hope
that, with this step behind us, we can tackle the update to version 23
with more ease.
----------
I am removing the intermediate text galleys and generating them as
provisional sample HTML. Remember, they are galley proofs.
The material is not ready for distribution yet, but it can be
consulted.
"Alpha" phase means the translation passes over examples, indexes,
nodes, etc. are still pending, as well as deep technical, grammatical
and stylistic corrections. It is therefore published here for the sole
purpose of receiving suggestions from those who want to collaborate
with this project.
We understand that translations in the alpha phase are in a
considerably imperfect state, which is why our company's policy is to
never hand them to the public. However, given the nature of this
documentation, we are following a galley-proof approach that anyone
interested can join.
The galleys can be reached in the Books section.
<a href="http://gnu.manticore.es/manual-emacs">http://gnu.manticore.es/manual-emacs</a>
--
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-19/java-blackdown-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-12-13/necesito-algo-de-ayuda-sobre-mi-lan.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-01-12/gnu-linux-media-center.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-08-21/ingsoft-realista.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-12-01/sobre-la-web.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-06/midi-por-software.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2011-03-03/open-letter-to-hardware-manufacturers.html
---
<file_sep>---
migrated: node/1071
layout: post
title: Suicidios de discos duros...
created: 1229909393
author: javieralso
comments: true
category: recipe
tags:
- Debian
---
Honestly, I am not sure whether I should file this as a recipe or not. It has been somewhat useful to me while looking for ways to run hard disk diagnostics (mine has lately been failing more often than a fairground pellet gun, and it is only one year old).
<!--break-->
Since I would rather not reinvent the wheel, I am simply posting the link where I found the article (it is already written, after all). The article is titled <a href="http://www.vicente-navarro.com/blog/2007/10/28/linux-no-mata-discos-duros-se-mueren-solos"><b>Linux no mata discos duros, se mueren solos</b></a>
In my case, after 12 months and a couple of weeks of use, I have read 47720 cycles, the last 10 of them in less than 15 minutes...
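For the record, the counter I was looking at can be read with the smartmontools package; something along these lines (the device name will vary on each system):
<div class="console">
{% highlight console %}
# smartctl -A /dev/sda | grep -i load_cycle
{% endhighlight %}
</div>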
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-09/autenticacin-de-paquetes-para-repositorios-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-07-05/cmo-usar-tor-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-06-25/reproducir-vdeo-con-mplayer-utilizando-la-gpu-y-vdpau-para-decodificar.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-04-24/ayuda.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-23/gnu-emacs-edicin-rectangular-seleccin-vertical.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-02-14/revista-begins.html
---
<file_sep>---
migrated: node/410
layout: post
title: Comienza el desarrollo de Gnesis 3.0
created: 1161704087
author: cleto
comments: true
category: new
tags:
- GNESIS
---
Hello everyone!
<!--break-->
I am writing to let you know that the development of Gnesis 3.0 has started. There are many changes from version 2.0 to the one currently under development, and many of them are quite noticeable:
<ul>
<li> <b>Debian-based</b>: the new version of Gnesis is based on Debian GNU/Linux, generated with the <a href="http://debian-live.alioth.debian.org/">Debian-Live</a> tools. Specifically, it will be the unstable version of Debian: to stay on the bleeding edge ;-)!</li>
<li><b>Documentation and application repository</b>: one of the goals of Gnesis is to cover the needs of computer science students. The plan is to set up a system of virtual packages that provides the applications, tools and slides of the different subjects of the degree. Installing them will be as easy as installing a Debian package, and they will always be kept up to date whenever slides, notes, etc. change.</li>
<li><b>Debian Live, but installable</b>: another goal of Gnesis is to be installable. As you know, Ubuntu is already Live-Installer but Debian is not yet. Let's hope Gnesis is the first one. :-)</li>
<li><b>Gnesis on USB</b>: so you can go everywhere showing off the distribution, a minimal, usable USB edition will be built for portable hard drives and other USB devices.</li>
</ul>
Broadly speaking, this is what Gnesis aims to be. If you have a suggestion and want to share your opinion, leave a comment. We are eagerly awaiting your feedback.
Best regards to everyone!
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-09-24/sensor-de-movimiento-del-ibook-powerbook-g4-bajo-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-05-16/peticin-de-talleres-y-actividades-para-la-party-quijote-2006.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-14/recuperar-grub.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-17/mini-tutorial-de-python-3.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-07-04/sai-mge-pulsar-ellipse-600-en-debian-con-nut.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-08-15/arreglar-subttulos-srt-desincronizados.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-02-28/cansado-de-amd64.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-01-21/creando-ejecutables-vlidos-para-cualquier-psp-como-sony.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-02-28/pegatinas-de-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-10-19/python-xlib-emulando-el-teclado.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-12-17/razones-de-peso-por-las-que-necesitas-windows-live.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-11-09/hook-subversion-para-integracin-con-hudson.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-03-11/convertir-subttulos-de-dvd-a-formato-srt.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-10-20/gnu-emacs-usando-emacs-cscope.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-10-20/tinyos-2-0-un-sistema-operativo-libre-para-dispositivos-empotrados.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-09-21/eduroam-con-network-manager-one-more-time.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-07-13/libro-de-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-21/apt-get-y-dpkg.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-13/timeout-de-un-mtodo-en-c-con-glib.html
---
<file_sep>---
migrated: node/201
layout: post
title: Virtualización fácil con Xen3
created: 1138102303
author: int-0
comments: true
category: recipe
tags:
- Arco
---
<blockquote>Esta receta no se si tendrá muchas aplicaciones "domésticas", en ella vamos a tratar de crear <em>n</em> máquinas virtuales en una única máquina física. Esto permitirá crear (por ejemplo) <em>n</em> servidores, cada uno con unos servicios determinados en una misma máquina física. Con ello se consigue (entre otras cosas) proteger unos servicios mas "críticos" de otros más "vulnerables" (lo que pase en una máquina es independiente al resto).</blockquote>
<!--break-->
<h1>Qué es Xen</h1>
Xen es un sistema de virtualización similar a <em>VM-Ware</em> o <em>QEmu</em>, pero tiene una diferencia bastante notable: mientras que <em>VM-Ware</em> degrada alrededor de un 30% el rendimiento de la máquina emulada, Xen lo hace entre un 2% y un 3% (es increíble pero cierto!), pero claro... no va a ser todo de color de rosa: es necesario parchear tanto el núcleo del sistema real como el núcleo de los sistemas que se emularán (esto debería impedir ejecutar sistemas hasefroch 95, XP, etc. bajo un linux ya que no podemos parchear, a priori, el núcleo de estos sistemas privativos... se comenta por ahí que <b>sí</b> existen estos parches, aunque yo no los he visto).
¿Cómo consigue esto?, en realidad Xen no es una máquina virtual, sino un <a href="http://en.wikipedia.org/wiki/Hypervisor">hypervisor</a> para núcleos linux de la rama 2.6, cuando se inicia un núcleo parcheado con Xen antes de nada aparecerán unos mensajes que empiezan con "[XEN]:", así se muestran los mensajes del hypervisor, también se puede dirigir la consola de Xen a un puerto serie concreto o compartir una cosola con el sistema principal. Después podremos cargar más núcleos como linux, FreeBSD o Plan9.
Para gestionar todos los sistemas que se quieran ejecutar en una máquina, Xen divide el sistema en dominios (o <b>dom</b>), el dominio 0 (o <b>Dom0</b> a partir de ahora, denominado asi por ser privilegiado o <em>priviledge</em>) es el sistema real y será el encargado de lanzar/administrar el resto de los dominios (o genéricamente <b>DomU</b> o <em>unpriviledge</em> a partir de ahora,). La comunicación entre dominios se realiza mediante <em>backends</em> de <em>hotplug</em>, esto permite la comunicación entre dominios con interfaces de red virtuales (o <b>vif</b>'s), discos compartidos, etc.
Los discos de los dominios serán imagenes de sistemas de ficheros ext2, ext3 o reiser. Se pueden crear con dd y montar como dispositivos <em>loop</em> (esto lo veremos más adelante) o incluso usar particiones reales.
La conexión de los dominios a una red se puede hacer de diversos modos: <em>bridged</em>, <em>NAT</em>, etc.
Las versiones de Xen a partir de la 3 soportan virtualización SMP en sistemas SMP. Ninguna versión de Xen liberada hasta ahora soporta librerías <a href="http://en.wikipedia.org/wiki/Thread-local_storage">TLS</a>, es necesario "deshabilitarlas", no he encontrado un método <em>Debian-like</em> en ninguna parte para hacerlo, los chicos de Xen sugieren lo siguiente (que es lo que he hecho yo):
<div class="console">
{% highlight console %}
$ cd /lib
$ mv tls tls.disabled
{% endhighlight %}
</div>
<h1>Instalación con paquetes binarios</h1>
La forma más cómoda de instalar Xen es descargarse los paquetes binarios: <tt>xen-3.0.0-install-x86_32.tgz</tt> o <tt>xen-3.0.0-install-x86_32p.tgz</tt> (incluye soporte <a href="http://en.wikipedia.org/wiki/Physical_Address_Extension">PAE</a>). Estas distribuciones traen de serie el soporte SMP. Ahora símplemente debemos descomprimir el paquete y ejecutar el script de instalación:
<div class="console">
{% highlight console %}
$ unp xen-3.0.0-install-x86_32.tgz
$ cd xen-3.0.0-install
# ./install.sh
{% endhighlight %}
</div>
Este script copiará los ejecutables necesarios, las bibliotecas de python, el hypervisor y los scripts de inicio. Sólo nos queda configurar Grub y corregir unos problemas con este tipo de instalación.
<h1>Problemas serios y soluciones cutres en la instalación</h1>
Bien, esos problemas "serios" me han ocurrido en una Debian unstable (tenedlo en cuenta si usáis Ubuntu o alguna otra), algunos ejecutables no funcionan correctamente por no encontrar una serie de librerías: <tt>libssl.so.4</tt>, <tt>libcrypto.so.4</tt> y <tt>libcurl.so.4</tt>. La solución <b>cutre</b> que he encontrado ha sido la siguiente: enlaces débiles de las librerías que vienen en Debian (más nuevas) a las que me pide el programa:
<div class="console">
{% highlight console %}
# cd /usr/lib
# ln -s libssl.so libssl.so.4
# ln -s libcrypto.so libcrypto.so.4
# ln -s libcurl.so libcurl.so.4
{% endhighlight %}
</div>
También puede ocurrir que los programas <em>python</em> den error al importar algunos módulos, esto también se puede corregir de forma cutre fácilmente:
<div class="console">
{% highlight console %}
# cd /usr/lib
# cp python/* python2.3/
{% endhighlight %}
</div>
O también podríamos haber creado enlaces simbólicos (más mejor...). El problema está en que el script de instalación deja las bibliotecas de python en <tt>/usr/lib/python</tt> y nuestro intérprete las busca en <tt>/usr/lib/python2.3</tt>.
Un problema que ha aparecido en esta versión de Xen (coincidiendo con la sustitución de <em>hotplug</em> por <em>udev</em>) se debe a que aunque se necesita el módulo <b>loop</b> éste no se carga, es necesario cargarlo antes de iniciar <b>xend</b>. Para ello haremos lo siguiente:
<div class="console">
{% highlight console %}
# echo "loop" >> /etc/modules
{% endhighlight %}
</div>
<blockquote>Nota: cuando se carga el módulo, por defecto no deja montar más de 8 dispositivos <em>loopback</em>, esto puede ser problemático en sistemas donde querámos montar más de 4 dominios y no queramos usar particiones reales. Podemos pasar un parámetro a la carga del módulo:
<div class="console">
{% highlight console %}
# modprobe loop max_loop=64
{% endhighlight %}
</div></blockquote>
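Si queremos que ese parámetro quede fijo en cada arranque, una posibilidad (la ruta y el nombre exacto del fichero pueden variar según la versión de la distribución) es añadir una línea <em>options</em> a la configuración de modprobe:
<div class="console">
{% highlight console %}
# echo "options loop max_loop=64" >> /etc/modprobe.d/loop
{% endhighlight %}
</div>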
<!--break-->
Finalmente, la instalación añade dos servicios nuevos en <em>init.d</em> pero, en cambio, no actualiza los enlaces simbólicos de los servicios según el <em>runlevel</em>, así que lo hacemos nosotros y en paz:
<div class="console">
{% highlight console %}
# update-rc.d xend defaults
# update-rc.d xendomains defaults
{% endhighlight %}
</div>
Con esto, la próxima vez que arranque el sistema lo hará con el módulo cargado, <b>xend</b> iniciará correctamente y podremos lanzar todos los dominios que necesitemos.
<h1>Configurar GRUB</h1>
Bueno... por si no lo he dicho: es obligatorio <b>Grub</b>... así que migraros los que aún sigáis usando cargadores obsoletos... :-P
En este paso, tendréis que tener <tt>xen-3.0.0.gz</tt> en <tt>/boot</tt>, al igual que las tres imagenes del kernel (sólo privilegiado o -xen0, sin privilegios o -xenU y uno capaz de funcionar de ambos modos o -xen). Muy importante es que no ejecutéis <b>update-grub</b> porque os va a añadir las imágenes parcheadas al menú de arranque y esas opciones no van a ser válidas. Las nuevas opciones las añadiremos "a pelo":
<pre>title Xen 3 Linux, Kernel 2.6.12-xen
root(hd0,4)
kernel /boot/xen-3.0.0.gz dom0_mem=262144
module /boot/vmlinuz-2.6.12.6-xen root=/dev/hde5</pre>
Empecemos con las explicaciones... lo primero que las opciones <b>root</b> serán distintas en vuestro sistema, mirad la de las otras opciones de arranque que tenéis y poned lo mismo.
También podemos especificar una imagen <em>initrd</em>, para ello solo tenemos que añadir una nueva entrada tal que:
<pre>module /boot/initrd.img-2.6.12</pre>
De todas formas yo no la uso y me funciona.
Otra cosa, <b>dom0_mem</b> establece la cantidad de memoria física que verá el Dom0, este dominio no lo usaremos para ofrecer servicios, símplemente para gestionar el resto de dominios, por tanto, no gastéis demasiada memoria en él, yo le he asignado 256Mb que para lo que se usa ya va bien.
Pues nada, ya sólo nos falta reiniciar el sistema y seleccionar ese arranque, si todo va bien veremos los mensajes del hypervisor y después los del arranque normal del linux, si esto es así... ¡ya tenemos el Dom0 funcionando!.
<h1>Compilación de Xen3</h1>
Puede ser que queramos meternos de lleno en Xen, ya sabéis: compilar es más bonito, pero tened en cuenta una cosa, se crean 3 núcleos diferentes: <b>-xen</b> (funciona como Dom0 y DomU), <b>-xen0</b> (funciona sólo como Dom0) y <b>-xenU</b> (funciona como DomU, tiene algunos drivers menos y ocupa 30% menos); así pues el núcleo se compilará 3 veces cada vez, si os apetece entonces vamos al tema:
Necesitaremos los paquetes necesarios para compilar el linux, a parte, también tendremos que tener instalado:
<ul>
<li>zlib1g-dev</li>
<li>rubber (para instalar latex de "rebote")</li>
<li>python-dev</li>
<li>gs-common</li>
<li>transfig</li>
<li>tetex-extra</li>
<li>python-twisted</li>
</ul>
Si hace falta algún paquete más, por favor, comunicadmelo. La compilación es bastante rollo porque si te falta alguno de estos programas se para y cada vez que se hace un <em>make</em> empieza desde el principio porque hace <em>clean</em> antes de empezar.
Ahora necesitamos el archivo <tt>xen-3.0.0-src.tgz</tt>, en la página de <a href="http://www.xensource.com/xen/downloads/">XenSource</a> no está el link directo, hay que pedirlo por mail pero os lo dan automáticamente al hacerlo. Lo que si os viene es un archivo <em>torrent</em> para descargar los mismos ficheros, intentad esta opción si pasáis de dar vuestro mail por ahí.
Una vez tengamos el fichero, lo descomprimimos:
<div class="console">
{% highlight console %}
$ unp xen-3.0.0-src.tgz
{% endhighlight %}
</div>
Y finalmente compilamos:
<div class="console">
{% highlight console %}
$ cd xen-3.0.0
$ make KERNELS=linux-2.6-xen world
{% endhighlight %}
</div>
Esto compilará la documentación y tres núcleos parcheados: <em>version</em>-xen0, <em>version</em>-xenU y <em>version</em>-xen: uno para el Dom0, otro para el DomU y finalmente uno que puede funcionar de ambos modos (éste es el que usaremos para todo...). Los parches tienen un problema: vienen pensados para una versión concreta del linux, no sirve cualquiera de la 2.6, en este caso hace falta la 2.6.12, pero tranquilos porque el mismo script se descargará los fuentes del núcleo apropiado y lo parcheará debidamente... más fácil imposible!
Una vez compilado lo instalamos (ya como root):
<div class="console">
{% highlight console %}
# make install
{% endhighlight %}
</div>
Y ahora como antes, sólo nos queda configurar Grub.
<h1>Parámetros de configuración de núcleos parcheados</h1>
Supongamos que queremos establecer/modificar valores en el núcleo: inhabilitar módulos, añadir nuevos, etc. Podemos utilizar objetivos normales como los vistos en la receta <a href="/node/182">"Configurar, parchear, cacharrear y compilar un linux FÁCILMENTE"</a> de una forma bastante simple:
<div class="console">
{% highlight console %}
$ cd xen-3.0.0
$ cd linux-2.6.12-xen0
$ make ARCH=XEN menuconfig
{% endhighlight %}
</div>
También puede ser más cómodo usar <em>make-kpkg</em>, en cuyo caso:
<div class="console">
{% highlight console %}
$ make-kpkg --arch xen kernel_image
{% endhighlight %}
</div>
Los archivos de configuración también se gestionan de igual manera, aunque este núcleo tendrá nuevos parámetros añadidos por el parche de Xen.
Hay que tener en cuenta que esto funcionará una vez compilado como se ha explicado porque la descarga y parcheo del núcleo se habrán realizado automáticamente, en caso contrario hay que hacerlo "a mano".
<h1>Creación de dominios</h1>
Bien... ¿cómo creamos DomU?... en Debian existe un paquete llamado <b>xen-tools</b> con unos scripts bastante chulos que lo hacen todo automático, explicaremos su uso más abajo; ahora veremos como hacerlo a la forma "tradicional".
Los dominios se describen con pequeños archivos de configuración situados en <tt>/etc/xen/</tt> que especifican aspectos básicos como el disco, las interfaces de red, el número de procesadores virtuales, etc. Nosotros vamos a crear un dominio cuyo disco va a ser un archivo (para manejarlo luego con más comodidad), para ello creamos el fichero y le damos formato. Los alojaremos en <tt>/var/xen/domains/{nombre_dominio}</tt>, no por obligación, sino por comodidad:
<div class="console">
{% highlight console %}
$ cd /var/xen/domains/xentest/
$ dd if=/dev/zero of=disk.img bs=1k seek=2048k count=1
# mkfs -t ext3 disk.img
{% endhighlight %}
</div>
Con esto hemos creado un archivo de 2GB que contiene un sistema ext3, lo podemos montar para "rellenarlo" de una forma fácil:
<div class="console">
{% highlight console %}
# mount -o loop disk.img /mnt/test
# cp -ax /{root,dev,var,etc,usr,bin,sbin,lib} /mnt/test
# mkdir /mnt/test/{proc,sys,home,tmp}
{% endhighlight %}
</div>
Esto copiará nuestro sistema actual dentro de ese "sistema virtual", además, crearemos los directorios necesarios para la adecuada ejecución del sistema. Ya podemos desmontar /mnt/test, ahora crearemos otra imagen para el <em>swap</em>:
<div class="console">
{% highlight console %}
$ dd if=/dev/zero of=swap.img bs=1k seek=128k count=1
# mkswap swap.img
{% endhighlight %}
</div>
Con esto tendremos un archivo de intercambio de 128Mb, ahora solo nos queda crear el archivo de configuración (xentest.cfg por ejemplo):
<pre>kernel = "/boot/vmlinuz-2.6.12.6-xen"
memory = 128
name = "xentest"
disk = ['file:/var/xen/domains/xentest/disk.img,sda1,w','file:/var/xen/domains/xentest/swap.img,sda2,w']
root = "/dev/sda1 ro"
dhcp = "dhcp"
nics = 1</pre>
Como véis, especificamos que núcleo (accesible por Dom0) y cuanta memoria dispondrá. También se especifica el número de interfaces de red (<em>nics</em>) y cómo obtendrá la IP, tambíen se puede especificar directamente:
<pre>ip = "192.168.0.2"</pre>
Este archivo lo pondremos en /etc/xen/ y podremos lanzarlo mediante:
<div class="console">
{% highlight console %}
# xm create xentest.cfg -c
{% endhighlight %}
</div>
Si Dom0 esta correctamente configurado, se habrá lanzado <em>xend</em> y <em>xendomains</em> al inicio, entonces se lanzará ese nuevo dominio, podemos
ver que se ha lanzado y se está ejecutando con el comando:
<div class="console">
{% highlight console %}
# xm list
{% endhighlight %}
</div>
Si queremos que un dominio se lance automáticamente al iniciar Dom0 sólo tenemos que crear un enlace simbólico del archivo de configuración en el directorio <tt>/etc/xen/auto</tt>:
<div class="console">
{% highlight console %}
# cd /etc/xen
# ln -s xentest.cfg auto/xentest.cfg
{% endhighlight %}
</div>
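Para el día a día también resultan útiles otras órdenes de <em>xm</em> (el nombre de dominio es el de nuestro ejemplo):
<div class="console">
{% highlight console %}
# xm console xentest
# xm shutdown xentest
# xm destroy xentest
{% endhighlight %}
</div>
La primera abre la consola del dominio, la segunda lo apaga de forma ordenada y la tercera lo mata sin contemplaciones.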
<h1>Creación de dominios con xen-tools</h1>
Debian dispone de dos paquetes relativos a Xen: <b>xen</b> y <b>xen-tools</b>. Es mejor no instalar el paquete <b>xen</b> porque va por la versión <b>2.0.6</b>, además tampoco incluye los núcleos parcheados con lo que tendremos que compilarlo nosotros. Si podemos instalar y usar <b>xen-tools</b>, este paquete incluye scripts muy útiles para la gestión de dominios (creación, duplicado, etc.). Una vez instalado el paquete, podemos crear dominios fácilmente con la herramienta <b>xen-create-image</b> del paquete <b>xen-tools</b>, tan fácil como:
<div class="console">
{% highlight console %}
# xen-create-image --dir /var/xen --hostname xentest
{% endhighlight %}
</div>
Esto creará una Debian base en el archivo de imagen creando el archivo de intercambio directamente. Podemos especificar más parámetros, por ejemplo:
<div class="console">
{% highlight console %}
# xen-create-image --dir /var/xen --hostname xentest --fs ext3 --kernel /boot/vmlinuz-2.6.12.6-xen --mirror http://ftp.rediris.es/debian --dist sarge
{% endhighlight %}
</div>
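El paquete incluye además otras utilidades complementarias; por ejemplo, para deshacernos de la imagen recién creada (la sintaxis exacta puede variar según la versión de <b>xen-tools</b>) podemos usar:
<div class="console">
{% highlight console %}
# xen-delete-image --dir /var/xen xentest
{% endhighlight %}
</div>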
<h1>Configurar la red de los dominios</h1>
En primer lugar, el Dom0 se configura como siempre, no hay que hacer nada "raro". Tenemos dos posibilidades para crear una red con los dominios: <em>bridged</em> o <em>NAT</em>. En modo <em>bridge</em> la interfaz del DomU tiene acceso a la red através del interfaz de Dom0. Podemos imaginarlo como si nuestra máquina real tiene varias IP's, una para cada dominio. En el modo <em>NAT</em> creamos una LAN entre los DomU, la salida a la LAN real se hace por la interfaz del Dom0 de forma similar a como funciona un red doméstica con varios equipos y un "routercillo" conectado a una ADSL.
Vamos con el modo <em>bridge</em>, en principio este modo es más sencillo pero tendremos que realizar un par de cambios a la configuración de nuestros DomU: principalmente usar el linux parcheado específicamente para DomU, modificamos la línea del kernel a cargar por la siguiente:
<pre>kernel = "/boot/vmlinuz-2.6.12.6-xenU"</pre>
Con esto, programas como <tt>dhcp-client</tt> nos darán menos problemas. En mi caso todos los dominios se configuran con <em>DHCP</em>, vuestra LAN debe tener <tt>dhcpd</tt> o similar funcionando. Todos los dominios tienen que tener la interfaz configurada en <tt>/etc/network/interfaces</tt> como sigue:
<pre>auto eth0
iface eth0 inet dhcp</pre>
Y en el archivo de configuración del dominio:
<pre>nics = 1
dhcp = "dhcp"</pre>
Xen configura la red mediante unos scripts (más o menos sencillos pero muy bien comentados) situados en <tt>/etc/xen/scripts</tt>, echad un vistazo a <tt>network-bridge</tt> que os explica como arrancarlo, básicamente:
<div class="console">
{% highlight console %}
$ /etc/xen/scripts/network-bridge start
{% endhighlight %}
</div>
Y ya arrancamos el dominio como antes, si todo ha ido bien, <tt>dhcp-client</tt> debe obtener una IP (también tenéis que tener <tt>dhcpd</tt> bien configurado en vuestra LAN...). Podéis probar a hacer <em>pings</em> a equipos incluso fuera de la LAN o desde otros equipos al dominio. Los <em>pings</em> hacedlos así:
<div class="console">
{% highlight console %}
$ ping -c 10 www.google.com
{% endhighlight %}
</div>
Por la sencilla razón de que los dominios, al compartir la terminal, no reciben señales como <em>Ctrl+C</em> y el <em>ping</em> se va a ejecutar indefinídamente hasta que no cerréis el dominio.
<h1>Conclusiones</h1>
En esta receta se han quedado muchisimas cosas por explicar, por ejemplo: configuración del núcleo parcheado con Xen, parámetros de Xen, nuevas opciones de núcleos parcheados, imagenes Xen en discos reales, en discos LVM, en discos NFS, migración en vivo de máquinas Xen (esto es impresionante), topología de red en dominios Xen, etc. Muchas de estas cosas son fáciles de realizar con la documentación en la mano, otras las estoy estudiando a ver si lo echo a andar completamente. Por ahora necesito ayuda con lo que ya he puesto aquí... gracias y que le aproveche a alguien... ;-)
<h1>Enlaces de interés</h1>
<a href="http://www.xensource.com/xen/xen/">XenSource, página oficial</a>
<a href="http://www.howtoforge.com/perfect_xen_setup_debian_ubuntu">Perfect Xen setup on Debian/Ubuntu</a>
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-05-25/25-de-mayo-dia-del-orgullo-friki.html
---
<file_sep>---
migrated: node/735
layout: post
title: Ayuda con Visión por computador
created: 1184070181
author: manueldavid
comments: true
category: enquiry
---
<h2>Reconstrucción 3D a partir de dos imágenes</h2>
<blockquote class="head">
Intento realizar la reconstrucción 3D a partir de la visión estereo (dos imágenes o más) he llegado a un punto en el que me he quedado atascado. A ver si alguno de ustedes me puede orientar.
</blockquote>
<h2>Aclaraciones:</h2>
La reconstrucción 3D lo que pretende es obtener las dimensiones 3D de un objeto a partir de varias imágenes tomadas del mismo desde diferentes posiciones(mínimo dos imágenes).
Para ello yo estoy utilizando dos cámaras.
<h2>Pasos que estoy siguiendo:</h2>
<ol>
<li>Calibrar la cámara izquierda (Obtenemos la matriz de cámara KL) </li>
<li>Calibrar la cámara derecha (Obtenemos la matriz de cámara KR) </li>
<li> Sacar una imagen desde el lado izquierdo</li>
<li> Sacar una imagen desde el lado derecho</li>
<li> Detectar coordenadas en píxeles del objeto deseado de la imagen izquierda(mínimo 8 ) </li>
<li> Detectar las mismas coordenadas en píxeles del objeto deseado en la imagen derecha(mínimo 8 )</li>
<li> Calcular la matriz fundamental a partir de los píxeles anteriormente calculados
<blockquote class="head">
Para ello utilizo la función de OpenCV:
cvFindFundamentalMatrix( points1, points2,8,0, fund_matrix);
points1 corresponde a los puntos de la imagen de la izquierda, y points2 corresponde a los puntos de la imagen de la derecha, la matriz fundamental queda almacenada en fund_matrix.
</blockquote>
</li>
<li> calcular la matriz esencial a partir de la matriz fundamental y la matriz de cámara de ambas cámaras
<blockquote class="head">
De la siguiente forma:
ME=KLT x F x KR
Siendo ME la matriz esencial.
KLT la matriz transpuesta de KL(matriz de cámara izquierda).
KR matriz de cámara derecha.
F la matriz fundamental.
</blockquote>
</li>
<li> calcular la descomposición SVD de la matriz esencial
<blockquote class="head">
Para ello hay que utilzar la siguiente funcion de OpenCV.
void cvSVD( CvArr* A, CvArr* W, CvArr* U=0, CvArr* V=0, int flags=0 );
A es la matriz que queremos descomponer.
U,W,V son la matrices en las que se descompone. Como vemos en la siguiente línea de abajo.
A=U*W*VT
</blockquote>
</li>
<li> Calculamos la matriz de rotación(R)
<blockquote class="head">
de la siguiente forma:
U*P* VT
donde P es :
| 0 1 0 |
| -1 0 0|
| 0 0 1|
</blockquote>
</li>
<li> Calculamos la matriz de traslacion(T)
<blockquote class="head">
de la siguiente forma:
V*PP* VT
donde PP es :
| 0 -1 0 |
| 1 0 0|
| 0 0 1|
A partir de aquí es cuando empiezan mis primeras dudas, a partir de la T de arriba, tengo que sacar un vector de 3 X 1.
En uno de los documentos lo que hacen es formar esta matriz de 3 X 1, a partir del a02 y a21, de la siguiente forma:
|a21|
|a02|
|a02|
</blockquote>
</li>
<li> Con todas las matrices halladas anteriormente resolver las siguientes dos ecuaciones independientes
<blockquote class="head">
m1=[K1|0]M
m2=[(K2*R)|(-K2*R*T)]*M
m1 es una matriz 3X1 (coordenadas en pixeles de la primera imagen)
m2 es una matriz de 3X1 (coordenadas en pixeles de la segunda imagen)
K1 es una matriz de 3x3(matriz de cámara 1)
K2 es una matriz de 3X3(matriz de cámara 2)
R es una matriz de 3X3 (matriz de rotación)
T es una matriz de 3X1 (vector de traslacion)
M es el punto 3D que quiero obtener.
</blockquote>
</li>
</ol>
<h2>Mis dudas</h2>
Aplicando algebra lineal he conseguido hallar los puntos 3D M en ambas ecuaciones, el problema que he encontrado es que el eje Z, es decir, la tercera dimensión, la profundidad del objeto me la calcula de forma erronea.
¿Alguno de ustedes sabe si mi error radica en algún paso anterior?
¿Alguno de ustedes realiza la reconstrucción 3D de otra forma?
Agradecería cualquier tipo de orientación.
Un saludo
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-06/instalar-beryl-desde-repositorio.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-11-24/instalar-debian-gnu-linux-en-el-acer-aspire-one-a150l.html
---
<file_sep>---
migrated: node/531
layout: post
title: Sacando el jugo a las tarjetas Atheros (madwifi)
created: 1169547128
author: int-0
comments: true
category: recipe
tags:
- security
- networking
---
<blockquote>Si alguno es un afortunado poseedor de una tarjeta wifi con chipset Atheros (que es el que viene usando Cisco) debéis saber que todo un mundo de diversión inalámbrica está a vuestro alcance ;-)</blockquote>
<!--break-->
<h2>Identificar la tarjeta</h2>
Bueno... para saber si tenemos una tarjeta yo uso <em>lspci</em> porque yo la tengo integrada, si la vuestra es <em>pcmcia</em> o <em>usb</em> podéis usar <em>cardctl</em> o <em>lsusb</em> respectivamente... para mi caso:
<div class="console">
{% highlight console %}
# lspci | grep Ath
00:09.0 Ethernet controller: Atheros Communications, Inc. AR5212 802.11abg NIC (rev 01)
{% endhighlight %}
</div>
Y ahí la tenemos! bueno pues vamos al siguiente paso...
<h2>Instalar los drivers madwifi</h2>
Pues nada... si tenemos Debian podemos hacerlo más o menos rápido:
<div class="console">
{% highlight console %}
$ m-a a-i madwifi
{% endhighlight %}
</div>
Como queremos divertirnos, instalaremos un par de cosas más:
<div class="console">
{% highlight console %}
$ apt-get install madwifi-tools kismet
{% endhighlight %}
</div>
Bien... si no tenemos Debian o queremos instalar los drivers a pelo podemos entrar en <a href="http://madwifi.org/">madwifi.org</a> donde encontramos dos sabores: <em>madwifi</em> y <em>madwifi-old-openhal</em>. Descargáis el que queráis mediante <em>subversion</em>, con <em>make</em> y <em>make install</em> los tendréis compilados y funcionando sin problemas. Las diferencias entre los dos es simple: existe una capa llamada <em>HAL</em> (<em>Hardware Access Layer</em>), es la capa que accede al chipset directamente. El chipset puede manejar frecuencias y potencias de transmisión fuera del rango de los estándares wifi (incluso frecuencias militares...), por ley (según legislaciones de algunos países), los fabricantes de aparatillos inalámbricos deben proporcionar mecanismos para asegurar que sus dispositivos no trabajen en otras frecuencias. En el caso de Atheros ese mecanismo es no proporcionar el código de <em>HAL</em>. Los dos sabores que tenemos son pues:
<ul><li>madwifi: HAL cerrado, pero toda la funcionalidad WIFI disponible.</li>
<li>madwifi-old-openhal: porciones de HAL abiertas (descubiertas, más bien), a mi me funcionan bien pero no tan bien como el anterior.</li></ul>
<h2>Configuración de la interfaz</h2>
Una vez cargados los drivers con <em>dmesg</em> deberéis obtener lo siguiente:
<pre>ath_pci: 0.9.4.5 (svn r1993)
PCI: Enabling device 0000:00:09.0 (0000 -> 0002)
ACPI: PCI Interrupt 0000:00:09.0[A] -> Link [LNK3] -> GSI 11 (level, low) -> IRQ 11
wifi0: 11b rates: 1Mbps 2Mbps 5.5Mbps 11Mbps
wifi0: 11g rates: 1Mbps 2Mbps 5.5Mbps 11Mbps 6Mbps 9Mbps 12Mbps 18Mbps 24Mbps 36Mbps 48Mbps 54Mbps
wifi0: H/W encryption support: WEP AES AES_CCM TKIP
wifi0: mac 5.9 phy 4.3 radio 4.6
wifi0: Use hw queue 1 for WME_AC_BE traffic
wifi0: Use hw queue 0 for WME_AC_BK traffic
wifi0: Use hw queue 2 for WME_AC_VI traffic
wifi0: Use hw queue 3 for WME_AC_VO traffic
wifi0: Use hw queue 8 for CAB traffic
wifi0: Use hw queue 9 for beacons
wifi0: Atheros 5212: mem=0x64000000, irq=11
</pre>
Aquí vemos los cifrados hardware soportados, los <em>rates</em>, etc... también vemos que disponemos de la interfaz <b>wifiX</b> (donde X es un número). Si usáis los <em>madwifi</em> con HAL abierto sólo dispondréis de una interfaz nueva <b>athX</b>. Si tenémos wifi0 también tendremos ath0, ésta es la que debemos usar, la configuramos de la siguiente forma (archivo <tt>/etc/network/interfaces</tt> en caso de Debian/Ubuntu):
<ul><li>Todo automático:
<pre>iface ath0 inet dhcp</pre>
Esto hará que se asocie con el punto de acceso que más calidad de señal tenga y haga ahí sus peticiones dhcp.</li>
<li>Automático a una determinada red:
<pre>
iface ath0 inet dhcp
wireless-essid UCLM
</pre>
Esto buscará la red <em>UCLM</em> y hará ahí sus peticiones DHCP, ejemplos de essid pueden ser <em>motorola</em>, <em>Bianco_</em>, etc.</li>
<li>Automático en una determinada red con cifrado WEP:
<pre>
iface ath0 inet dhcp
wireless-essid vecino
wireless-key <KEY>
</pre>
Se conectará a la red llamada <em>vecino</em> cifrada con esa contraseña (se detecta el tipo de cifrado según la contraseña).</li></ul>
Si las redes no tienen servicio DHCP se debe especificar una dirección, una máscara y una puerta de enlace como con cualquier otro tipo de interfaz.
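A modo de ejemplo, una configuración estática en <tt>/etc/network/interfaces</tt> quedaría más o menos así (las direcciones y el essid son inventados, ajustadlos a vuestra red):
<pre>
auto ath0
iface ath0 inet static
wireless-essid MiRed
address 192.168.1.50
netmask 255.255.255.0
gateway 192.168.1.1
</pre>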
<h2>Configurar Kismet</h2>
Si somos muy curiosos puede ser que queramos tener <em>kismet</em> funcionando. Kismet es una utilidad que permite estudiar redes wifi pero que por si sólo puede ser estudio de futuras recetas. Aquí explicaremos cómo configurarlo para su función con <em>madwifi</em> (los drivers con HAL cerrado). El archivo de configuración especifica <em>sources</em>, una <em>source</em> es un dispositivo inalámbrico capaz de establecerse en modo monitor. La <em>source</em> para el caso de <em>madwifi</em> es la siguiente (archivo <tt>/etc/kismet/kismet.conf</tt>):
<pre>
source=madwifi_g,wifi0,madwifi-ng
</pre>
El primer campo es el driver para la tarjeta, tenemos madwifi_a, madwifi_b, madwifi_g, madwifi_ab y madwifi_ag (consultad la documentación de Kismet y os vienen todos). El segundo campo es el nombre del dispositivo y por último el nombre que asignamos a la <em>source</em>.
Si ahora iniciamos kismet (como <em>root</em> o con un usuario con permisos suficientes), la interfaz cambiará a modo monitor y empezará a capturar paquetes como loco, generará una serie de archivos (descripciones de las redes en xml, etc.) y entre ellos el <em>dump</em> en formato <em>libpcap</em> (que puede ser abierto con <em>wireshark</em>) en el directorio <tt>/var/log/kismet/</tt>.
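Esos volcados se pueden abrir después con cualquier herramienta que entienda libpcap; por ejemplo (el nombre del fichero es orientativo, kismet lo genera con la fecha de la captura):
<div class="console">
{% highlight console %}
$ wireshark /var/log/kismet/Kismet-ejemplo.dump
{% endhighlight %}
</div>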
Hay que decir que kismet realiza un acceso bloqueante a la interfaz, con lo que las conexiones wifi se caerán mientras estéis usándolo. También te avisan de que después de usar kismet es probable que la interfaz deje de funcionar correctamente... pero a mi eso nunca me ha pasado.
<h2>Interfaces para todos</h2>
Otra cosa realmente interesante de <em>madwifi</em> es la posibilidad que nos brinda de crear nuestras propias interfaces wifi "virtuales" mediante la herramienta <em>wlanconfig</em>.
Son muchas las posibilidades que nos ofrece, pero veamos algunas "útiles"...
<ul><li>Crear una interfaz wifi en modo Ad-Hoc:
<div class="console">
{% highlight console %}
$ wlanconfig ath create wlandev wifi0 wlanmode adhoc
{% endhighlight %}
</div>
Usando la interfaz real <em>wifi0</em> nos creará una nueva, llamada <em>athX</em>, donde X es el siguiente número de interfaz disponible (aunque podemos especificarlo nosotros usando ath2, por ejemplo, en vez de ath).</li>
<li>Crear una interfaz en modo monitor:
<div class="console">
{% highlight console %}
$ wlanconfig ath create wlandev wifi0 wlanmode monitor
{% endhighlight %}
</div></li>
<li>Crear una interfaz en modo <em>access point</em> con una MAC diferente a la nuestra (da escalofríos pensar qué se puede hacer con esto...):
<div class="console">
{% highlight console %}
$ wlanconfig ath create wlandev wifi0 wlanmode ap bssid
{% endhighlight %}
</div>
Si coexisten otras interfaces virtuales debemos especificar al final <em>nosbeacon</em> para deshabilitar el uso hardware de los <em>beacon frames</em>.</li></ul>
Éstos son sólo un par de ejemplos... consultad la página <em>man</em> porque te explican hasta cómo hacer un <em>bridge</em> en unos pocos pasos...
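Y cuando una interfaz virtual deje de hacernos falta, podemos eliminarla (ath1 es sólo un nombre de ejemplo):
<div class="console">
{% highlight console %}
$ wlanconfig ath1 destroy
{% endhighlight %}
</div>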
<h2>Conclusiones</h2>
Aún se han quedado un par de cosas en el aire... como por ejemplo la inyección de paquetes (que también puede hacer la tarjetilla) etc. Pero creo que para ir abriendo boca tenemos suficiente. Es una lástima que el <em>HAL</em> esté cerrado porque desde luego el <em>chipset</em> este parece bastante espectacular...
<h2>Enlaces</h2>
<a href="http://madwifi.org/">Multiband Atheros Driver for Wifi</a>
<a href="http://www.kismetwireless.net/">Kismet</a>
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-08-29/software-para-electrnicos-kicad-2-parte.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /art/2007-02-06/arte-linuxero.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-05-18/politonos-gratis.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-26/aplicaciones-multilinges-gettext.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-11-28/catlogo-de-modismos-y-patrones.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2007-01-14/3-padrenuestros-y-5-ave-marias.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2012-11-27/inslacin-de-debian-en-un-asus-ux32v-con-w8-preinstalado.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-07-20/enjuto-mojamuto-no-todos-somos-gonzlez-sinde.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2010-12-30/nace-el-dominio-de-primer-nivel-42.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-03-17/fotografa-final-con-r-stallman.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-03-17/ya-estamos-en-china.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-06-06/upgrade-a-drupal-5-7.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2011-02-04/se-acab-el-ipv4.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-05-17/fallito-de-seguridad-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-12-08/generar-libros-para-devhelp.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2013-02-27/soporte-mejorado-para-el-asus-ux32vd.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-02-20/cairo-usar-un-svg-como-figura-cairo-groups.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-02-21/foros-de-ubuntu-es.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-05-30/ms-amigo-informtico.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-11-02/zeroc-icebox-creacin-de-un-servicio.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-03-10/sabas-que-cp-no-tiene-porqu-borrar-tus-ficheros.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-03-11/de-qu-me-suena.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-08-28/cambiar-la-password-de-active-directory-desde-gnu.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2006-09-21/me-presento.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2012-02-05/pimientos-rellenos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-07-12/instalacin-y-puesta-en-marcha-de-hydra-desde-repo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-01-25/estamos-hartos-de-escribir-mierda.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-10-30/sockets-raw-con-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /art/2007-03-01/access-denied-arte.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-02-08/configurando-backups-en-bacula.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /challenge/2008-09-10/nano-reto-de-programacin-en-bash.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-11-04/instalar-debian-gnu-linux-en-un-compaq-tc-1100.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-12-07/compilar-linux-para-la-tarjeta-arm-mini2440.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-10-05/mercurial-hacer-un-hook-para-prohibir-ficheros-incorrectos.html
---
<file_sep>---
migrated: node/406
layout: post
title: Xplanet como salvapantallas con gnome-screensaver
created: 1161428799
author: fsancho
comments: true
category: recipe
tags:
- gnome
---
<blockquote>
Muchos de los que usamos Ubuntu vimos con poco agrado que se prescindiese del gestor de Salvapantallas XScreensaver. El sustituto fue gnome-screensaver, mucho mas sencillo y con muchas menos funcionalidades.
Con esta mini-receta trataré de devolver la sonrisa a aquellos que dejaron de poder utilizar Xplanet como salvapantallas.
</blockquote>
Aunque gnome-screensaver es mucho mas simplón, no limita las posibilidades. Simplemente están fuera de la interfaz de usuario, pero en la práctica sigue siendo perfectamente posible toquetear en sus tripas. Por ahora solo se puede hacer desde la consola, pero el equipo de gnome-screensaver tiene en mente extender su interfaz para que sea algo mas configurable (ya que a día de hoy no lo es en absoluto).
En el paquete 'xscreensaver-data' podremos encontrar un montón de ficheros con extensión <tt>.desktop</tt>, estos ficheros contienen los lanzadores de los salvapantallas. Lo único que tendremos que hacer es crearnos nuestro propio lanzador y colocarlo en el sitio adecuado. Además tendremos que crear un enlace para Xplanet en el directorio donde se encuentran los salvapantallas.
En primer lugar, creamos un fichero <tt>xplanet.desktop</tt> con este contenido.
<pre>
[Desktop Entry]
Encoding=UTF-8
Name=XPlanet
Comment=This shows random planets from random points of view using Xplanet.
TryExec=xplanet
Exec=xplanet -vroot -label -body random -origin random -radius 20 -range 10 -wait 30
StartupNotify=false
Terminal=false
Type=Application
Categories=Screensaver
X-Ubuntu-Gettext-Domain=xscreensaver
</pre>
La etiqueta <tt>Exec</tt> es la que contiene el comando concreto que deberemos ejecutar para mostrar nuestro salvapantallas. En mi caso muestro un cuerpo aleatorio desde un origen aleatorio y lo cambio cada 30 segundos.
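Antes de instalar el lanzador conviene probar la orden a mano; con la opción <tt>-window</tt> xplanet dibuja en una ventana normal en lugar de en el fondo del escritorio (los parámetros son orientativos):
<div class="console">
{% highlight console %}
$ xplanet -window -body random -origin random -num_times 1
{% endhighlight %}
</div>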
Una vez creado el fichero lo copiamos al directorio <tt>/usr/share/gnome-screensaver/themes/</tt>.
<b>Actualización:</b> Los usuarios de Ubuntu Edgy deben copiar el fichero en el directorio <tt>/usr/share/applications/screensavers/</tt>.
<div class="console">
{% highlight console %}
$ sudo cp xplanet.desktop /usr/share/applications/screensavers/
{% endhighlight %}
</div>
Por último creamos un enlace del binario de Xplanet en <tt>/usr/lib/xscreensaver</tt>
<div class="console">
{% highlight console %}
$ sudo ln -s /usr/bin/xplanet /usr/lib/xscreensaver/
{% endhighlight %}
</div>
¡Y listo! Sólo tienes que ir al menú Sistema->Preferencias->Salvapantallas y seleccionar tu nuevo salvapantallas de Xplanet.
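Si no queremos esperar al tiempo de inactividad para comprobar que funciona, podemos activar el salvapantallas al momento desde una terminal:
<div class="console">
{% highlight console %}
$ gnome-screensaver-command --activate
{% endhighlight %}
</div>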
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-04-24/concurso-de-videotutoriales-en-molinux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-11-16/patrones-todo-lo-que-nunca-quisiste-saber-y-siempre-evitaste-preguntar.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-05-08/10-razones-para-no-usar-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-04-07/gnu-bash-para-programadores-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-11/usar-un-repositorio-subversion.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-08-04/gnu-emacs-como-editor-xml-o-docbook-con-psgml.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-20/montar-un-repositorio-trivial-de-paquetes-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-07-01/repositorio-de-paquetes-debian-serio-bsico-y-cmo-usarlo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-12-23/sql-bsico.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-05-11/molinux-zero-distribucin-gnu-linux-para-equipos-obsoletos-y-con-pocos-recursos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-08-29/wallpaper-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-12-19/la-consejera-de-educacin-y-ciencia-de-clm-apuesta-por-windows.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-02-26/pendrive-cifrado-con-dm-crypt-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-12-15/diferencia-de-sabores.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-12-10/flash-con-firefox.html
---
<file_sep>---
migrated: node/255
layout: post
title: Configurar TV en ubuntu
created: 1142018581
author: fmreja
comments: true
category: enquiry
---
Hi, I have a "beep" laptop with a built-in TV tuner card and, after lots of attempts, manuals and recipes... I have come to the conclusion that I do not know how to configure it to watch TV on my Ubuntu. Has anyone gotten one working?? Could the problem be that it does not have the bt chip??
Thank you very much.
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-02-12/amara-por-fin-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-06-21/ms-canon.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-09-24/configuracin-y-uso-de-pbuilder.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-10-30/gnu-emacs-cambiar-la-configuracin-de-colores.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-10-15/udev-configurando-el-acceso-al-usb-sin-ser-root.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-05-23/apt-build-cuando-debian-huele-a-gentoo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-12/apache2-como-frontal-seguro-para-zope-plone.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-04-02/comunicacin-entre-tu-calculadora-hp4x-y-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-07-12/debian-gnu-linux-en-el-dell-xps-420.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-02-13/activar-compositing-en-gnome-sin-utilizar-compiz-ni-derivados.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-11-05/bridges-de-red-para-virtualbox-y-qemu.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2011-04-12/fase-final-iv-edicin-concurso-univ-de-software-libre-de-clm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-01-11/traceando-cdigo-c-en-los-avr-in-circuit-avarice.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-05/cmo-escribir-una-receta.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-07-14/lirc-con-la-winfast-tv-2000-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-30/webdav-con-apache2-en-debian-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-11-05/mens-y-barra-de-herramientas-dinmicas-en-pygtk.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-07-17/ejecutando-un-mismo-comando-en-varias-mquinas-con-fabric.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-08-10/experimentando-con-active-directory-y-openldap.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-04-18/swig-donde-c-y-python-se-dan-la-mano.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-11-25/convertir-subttulos-de-dvd-a-formato-vobsub.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-03-23/compilando-e-instalando-palib.html
---
<file_sep>---
migrated: node/1336
layout: post
title: problemas con adsl telefónica y debian linux
created: 1267184082
author: epatipati
comments: true
---
Hi, I am new to this; I am trying to become a Debian GNU/Linux user and I am taking it slowly. I have now run into problems connecting to the Internet over Telefónica's ADSL wireless network. I have a Xavi 7968 modem; I have installed wicd and it recognises the wireless network, but when it looks for the IP it reports "Connection failure: unable to obtain an IP address". Telefónica provides no support for Linux (which is pretty poor of them, of course) and I am starting to get desperate. If anyone can help me I would be grateful.
<!--break-->
Regards, Ester.
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-07/ddclient-cliente-de-dns-dinmico-para-todos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-10-09/xgl-y-beryl-en-ubuntu-dapper.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-06/ejemplo-sencillo-de-glacier2-con-c.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-04-12/atajos-en-bash.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-05-13/instalar-debian-gnu-linux-en-sony-vaio-vgn-tx27tp.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-03-26/gnu-emacs-el-get-un-apt-get-para-emacs.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-09-20/print-con-colores-en-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-01-18/el-reto-del-mes-1-edicin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-09-18/distribuir-programas-con-autotools.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-07-09/latex-chuletario-bsico.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-27/ver-ficheros-fuente-coloreados-en-consola.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-12-22/pensar-en-c-herencia-de-interfaces.html
---
<file_sep>---
migrated: node/767
layout: post
title: Real como la vida misma....
created: 1188373829
author: javieralso
comments: true
category: opinion
tags:
- ocio
---
Well, there it is: <a href="http://www.malla20.com/wp-content/uploads/2007/06/linux.jpg">as real as life itself</a>...
<file_sep>---
migrated: node/228
layout: post
title: Gaim 2.0
created: 1140108477
author: cleto
comments: true
category: new
---
The <a href="http://xgn.com.br/fabio/gaim_2.0.0cvs2-1_i386.deb">new (beta) version of Gaim</a> is now available. Truth be told, it does not change much with respect to the previous one, but some new features have been added:
<!--break-->
- More control over connection states (away, not available...)
- Less "strident" sounds
- Animated scrolling in the conversation window
Among others...
Cheers!
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2007-02-25/peras-en-reduccin-de-vino.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-09-20/9-reglas-para-una-mejor-orientacin-a-objetos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-02-19/dobles-de-prueba.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /humor/2009-08-21/just-for-fun.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-12-24/arm-mini2440-configurando-uboot-para-arranque-desde-sd.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-12-09/kit-de-desarrollo-libre-para-ps3.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2007-08-12/diez-o-ms-seales-de-que-no-eres-tan-buen-programador-como-piensas.html
---
<file_sep>---
migrated: node/114
layout: post
title: Instalando HURD
created: 1133011617
author: Consegliere
comments: true
---
Hello everyone! This is the first time I write on a blog (it is not as exciting as sex, but what can you do).
<!--break-->
Have you ever heard of HURD? Well, HURD is the new kernel being developed for the GNU system. It runs on top of GNU Mach, which is the microkernel that is currently stable. HURD is a set of servers that talk to each other through GNU Mach.
I'm trying it out now, because curiosity is killing me (I promise I'll stop poking around when the computer blows up).
The distribution I tried is Bee GNU/Hurd. It's not hard at all to install once you know which commands to run and you know what you're doing at every moment. So, when I decided to try it, I repartitioned the hard drive to have one more partition for HURD. I extracted and unpacked all the files that make up HURD onto the new partition and modified the GNU GRUB boot loader so it could load HURD. Once that was done, I rebooted the computer and it configured itself automatically, then it rebooted again on its own, and when it came back up I could already use it.
Now I have to configure it, and that will take me a while, until I understand how it works. The only problem I had was that pipes didn't work. A big problem, because I could hardly do anything. Then I read on the Internet that it was caused by the format of the HURD partition: when formatting it, you have to tell mke2fs which system it is going to host, that is, you should run: mke2fs -o hurd /dev/hda3
After installing it again, it worked.
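For reference, the formatting step looks like this (/dev/hda3 is just the partition used in this story; adapt it to your own disk layout):
<div class="console">
{% highlight console %}
# mke2fs -o hurd /dev/hda3
{% endhighlight %}
</div>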
Links:
<a href="http://www.gnu.org/software/hurd/hurd.html">GNU HURD</a>
<a href="http://www.gnu.org/software/hurd/gnumach.html">GNU MACH</a>
<a href="http://bee.es.gnu.org/blog/">Bee GNU/HURD</a>
<a href="http://bee.es.gnu.org/blog/Instalacion.html">Manual de instalación de Bee GNU/HURD</a>
<a href="http://bee.nopcode.org/">Repositorio de Bee GNU/HURD</a>
<file_sep>---
layout: refresh
refresh_to_post_id: /art/2007-03-15/nueva-camisetilla.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-05-28/hacer-un-latiguillo-ethernet-cable-cruzado-cross-over.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-09-19/python-time.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-12-10/emulador-remoto-para-android.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2007-03-12/readyboost-destroza-tu-pendrive-con-windows-vista.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-11-22/instalador-de-gnesis-3-0-en-su-versin-0-0-1-alfa-beta-gamma-archi-experimental.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-05-25/kerberos5-ldap-y-errores-frecuentes.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2008-10-24/se-trata-de-un-empecinamiento-en-separar-el-equipo-del-so.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-11-24/creacin-de-un-mdulo-drupal.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-07-25/debian-plone-en-5-minutos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-01-07/tonteras-de-la-propiedad-intelectual.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2007-03-30/cruzar-el-atlntico-a-nado-no-tiene-precio.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-26/solucin-al-fallo-de-glx-en-tarjetas-nvidia-antiguas-legacy.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-04-10/gnu-emacs-ocultar-password-de-root-en-el-modo-shell.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-10-27/teclas-multimedia-en-porttiles-apple-con-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-07-05/stallman-famoso-gracias-a-un-peluche.html
---
<file_sep>#!/bin/bash --
# -*- coding: utf-8; mode: shell-script; tab-width: 4 -*-
# from: http://pauldambra.github.io/using-travis-to-build-jekyll.html
DEPLOY_REPO="https://${DEPLOY_TOKEN}@github.com/CRySoL/CRySoL.github.io.git"
function clean {
echo "cleaning _site folder"
[ -d "_site" ] && rm -Rf _site
}
function get_current_site {
echo "getting latest site"
git clone --depth 1 $DEPLOY_REPO _site
}
function build_site {
echo "building site"
bundle exec jekyll build
}
function deploy {
echo "deploying changes"
if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then
echo "except don't publish site for pull requests"
exit 1
fi
if [ "$TRAVIS_BRANCH" != "master" ]; then
echo "except we should only publish the master branch. stopping here"
exit 1
fi
cd _site
git config --global user.name "<NAME>"
git config --global user.email <EMAIL>
git add -A
git status
git commit -m "Latest site built on successful travis build $TRAVIS_BUILD_NUMBER auto-pushed to github"
git pull
git push $DEPLOY_REPO master:master
}
clean
get_current_site
build_site
deploy
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-01-23/zeroc-ice-persistencia-de-sirvientes-con-freeze-evictor.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2007-02-16/aprende-punteros-con-binky.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-02-15/instalar-un-servidor-jabber-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-08-14/prcticas-de-programacin-infames-openjdk.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2007-05-15/conferencia-sobre-software-libre.html
---
<file_sep>---
migrated: node/993
layout: post
title: 'Python: merge lists'
created: 1219870243
author: david_villa
comments: true
category: script
tags:
- Python
---
A simple utility function to merge several lists into a single one, using functional programming tools:
<!--break-->
<div>
{% highlight python %}
def merge(*input):
return reduce(list.__add__, input, list())
{% endhighlight %}
</div>
Example:
<div>
{% highlight python %}
>>> a = [0, 1, 2]
>>> b = [2, 3, 4]
>>> c = [4, 5, 6]
>>> merge(a, b, c)
[0, 1, 2, 2, 3, 4, 4, 5, 6]
{% endhighlight %}
</div>
The same but removing duplicates:
<div>
{% highlight python %}
def merge_uniq(*input):
return list(reduce(set.union, input, set()))
{% endhighlight %}
</div>
The same but removing duplicates later:
<div>
{% highlight python %}
def merge_uniq(*input):
    return list(set(merge(*input)))
{% endhighlight %}
</div>
Other (easier) ways to achieve the same?
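For what it's worth, one more way that was not in the original post, using `itertools.chain` (it avoids building an intermediate list on every concatenation):
<div>
{% highlight python %}
from itertools import chain

def merge_chain(*input):
    # flatten all the given lists into one
    return list(chain(*input))

def merge_chain_uniq(*input):
    # same, dropping duplicates (order is not preserved)
    return list(set(chain(*input)))
{% endhighlight %}
</div>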
<file_sep>---
layout: refresh
refresh_to_post_id: /art/2007-03-15/prueba-de-logo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-09-22/obtener-permisos-de-root-en-el-htc-magic-g2.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2008-03-28/espaguetis-ajo-aceite-y-guindilla.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-09-14/python-scapy_1-1-1-3-en-debian-unstable.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-12-14/enviar-correo-no-ascii-desde-un-programa-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-09-08/sub-repositorios-en-mercurial-con-forest.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-08-04/atmel-y-el-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2012-01-25/acelerar-velocidad-del-emulador-de-android.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-05-04/sudoers-o-cmo-evitar-que-sudo-te-pida-contrasea.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-12-21/propietarios-de-ideas-nos-toman-el-pelo.html
---
<file_sep>---
layout: post
title: "Plugins en Python"
author: magmax
category: recipe
tags: python
lang: es
---
I've always liked writing extensible applications, but hand-rolling a whole plugin system has always struck me as tedious.
That's why I've tried to find libraries to help me create plugins, although I've always had trouble distributing them, as [happened to me with Yapsy](http://magmax.org/blog/yapsy-un-sistema-de-plugins-pythonico).
The truth is I felt like a complete idiot when I discovered that Python has a plugin-writing mechanism that is very easy to use. Let's see how.
**You can find the original article at: [MagMax Blog](<http://magmax.org/blog/plugins-en-python/>)**.
<!--break-->
What I'm going to do
--------------------
Basically, we're going to build a small application and two plugins.
Depending on an option, one or the other will be used.
Structure:
```
.
├── app
│ └── app.py
├── plugin1
│ ├── plugin1
│ │ └── __init__.py
│ └── setup.py
└── plugin2
├── plugin2
│ └── __init__.py
└── setup.py
```
Plugin 1
--------
Let's start by writing a plugin. It's going to be something very simple. To
do so, we create two files; the first one will be the plugin itself, in the
file `plugin1/plugin1/__init__.py`:
{% highlight python %}
def example():
print("I'm plugin one")
{% endhighlight %}
And here comes the trick, in the file `plugin1/setup.py`:
{% highlight python %}
from setuptools import setup, find_packages
setup(
name='plugin1',
version='0.0.6',
description="This is the plugin 1",
packages=find_packages('.'),
entry_points={
'plugin_system': 'example = plugin1:example'
},
)
{% endhighlight %}
I've cut it down to the minimum. The important part is `entry_points`,
since I'm defining an *entry point* called `plugin_system` that,
basically, binds the function above to a name.
With that, we already have the plugin. Let's build it (to save ourselves
trouble, we'll build it as `source`):
<div class="console">
{% highlight console %}
$ python setup.py sdist
{% endhighlight %}
</div>
which will generate the file `dist/plugin1-0.0.6.tar.gz`. The version is
0.0.6 because 0.0.1 was too dull XD
Application
-----------
Now for the main application (`app/app.py`):
{% highlight python %}
import argparse
import pkg_resources
def main():
parser = argparse.ArgumentParser(description='Loads a plugin')
parser.add_argument('action', choices=['run', 'list'],
help='action to be performed')
parser.add_argument('-p', '--plugin',
help='plugin to be loaded')
args = parser.parse_args()
if args.action == 'list':
full_env = pkg_resources.Environment()
dists, errors = pkg_resources.WorkingSet().find_plugins(full_env)
for dist in dists:
if 'plugin_system' in dist.get_entry_map():
print(' %s (%s)' % (dist.project_name, dist.version))
elif args.action == 'run':
requirement = pkg_resources.Requirement(args.plugin)
plugin = pkg_resources.WorkingSet().find(requirement)
example = plugin.load_entry_point('plugin_system', 'example')
example()
if __name__ == '__main__':
main()
{% endhighlight %}
As you can see, I make heavy use of `pkg_resources`. We can try
listing the installed plugins:
<div class="console">
{% highlight console %}
$ python app/app.py list
$
{% endhighlight %}
</div>
And we get nothing. Of course, the plugin still has to be installed. To do
that, we simply use `pip`, but I'm going to create a `virtualenv` so I
don't muck up my system:
<div class="console">
{% highlight console %}
$ virtualenv venv
[...]
$ . venv/bin/activate
(venv) $ pip install plugin1/dist/plugin1-0.0.6.tar.gz
[...]
(venv) $ python app.py list
plugin1 (0.0.6)
(venv) $
{% endhighlight %}
</div>
Much better. Now let's run it:
<div class="console">
{% highlight console %}
(venv) $ python app.py run -p plugin1
I'm plugin one
(venv) $
{% endhighlight %}
</div>
Plugin2
-------
The astute reader will have no trouble creating it from plugin1 :)
What's more, you can create as many as you like XD
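A minimal sketch of what it could look like (version number and description are illustrative; the `plugin_system` entry point is the part that matters):

`plugin2/plugin2/__init__.py`:

{% highlight python %}
def example():
    print("I'm plugin two")
{% endhighlight %}

`plugin2/setup.py`:

{% highlight python %}
from setuptools import setup, find_packages

setup(
    name='plugin2',
    version='0.0.1',
    description="This is the plugin 2",
    packages=find_packages('.'),
    entry_points={
        # same group as plugin1, so app.py discovers it the same way
        'plugin_system': 'example = plugin2:example'
    },
)
{% endhighlight %}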
Notes
-----
The nice thing is that we can have more than one `entrypoint`, or group
them by key.
With this I no longer need [Yapsy](http://yapsy.sourceforge.net/) or
[PluginBase](http://pluginbase.pocoo.org/) or any other convoluted
system.
For more information, you can read [Dynamic Discovery of Services and
Plugins](http://setuptools.readthedocs.io/en/latest/setuptools.html#dynamic-discovery-of-services-and-plugins).
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2011-05-29/proyecto-colaborais.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-01-03/creando-tneles-tcp-ip-port-forwarding-con-ssh-los-8-escenarios-posibles-usando-openssh.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-10-21/configuracion-wifi-de-la-uclm-en-ubuntu-5-10.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /script/2008-03-23/transferencia-va-wifi-para-nintendo-ds-sends-3-0.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-08-11/intro-3k-de-crysol.html
---
<file_sep>---
migrated: node/578
layout: post
title: Avances en GNESIS 3.0
created: 1172149185
author: cleto
comments: true
category: new
tags:
- GNESIS
---
Hi everyone!
<!--break-->
GNESIS 3.0 is getting ever closer to release. The latest news about its development:
<ul>
<li>GNESIS 3.0 is going to be a <b>persistent</b> Live system. Roughly speaking, this means that GNESIS boots as a Live system but gives us the chance to store what we do and what we install permanently. GNESIS is therefore designed so that you can carry your operating system anywhere on your USB stick or external hard drive. It's the same idea as Molinux Nómada but better, since not only <em>/home</em> is persistent.</li>
<li>As you already know, GNESIS 3.0 is based on Debian (specifically, on the <a href="http://debian-live.alioth.debian.org/">Debian Live</a> project). The branch is <em>unstable</em>, so GNESIS will stay "up to date" with Debian's applications and services.</li>
<li>Besides, GNESIS has a documentation repository that will include slides and other material to follow the courses. If the class slides change, you'll only have to update your system to keep the documentation current.</li>
<li>The desktop theme has been improved. GNESIS 3.0 changes the theme shipped in previous versions in a "radical" way. Thanks to Tobías, who is doing a magnificent job, we'll have a completely hand-drawn environment.</li>
</ul>
However, we need to know what you miss in a distribution like GNESIS (aimed at the ESI, that is, at Computer Science students). Leave a comment with your proposal.
Thanks for your collaboration.
<file_sep>---
migrated: node/665
layout: post
title: "¿se puede hacer dinero con el software libre?"
created: 1177937405
author: aledelgal
comments: true
category: opinion
---
Poking around the Internet I came across an <a href="http://freetrans.blogspot.com/2007/04/se-puede-hacer-dinero-con-el-open.html">article</a> that discusses this topic. I found it curious, heh.
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-07-19/primeros-pasos-con-mysql.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-12-05/gnu-linux-y-nuestra-amada-psp.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2010-11-07/page-speed.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2008-01-16/jornada-tcnica-molinux-en-la-esi-de-ciudad-real.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-21/programacin-de-la-psp-intros-multimedia.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-01-25/microsoft-aprieta-el-culo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /humor/2008-08-11/microsincity.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-04-19/soy-un-albail.html
---
<file_sep>---
migrated: node/1475
layout: post
title: Campaña para la candidatura de la Comunidad del Software Libre a los Premios
Príncipe de Asturias
created: 1298237730
author: cleto
comments: true
category: new
tags:
- proselitism
---
The full story at: <a href="http://www.publico.es/ciencias/361752/un-principe-para-el-software-libre">http://www.publico.es/ciencias/361752/un-principe-para-el-software-libre</a>
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-12-21/dibujemos-matrices-fcilmente-con-latex.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2005-12-16/ha-salido-molinux-2-0.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-08-15/zeroc-ice-recogiendo-estadsticas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-23/compartir-ficheros-con-nfs.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-09-26/zeroc-icegrid-gua-rpida-i.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-11-04/problemillas-con-xorg.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2006-11-03/propuesta-crysol-en-podcast.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2010-11-16/jarabe-de-limn-con-miel.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-02-15/estamos-en-obras.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-05-28/estadsticas-crysol.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-01-13/bridges-y-filtrado-de-protocolos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-12/configurar-parchear-cacharrear-y-compilar-un-linux-fcilmente.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-07-17/introduccin-al-proceso-software-personal-humphrey.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-05-29/a-qu-huele-el-cdigo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-11-24/mapnik-visualizador-gis-para-python-y-c.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-11-16/troll-el-mafioso.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-05-15/mercurial-por-favor.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /script/2010-01-04/escaneando-a-pdf-con-python-y-sane.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-03-26/arco-devel-toggle-split.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-09-11/zeroc-ice-tareas-peridicas-en-un-servidor.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-11/creacin-de-paquetes-debian-binarios.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-03-03/el-ceslcam-presenta-nuevos-cursos-de-openoffice-en-su-plataforma-e-learning.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2008-02-18/crysol-gnu-install-party-v3-2.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-05-09/felicidades-david.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-08-02/configura-el-termmetro-de-tu-pc-con-lm-sensors.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2009-12-04/modificacion-de-iso-ubunutu-910.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-10-13/mini-tutorial-de-oo-con-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2008-11-07/mermelada.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2009-10-11/qu-se-puede-hacer-en-crysol.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-12-05/dlxview-un-emulador-para-arquitecturas-dlx.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-15/subversion-server-side-scripting-hooks.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-02-08/hook-mercurial-para-integracin-con-hudson.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2010-09-24/iv-concurso-universitario-de-software-libre-de-castilla-la-mancha.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-07-28/recordar-fechas-y-cumpleaos-con-birthday.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2010-02-01/denunciar-al-estado-espaol-por-el-monopolio-de-las-entidades-de-gestin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-02-24/gnesis-2-0-rc1-disponible-para-descarga.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-06-29/los-titulados-espaoles-son-los-que-menos-cobran-de-europa.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-20/gnu-linux-y-nintendo-ds.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-04-11/nueva-versin-de-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2012-10-26/asyncronous-spy-assertions-with-python-doublex.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-04-18/videolan-streaming.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2008-03-07/dotbf-necesita-tu-ayuda.html
---
<file_sep>---
migrated: node/383
layout: post
title: Preguntas que cambiarán el mundo.
created: 1156337594
author: magmax
comments: true
category: tale
---
<blockquote>
There is a series of existential questions that will change the world. Beyond the well-known "who are we?", "where do we come from?" and "where are we going?", we are left with "how far does human stupidity go?" or, simply, "how funny can ignorance be?". Well then... let's focus on the last question and tackle it with humor. Please don't mention the sources, in case they feel offended, and if at some point the person alluded to happens to read this, may they take it with a laugh and good humor :-D
</blockquote>
<!--break-->
<ul>
<li> And that Linux thing... is it a program for Windows? (a kid under 15)</li>
<li> Yes, yes, but... how much room does Linux take up in my Windows? (the same kid as before)</li>
</ul>
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-10-02/infoglobal-presentacin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-04-04/the-poors-man-dropbox-thing.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-06-08/comprar-billete-de-tren-renfe-eligiendo-asiento-con-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-07-16/foxg20.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2009-05-10/software-garantizado.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-01-30/conectar-remotamente-a-una-cmara-axis-211w-mediante-opencv-en-ubuntu-9-04.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-07/sistema-hbrido-paquetes-de-distintas-releases-con-apt.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-18/cambiar-la-direccin-mac-de-una-tarjeta-de-red.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-04-19/linuxalbacete-organiza-las-ii-jornadas-de-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-09-14/lego-mindstorms-nxt-programar-con-nxc.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-05-23/marcar-y-clasificar-trfico-con-iptables-y-tc.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /challenge/2009-10-25/barrer-la-escoria-en-la-red.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-06-25/compartir-ficheros-con-samba-cifs.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-05-24/kolofonium-activando-ssh-en-la-fonera.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-09-23/acabo-de-registrarme-en-el-portal.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /script/2006-04-10/script-para-escuchar-emisoras-de-radio-por-internet.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-02-05/firefox-y-flash.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-01-04/kans.html
---
<file_sep>---
migrated: node/798
layout: post
title: 'ZeroC IceBox: Creación de un servicio'
created: 1194003900
author: cleto
comments: true
category: recipe
tags:
- networking
- Ice
- Arco
---
<blockquote>The following recipe explains how to create a service for IceBox. The example is written in C++; however, it applies to all the languages supported by ZeroC-Ice. It's just a slight change of syntax... :-P</blockquote>
<!--break-->
<h2>Introduction</h2>
IceBox is an Ice service that lets you control a set of running services. Whether you need this feature depends on the application, but it is useful when we want several services running on the same machine.
There is a class called IceBox::Service. Among many others, this class has the following methods:
<ul>
<li><b>start</b>: code executed when the service starts.</li>
<li><b>stop</b>: code executed when the service stops.</li>
</ul>
<h2>A "Hello World" IceBox service</h2>
First of all, we need our hello.ice interface:
<pre>module UCLM {
interface Hello {
void puts(string str);
void shutdown();
};
};</pre>
... and its corresponding implementation (helloI.cpp):
<pre>#include <Ice/Ice.h>
#include <helloI.h>
/*
 * This file was initially generated with:
 *
 * $ slice2cpp --impl hello.ice
 *
 * and later modified
 */
using namespace std;
void
UCLM::HelloI::puts(const ::std::string& str,
const Ice::Current& current){
cout << "SERVER: Se recibirá el valor: " << str << endl;
}
void
UCLM::HelloI::shutdown(const Ice::Current& c){
cout << "Shutting down..." << endl;
c.adapter->getCommunicator()->shutdown();
}</pre>
Once both components are created, you can start with IceBox::Service. First, you have to define the class of the service you're going to implement. For instance, the contents of HelloService.h would be:
<pre>#ifndef HELLO_SERVICE_I_H
#define HELLO_SERVICE_I_H
#include <IceBox/IceBox.h>
#ifndef HELLO_API
# define HELLO_API ICE_DECLSPEC_EXPORT
#endif
class HELLO_API HelloService : public ::IceBox::Service{
public:
virtual void start(const ::std::string&,
const ::Ice::CommunicatorPtr&,
const ::Ice::StringSeq&);
virtual void stop();
private:
::Ice::ObjectAdapterPtr _adapter;
};
#endif</pre>
And the implementation:
<pre>#include "helloI.h"
#include "HelloService.h"
using namespace std;
extern "C"{
HELLO_API ::IceBox::Service*
create(::Ice::CommunicatorPtr communicator){
return new HelloService;
}
}
void
HelloService::start(const string& name,
const ::Ice::CommunicatorPtr& communicator,
const ::Ice::StringSeq& args){
_adapter = communicator->createObjectAdapter(name);
::Ice::ObjectPtr object = new UCLM::HelloI;
_adapter->add(object,communicator->stringToIdentity("ServiceHola"));
_adapter->activate();
cout << "** Activación del Servicio." << endl;
cout << object << endl;
}
void
HelloService::stop(){
_adapter->deactivate();
cout << "** Desactivación del Servicio" << endl;
}</pre>
Some remarks about the code above:
<ul><li>The first block of code is due to how IceBox is implemented. When IceBox calls create, an object of the created service must be returned.</li>
<li>The <b>start</b> method takes the following arguments:</li>
<ul><li><em>name</em>: the service name. Very handy for reading your service's properties from a configuration file ;-).</li>
<li><em>communicator</em>: the associated communicator.</li>
<li><em>args</em>: the parameters passed to your service as a <em>StringSeq</em>.</li></ul>
<li><b>stop</b> simply deactivates the object adapter.</li></ul>
The two methods implemented here are the only ones strictly required to create an IceBox service. It is advisable, however, to provide a class destructor as well.
<h2>Building and installing</h2>
The build target is not an executable (you'll have noticed there is no main anywhere). Because of that, you need the compiler option -lIceBox (to link against the IceBox library) and the -shared option (to build the output as a shared library).
By convention, the output file of the build should be named lib"service".so. In our case, <em>libHelloService.so</em>.
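As an illustrative sketch (not in the original recipe): assuming the Slice files were generated with slice2cpp, the build could look roughly like this; the exact include and library paths depend on your Ice installation:
<div class="console">
{% highlight console %}
$ slice2cpp hello.ice
$ g++ -shared -fPIC -I. hello.cpp helloI.cpp HelloService.cpp \
      -lIce -lIceUtil -lIceBox -o libHelloService.so
{% endhighlight %}
</div>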
So that IceBox can link the "Hello World" service at run time you have several alternatives:
<ul>
<li>Create/modify the LD_LIBRARY_PATH environment variable.</li>
<li>Copy the file into /usr/lib (a bit dirtier :-P).</li>
</ul>
<h2>Configuring and starting IceBox</h2>
Once you have the shared library (the IceBox service) built and everything reachable, you have to configure the IceBox service (icebox.config file):
<pre>IceBox.ServiceManager.Endpoints=tcp -p 10000
IceBox.UseSharedCommunicator.HelloService=1
IceBox.Service.HelloService=HelloService:create
HelloService.Proxy=ServiceHola:tcp -p 9999
HelloService.Endpoints=tcp -p 9999</pre>
Finally, we run our service with IceBox using the command:
<div class="console">
{% highlight console %}
$ icebox --Ice.Config=icebox.config
{% endhighlight %}
</div>
<h2>References</h2>
<ul>
<li>The ZeroC-Ice manual (available with apt-get install zeroc-ice-manual, or with aptitude install --please-i-only-want-a-manual zeroc-ice-manual :-P).</li>
<li><a href="#">Recipe on dynamic libraries</a>.</li>
</ul>
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-01-27/el-efecto-2038.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2014-03-06/arrancar-grub-en-un-equipo-con-uefi.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-03-31/da-del-documento-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-10-21/servidor-de-rdp-o-algo-parecido-para-gnu-linux.html
---
<file_sep>---
migrated: node/1397
layout: post
title: Quinto aniversario
created: 1284043522
author: CRySoL
comments: true
category: event
---
On a day like today, but 5 years ago now, the CRySoL website started running (I don't dare call what we had before a website).
During this time CRySoL's "community" has had its ups and downs, but in spite of everything, and thanks to the effort of a few (fewer than we'd like), we're still here, standing at the ready. Proof of that are the millions of visits (which we don't count), the more than 1000 public posts and 498 recipes (it would have been nice to reach 500 :-P).
<!--break-->
As admin (or one of them) I'd like to personally thank (in fact I will :-)) all of them for their collaboration and interest: magmax, int-0, javieralso, cleto, oscarah, fsancho, lk2, nacho, paco, toledanus, morcy and many others who have also contributed, mainly in the form of recipes.
Congratulations, and good luck to everyone in keeping this little group of free software users called CRySoL alive.
PS: Even if it sounds bad, we should discuss this over a few beers, shouldn't we?
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-08-02/referencia-rpida-de-lvm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-11-06/alternativas-a-parse_launch.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-23/soporte-para-at90usb1287-y-otros-avr-nuevos-con-gcc.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-11-26/es-la-programacin-un-arte.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-04-20/openoffice-writer-gana-batalla-a-microsoft-word.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-01-15/ii-curso-online-de-java-ceslcam.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /script/2007-03-05/bsqueda-recursiva-con-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-05-14/pruebas-unitarias-c-con-el-plugin-cxxtest-de-atheist.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2010-02-08/el-presidente-de-telefnica-quiere-cobrar-por-el-trabajo-de-los-dems.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-10-24/gracias-desde-muy-lejos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /script/2009-09-01/arreglar-la-gua-docente-2009-2010.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-02-17/cairo-using-a-svg-as-a-shape-or-cairo-groups.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2008-12-12/copyright-amazing-adventures.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-12-23/ubuntu-menos-tonteras-y-ms-seriedad-por-favor.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-06-06/defectuoso-por-diseo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-02-26/borrado-a-conciencia-de-ficheros-y-particiones-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-06-23/la-uclm-llega-a-ubuntu.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2011-01-12/si-los-ingenieros-de-tu-empresa-son-unos-intiles-contrata-buenos-abogados.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-09-26/instalacin-local-de-mambo-joomla-bajo-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-11-17/edicin-de-video-con-gpl-s-lives.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-01-10/papanatas-del-software-propietario.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-04-29/dijexcr-gnu-linux-para-la-pyme.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /challenge/2006-01-17/el-reto-de-la-semana-2-edicin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-03-29/usuario-de-evolution-harto-de-spam.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-07-08/bacula-comandos-de-bajo-nivel.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-08-09/patrones-de-diseo-en-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-04-14/recordmydesktop-drag-and-drop.html
---
<file_sep>#!/bin/bash --
# -*- mode:shell-script; coding:utf-8; tab-width:4 -*-
ruby -rubygems -e 'require "/home/vagrant/drupal-import/lib/jekyll/jekyll-import/drupal6"; JekyllImport::Drupal6.process("drupal", "root", "")'
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-10-23/ampliado-el-plazo-de-inscripcin-en-el-concurso-univ-de-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-03-30/no-comprar-computadoras-hp.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-13/imprimir-en-una-winprinter-compartida-por-un-ms-windows.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-07/arrancar-como-root-sin-conocer-la-clave-usando-grub.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-04/el-gestor-de-arranque-grub.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2006-10-24/creacin-de-un-loap.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-10-10/latex-creacin-de-tablas-de-forma-sencilla.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-09-29/la-virgulilla-est-muerta.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-12-26/formatos-simples-xml-yaml-json-properties-e-inis.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-12-05/new-debian-developer.html
---
<file_sep>---
migrated: node/995
layout: post
title: Utilizar un servidor FTP remoto como backup
created: 1220633993
author: cleto
comments: true
category: recipe
tags:
- shell
- security
---
<blockquote>If you have a remote hosting service with FTP access, you'll probably want to upload backup files to it. This recipe explains how to do it with a short BASH script.</blockquote>
<!--break-->
<h2>The script</h2>
The proposed example is a very simple task (copying a text file to the remote FTP). However, it can serve as a basis for more complex tasks.
Since we're doing administrative work, it's advisable to be <b>root</b> when creating the script. Besides, the file contains private information (such as the FTP user and password), so it must have the proper permissions so that unauthorized users can NOT see that information.
<div class="console">
{% highlight console %}
#!/bin/bash
HOST="ftp.ejemplo.com"
USER="ejemplo"
PASSWD="<PASSWORD>"
SRC="/home/user"
#Local directory for the backup copies
BACKUP="/home/user/backup"
#Remote directory where the backup copies will go
REMOTE="/backup"
cd $BACKUP
#Copy the file and tag it with the date
cp $SRC/mi_archivo_importante.txt mi_archivo_importante.txt.$(date +%Y%m%d)
echo -e "---- BACKUP $(date +%Y%m%d) ----\n"
ftp -n -v $HOST << EOT
user $USER $PASSWD
binary
prompt
cd $REMOTE
mput mi_archivo_importante.txt.$(date +%Y%m%d)
bye
bye
EOT
{% endhighlight %}
</div>
Save the file as "backup.sh" in the /sbin/ directory and set the appropriate permissions on it:
<div class="console">
{% highlight console %}
# chmod 700 /sbin/backup.sh
{% endhighlight %}
</div>
Now all that's left is to update cron so that this task runs at a given time. For example, let's have it launched every day at 12:00. While we're at it, we'll send the output to a log file (/home/user/backup.log) so we can glance at it and check that everything is going fine.
So, edit the file (/etc/crontab) with your favorite editor, that is:
<div class="console">
{% highlight console %}
# emacs /etc/crontab
{% endhighlight %}
</div>
And add the following line:
<pre>00 12 * * * root /sbin/backup.sh >> /home/user/backup.log </pre>
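Not in the original recipe, but before waiting for cron it doesn't hurt to run the script by hand once and have a look at the log (paths as configured above):
<div class="console">
{% highlight console %}
# /sbin/backup.sh >> /home/user/backup.log
# tail /home/user/backup.log
{% endhighlight %}
</div>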
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-25/programacin-de-microcontroladores-pic-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-11-29/100mbps-100mbps-200mbps-port-trunking-a-k-a-bonding-ports.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-09/hacking-serio-bsico-introduccin-a-los-shellcodes-i.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-12-21/babel-cuadro-o-tabla.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-12-16/restart-your-system.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-10-08/gua-rpida-para-creacin-de-paquetes-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-11-03/cherokee-y-https.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2005-11-02/crysol-install-party.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-13/adios-al-spam-con-spamassassin-y-sylpheed-claws.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-03-09/vota-microsoft-la-realidad-patente.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-11-23/ser-linuxero-est-de-moda.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-12-12/alsa-audio-loopbacks-o-cmo-capturar-el-audio-que-reproduzco.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-02-02/advertencia-desde-el-futuro.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-12/tneles-canales-y-puertos-con-ssh.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-06-30/configurar-glacier2.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /challenge/2008-09-06/micro-reto-de-la-semana.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-11-12/enviando-correo-a-golpe-de-protocolo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-04-28/linux-es-el-cncer-de-amrica.html
---
<file_sep>---
migrated: node/166
layout: post
title: "¿Linux en un smartphone?"
created: 1135623014
author: aledelgal
comments: true
category: tale
---
Hi, I was curious about something:
<NAME> has brought me one of those Nokia phones that play mp3 and everything, specifically the 6630. It runs the Symbian 8.0 OS (or 7.0, I'm not sure right now).
Could Linux be installed on it, or is that impossible? Just asking, because I think they even void your warranty if you do that.
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-01-21/configuracin-del-chipset-intel-82801h-con-alsa.html
---
<file_sep>---
migrated: node/172
layout: post
title: 'Sistema Híbrido: Paquetes de distintas "releases" con apt'
created: 1136630967
author: magmax
comments: true
category: recipe
---
<blockquote>
Have you never wished you could try a program from a more unstable distribution, knowing you can go back? Have you never had to fall back to an earlier version of some package? Well, there is an easier way, and here I explain how.
</blockquote>
<h2>Assumptions</h2>
We'll assume we have an "unstable" system installed, but we want access to some packages from "experimental" and... well, while we're at it, also from "stable" and "testing", but with lower priority.
<h2>Setting the default release</h2>
Edit the file <tt>/etc/apt/apt.conf</tt> (create it if it doesn't exist) and add the line:
<pre>APT::Default-Release "unstable";</pre>
<h2>Configuring the other distros</h2>
APT now knows which distro we want installed, but we have to tell it there's a wider world out there. For that, the file <tt>/etc/apt/preferences</tt> is used:
<pre>Package: *
Pin: release o=Debian,a=unstable
Pin-Priority: 900
Package: *
Pin: release o=Debian,a=experimental
Pin-Priority: 400
Package: *
Pin: release o=Debian,a=testing
Pin-Priority: 300
Package: *
Pin: release o=Debian,a=stable
Pin-Priority: 200
Package: *
Pin: release o=Ubuntu
Pin-Priority: -1
</pre>
<b>NOTE:</b> No, I haven't tested it yet.
I should warn you that the priorities are rather peculiar, so be a bit careful:
<ul>
<li><b>< 0</b>: The version will never be installed.
</li>
<li><b>0 - 100</b>: The package will not be replaced by a newer version. Installed packages have priority 100.
</li>
<li><b>101-500</b>: The version is installed if it is newer than the existing one and there is no candidate in the target release.
</li>
<li><b>501-1000</b>: The version is installed even if it is not in the default distro.
</li>
<li><b>1000:</b> The version will be installed at all costs, even if it is older than the current one.
</li>
</ul>
Typically the values -1, 100, 500, 900 and 1001 are used.
Blocking Ubuntu is not out of spite: mixing its packages often causes trouble. Everyone is free to do as they please.
<h2>Editing sources.list</h2>
Of course, we still have to edit sources.list and add all the repositories we fancy. We already know that nothing "strange" will get installed (as long as you don't add "strange" repositories).
<h2>How to use it</h2>
Very easy: for packages from the default distribution, as always ("lo-que-sea" is just a placeholder for whatever package you want):
<div class="console">
{% highlight console %}
apt-get install lo-que-sea
{% endhighlight %}
</div>
For the rest, also easy (where I write "experimental", put the name of the distro):
<div class="console">
{% highlight console %}
apt-get install -t experimental lo-que-sea
{% endhighlight %}
</div>
All of them get "updated" at once by the usual method.
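Not part of the original recipe, but a quick way to check that APT picked up the pins is to ask it directly (the second command shows the candidate version and priority of a given package):
<div class="console">
{% highlight console %}
$ apt-cache policy
$ apt-cache policy lo-que-sea
{% endhighlight %}
</div>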
<h2>Other references</h2>
I was about to write this recipe based on an example I had lying around somewhere. I haven't found the example, but I did find a document titled <a href="http://www.ubuntu-es.org/node/595">Jugando con APT</a>, which explains how to do the same on Ubuntu.
The <a href="http://www.debian.org/doc/manuals/apt-howto/ch-apt-get.es.html">APT manual</a> and a little document I found titled <a href="http://www.argon.org/~roderick/apt-pinning.html">Using APT with more than 2 sources</a> are also interesting, the latter in English, of course.
<ul>
<li><a href="http://www.esdebian.org/articulos/24672/apt-pinning">http://www.esdebian.org/articulos/24672/apt-pinning</a></li>
</ul>
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-06-18/python-y-gtk3.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-11-26/crear-deb-a-partir-de-tar-gz.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-05-15/installing-omnet-on-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-21/openoffice-org-scripting.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-06-05/gnu-emacs-cambiar-fcilmente-entre-el-c-y-el-h.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-03-11/redimensionar-una-particin-con-gparted-live-cd.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-11/gp2x-primer-contacto.html
---
<file_sep>---
migrated: node/739
layout: post
title: Instalar GNU/Linux+Cell SDK 2.1 en PlayStation 3
created: 1184422714
author: KO_mOd_O
comments: true
category: recipe
---
<blockquote class="head">
This recipe aims to provide a guide for installing a GNU/Linux distribution together with the SDK for the Cell processor on PlayStation 3.
</blockquote>
<h2>Introduction</h2>
At the heart of PlayStation 3 is the Cell processor. This technological revolution was initially developed for PlayStation 3 by Sony Computer Entertainment, Toshiba and IBM. However, the real goal of this project is to obtain a processor with great computing power at a low cost.
<h2>The Cell architecture</h2>
Cell is an architecture aimed at high-performance distributed computing. According to IBM, the power offered by this processor is an order of magnitude greater than that of current processors. Compared with graphics processing units (GPUs), Cell offers an important advantage in that it is a general-purpose tool that can be used for a wide variety of tasks.
One of the main features of the Cell architecture is scaling at the processor level. Even so, a single chip provides a great deal of computation by itself (256 GFLOPS at 4 GHz).
The architecture is made up of the following elements:
<ul>
<li>1 PPE (Power Processor Element).</li>
<li>8 SPEs (Synergistic Processor Elements).</li>
<li>EIB (interconnect bus).</li>
<li>DMAC (memory access controller).</li>
<li>2 Rambus XDR memory controllers.</li>
<li>Rambus FlexIO interface (input/output interface).</li>
</ul>
The core of the processor is the PPE, a conventional microprocessor whose main task (although it can be used for other purposes) is to distribute work among the SPEs, which carry out most of an application's computation. Although it can run at a higher speed, the Cell chip used in PlayStation 3 runs at 3.2 GHz and has 7 SPEs available.
The PPE is a 64-bit processor with a <em>Power Architecture</em>. This kind of processor is not used in personal computers and is able to run <em>POWER</em> and <em>PowerPC</em> binaries.
An SPE can be regarded as an independent 128-bit vector processor. Each SPE is able to execute 4 32-bit operations per cycle. To take advantage of the Cell when developing applications, programs need to be <em>vectorized</em>.
There are many more design decisions in the Cell architecture that make it very different from the conventional architectures used in personal computers, making it a revolution rather than just one more evolutionary step [Blachford_Cell_v2].
<h2>Ingredients</h2>
<ul>
<li>1 PlayStation 3 with <em>firmware</em> version 1.6 or higher.</li>
<li>1 blank CD or DVD to burn the operating system to install.</li>
<li>1 blank CD.</li>
<li>1 USB <em>pendrive</em>.</li>
<li>1 keyboard and a mouse with USB connection.</li>
<li>1 wired Internet connection.</li>
</ul>
Currently, several distributions can be installed on PlayStation 3, such as <a href="http://www.terrasoftsolutions.com/products/ydl">Yellow Dog Linux</a>, <a href="http://www.helios.de/support/ps3_install.phtml">Helios Linux</a>, <a href="http://www.ps3coderz.com/index.php?option=com_content&task=view&id=73&Itemid=31">Fedora Core 6</a>, <a href="https://help.ubuntu.com/community/PlayStation_3">Ubuntu</a>, <a href="http://ps3wiki.qj.net/index.php/Gentoo_Howto">Gentoo</a>, or <a href="http://www.keshi.org/moin/moin.cgi/PS3/Debian">Debian</a>, among others.
The choice of distribution is left to the reader, but it is essential to download an ISO for a PowerPC architecture. In my case, I chose the <a href="http://rhold.fedoraproject.org/Download/mirrors.html">Fedora Core 6</a> distribution and downloaded an ISO to burn onto a DVD. You also need the ISO for the <a href="ftp://ftp.uk.linux.org/pub/linux/Sony-PS3/">Linux add-on CD</a>, downloading the file CELL-Linux-CL-20061110-ADDON.iso. Finally, the most important tool, the development environment for the Cell: <a href="http://www.alphaworks.ibm.com/topics/cell?open&S_TACT=105AGX16&S_CMP=DWPA">SDK 2.1</a>.
<h2>The boot loader</h2>
The next step is to get hold of a boot loader. The most direct option is to take it from the Linux add-on CD. It lives in the kboot directory and its name is otheros.bld.
On the <em>pendrive</em> you have to create a directory called PS3 and, inside it, another one called otheros, into which you copy the boot loader (otheros.bld). For example:
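A sketch of those two steps, assuming the pendrive is mounted at /media/usb and the add-on CD at /media/cdrom (adjust the paths to your system):
<div class="console">
{% highlight console %}
$ mkdir -p /media/usb/PS3/otheros
$ cp /media/cdrom/kboot/otheros.bld /media/usb/PS3/otheros/
{% endhighlight %}
</div>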
At this point, you should have the following items:
<ul>
<li>1 CD or DVD with the distribution to install.</li>
<li>1 Linux add-on CD.</li>
<li>1 boot device (<em>pendrive</em>) with the boot loader.</li>
<li>1 CD with the SDK ISO.</li>
<li>1 PlayStation 3 with firmware version 1.6 or higher and a wired Internet connection.</li>
</ul>
<h2>Installation</h2>
At this point it is important to stress that the PlayStation 3 hard drive has to be formatted, so making a backup is recommended. Personal settings, however, will not be lost.
To format the system, go to the PlayStation 3 menu Settings -> System Settings -> Format Utility -> Format Hard Disk. The partitioning tool offers several options. In my case, it gave me the option of making two partitions: one of 10 GB and another of 50 GB. I chose to assign 10 GB to the operating system to be installed and 50 GB to the PlayStation 3 operating system (GameOS).
To install the boot loader, plug in the <em>pendrive</em> and go to Settings -> System Settings -> Install Other OS. The system itself will locate the otheros.bld file.
Once these steps are done, the next one is to change the default operating system and reboot. To do so, go to Settings -> System Settings -> Default System and choose the <em>Other OS</em> option. At this point the installation process diverges depending on the chosen operating system. If you install Fedora Core 6, after the kboot prompt appears, the command to enter is <em>install-fc sda</em>. Then the system asks for the installation DVD and, eventually, the Linux add-on CD to install the operating system. Once the operating system is installed, the <em>boot-game-os</em> command boots PlayStation 3 back into the game OS.
If you use a TV with <em>HD Ready</em> or <em>FullHD</em> certification, the installation process can be followed on the TV <em>without hurting our eyes</em>. However, with a standard-definition TV it is highly advisable to continue the installation remotely over ssh (if you follow the process on the TV, lowering the contrast can help).
The installation is a traditional one, although somewhat different when it comes to setting the video mode. In the case of <a href="http://www.ps3coderz.com/index.php?option=com_content&task=view&id=73&Itemid=31">Fedora Core 6</a>, the ps3videomode command lets you adjust the resolution and scan mode of the video output for an optimal fit on the TV (or monitor).
<h2>Installing SDK 2.1</h2>
This step assumes that all the tools needed to install/build the SDK 2.1 development libraries are already installed. If you installed Fedora Core 6, the yum tool can be used to install all of them [PS3_FedoraCore6]. The process is simple:
<div class="console">
{% highlight console %}
# cd path_to_iso_sdk_directory
# mkdir -p /mnt/cellsdk
# mount -o loop SDK21.iso /mnt/cellsdk
# cd /mnt/cellsdk/software
{% endhighlight %}
</div>
The SDK can be installed with or without the simulator:
<div class="console">
{% highlight console %}
# ./cellsdk install [--nosim]
{% endhighlight %}
</div>
This process involves downloading certain files. If there is any problem with this download, the solution is to run this last command again.
The development libraries do not come in binary form, so they have to be built with the xlc compiler or the gcc compiler (the default option):
<div class="console">
{% highlight console %}
# cd /opt/ibm/cell-sdk/prototype
# ./cellsdk build --gcc
{% endhighlight %}
</div>
This process takes some time. Once it has finished, unmount the image:
<div class="console">
{% highlight console %}
# umount /mnt/cellsdk
{% endhighlight %}
</div>
<h2>And now...?</h2>
If you dive into developing applications for the Cell processor, you may feel overwhelmed by the amount of existing documentation and information, on top of the change involved in dealing with an architecture so different from what most of us usually work with. A good reference is the <a href="http://www.ps3coderz.com">PS3coderz</a> portal, which indexes the Cell documentation in several sections.
<h2>Conclusions</h2>
If you have made it this far, you now have a GNU/Linux+Cell SDK 2.1 system at your disposal. The next and most important step, if the <em>bug bites you</em>, is to get familiar with the Cell architecture and its development tools and libraries.
<h2>References</h2>
[Blachford_Cell_v2]
Analysis and explanation of the Cell architecture.
<a href="http://www.blachford.info/computer/Cell/Cell0_v2.html">http://www.blachford.info/computer/Cell/Cell0_v2.html</a>
[Cell_Developer's_Corner]
Developer's corner for the Cell.
<a href="http://www.power.org/resources/devcorner/cellcorner">http://www.power.org/resources/devcorner/cellcorner</a>
[PS3_Coderz]
Development portal for the Cell processor.
<a href="http://www.ps3coderz.com/">http://www.ps3coderz.com/</a>
[PS3_FedoraCore6]
Detailed installation guide for Fedora Core 6 on PlayStation 3.
<a href="http://www.ps3coderz.com/index.php?option=com_content&task=view&id=73&Itemid=31">http://www.ps3coderz.com/index.php?option=com_content&task=view&id=73&Itemid=31</a>
<file_sep>---
layout: refresh
refresh_to_post_id: /challenge/2006-01-16/el-reto-de-la-semana-1-edicin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-10-30/alpine-consulta-tu-cuenta-de-correo-gmail-desde-la-consola-en-modo-texto.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-10-01/encuentra-una-idea-y-participa-en-el-concurso-universitario-de-software-libre-de-c-lm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2013-03-19/creating-a-virtual-grid-with-libvirt-debian-preseeds-puppet-icegrid.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-06-16/entornos-chroot-con-cdebootstrap-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-01-10/simulacin-de-cdigo-c-para-avr.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-10-24/lo-bueno-del-capitalismo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-07-27/arreglar-gnome-terminal-que-no-muestra-tildes-ni-ees.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-11-20/nuevas-beryl-0-1-3.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-07-07/ubuntu-8-04-en-airis-kira-300.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2008-04-08/fase-final-del-concurso-de-sl-de-clm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-11-12/comprar-un-porttil.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-27/patrn-singleton-en-python-como-metaclase.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /script/2006-06-02/twisted-controlando-varios-equipos-con-synergy.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-10-13/arco-devel-el-paquete.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-04-23/gladewrapper-o-cmo-hacer-una-aplicacin-gtk-con-python-en-7-lneas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-08-30/la-ensima-receta-de-iptables.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-06-14/configurar-dispositivos-android-para-desarrollo-bajo-gnu-linux.html
---
<file_sep>---
migrated: node/1558
layout: post
title: '"Reconciliación" Android y Linux '
created: 1325083965
author: RRC19189
comments: true
category: new
tags:
- android
---
The upcoming 3.3 Linux kernel will include certain Android-specific changes that had been dropped from other kernels and that only one particular kernel of the 2.6 branch (2.6.33) carried. This change will let us boot an Android-based system on a stock Linux kernel without any additional modification.
<!--break-->
Full story in English: <a href="http://www.h-online.com/open/news/item/Android-drivers-to-be-included-in-Linux-3-3-kernel-1400996.html">http://www.h-online.com/open/news/item/Android-drivers-to-be-included-in-Linux-3-3-kernel-1400996.html</a>
Full story in Spanish: <a href="http://www.xatakandroid.com/sistema-operativo/android-vuelve-al-codigo-oficial-de-linux">http://www.xatakandroid.com/sistema-operativo/android-vuelve-al-codigo-oficial-de-linux</a>
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-27/ndiswrapper-tarjetas-wifi-no-soportadas-en-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-02-05/tes-emacs-con-pestaas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-08-31/compilacin-de-paquetes-debian-con-svn-buildpackage.html
---
<file_sep>---
migrated: node/78
layout: post
title: CRySoL install party
created: 1130950871
author: int-0
comments: true
category: new
---
Well... a few days ago there was talk at CRySoL about holding an "install party"... but, as usual, if nobody steps up and says "fine, I'll do it", it doesn't happen. So I (<NAME>) and the ever well-regarded Cleto have decided to say "fine, we'll do it", and we have started asking around here and there to see what we can and cannot do.
We finally arrived at the happy idea of holding an install party with assorted workshops... that is, we will spend a few pleasant hours installing a base system of the Debian/Ubuntu/Gnesis/MoLinux kind and then squeezing some of the juice out of our "children".
There is still a lot left to say, but the most immediate thing is to decide which days suit you best; that is what the poll is for... please vote!
Over the next few days we will tell you how things are progressing... ok?
Ah! We will also be asking for help, to see whether all those "you can count on me" were for real or just talk... hehehe...
<!--break-->
See you later
-------------------------
init=/bin/bash to conquer the world!
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-06-06/omikey-cardman-5321-rfid-reader-en-debian.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-10-09/creacin-del-directorio-debian-con-dh_make-para-debianizar-tu-programa.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-01-27/visita-virtual-al-hospital-general-de-ciudad-real.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-10-16/cienes-y-cienes-de-cosas-como-dira-aquel.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-06-10/mster-en-tecnologas-de-la-informacin-y-las-comunicaciones-mtic.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2009-04-14/culpable-a-sabiendas-de-ser-inocente.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-28/en-nautilus-tambin-tenemos-abrir-terminal-aqu.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-01-24/ebay-y-el-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2005-12-07/un-da-triste-para-los-usuarios-de-intel-airport-extreme-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-09-02/25-aniversario-del-proyecto-gnu.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-08-10/el-diccionario-de-la-rae-siempre-a-mano.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2013-02-10/altavoces-bluetooth-en-gnome.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-27/generar-un-certificado-ssl-para-apache.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-11-26/instalando-hurd.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-11-11/http-get-con-libcurl-en-c.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-12-09/haciendo-funcionar-la-airport-extreme-con-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-10-25/migracin-cutre-de-una-instalacin-a-un-disco-duro-nuevo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-04-20/molinux-nomada-una-molinux-en-tu-usb.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-05-21/sincroniza-tus-ficheros-con-unison.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-02-01/cdigo-fuente-y-esquemas-de-yago.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-10-14/instalacin-de-edonkey2000.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-12-17/building-skills.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-03-25/gua-rpida-de-cmake-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-04-01/llamadas-voip-desde-nds.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-07-13/instalar-driver-privativo-de-nvidia-con-kernel-linux-2-6-20.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-07-29/ingsoft-keep-it-simple-stupid-kiss.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-04-07/antiprogramacin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2012-11-20/intentando-una-instalacin-decente-de-debian-en-el-asus-ux32vd.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2011-12-10/ios-vs-android-el-coste-social-de-la-eficiencia-o-el-coste-tcnico-de-la-libertad.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-03-22/integracin-continua-de-aplicaciones-python-con-hudson-jenkins.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-03-17/posible-cambio-de-nmero-en-las-versiones-de-ubuntu.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-12-23/latex-referencias-imprescindibles.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-12-06/arco-devel-auto-insert.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-11-04/latex-babel-enumitem.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-10-10/sql-inyection.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2012-04-08/consulta-sobre-los-raspberry.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-09-23/el-fin-del-la-paginacin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-15/manipulacin-de-ficheros-pdf.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-03-19/manejo-bsico-de-seales-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-03-28/restringir-secuencias-controlalt-en-el-servidor-x.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-05-28/creacin-de-plugins-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-11-10/maneras-de-indentar.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-12-26/linux-en-un-smartphone.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-05-18/licencias-de-windows-y-macos-x.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-22/gnu-emacs-elegir-el-encoding.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-02-02/wifi-rt2500-de-ralink-en-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-02-07/sin-palabras-otra-vez-la-sgae.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-09-29/documentacin-oficial-de-scapy.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-12-07/gnu-linux-y-la-banda-ancha-mvil.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-02-15/el-nuevo-tema-de-crysol.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /humor/2010-12-14/cuntas-pedimos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-02-01/sobre-el-copyleft.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2010-06-15/la-comisaria-europea-de-agenda-digital-a-favor-del-software-libre-en-las-administraciones-pblicas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-06/streaming-multimedia-con-flumotion.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-01-29/a-todos-los-autores-de-recetas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2013-02-20/android-sdk-en-debian-64-bits.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-08/control-del-puerto-paralelo-con-ppdev.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-01/cv-de-la-aneca-en-sql-para-openoffice.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-06-21/cmo-limpiar-tu-ordenador.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-08-01/reconocimiento-de-voz-en-windows-vista.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-11-11/devkitpro-23b-para-debian-ubuntu-molinux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-24/creacin-de-plugins-para-freevo.html
---
<file_sep>---
migrated: node/111
layout: post
title: A question about distros
created: 1132910483
author: Lk2
comments: true
---
Hi everyone.
<!--break-->
With only hours to go before the Install Party (to which I am bringing nothing but my laptop, and no more room than whatever corner they leave me), I am wondering what to do with the PC at home...
It is an amd64, and I have Ubuntu installed on it. Despite some locale problems it now works more or less decently, but even so, the lack of packages in Ubuntu's amd64 repositories is almost outrageous (lame, for example, is not even there).
Hence this little question: in terms of the number of packages, which distro is better, Debian or Ubuntu? I know this is almost a philosophical debate, but I simply want to look at how many programs and packages I will be able to reach with each of them in the simplest possible way (yes, I am lazy). I suppose the answer will lean towards Debian, but then, within the chosen one, the 386 or the amd64 version? The truth is that the distro itself does not matter much; for me the important thing is that, first, things work, and then that they work brilliantly, so if in order to watch Flash, install more programs and have more repositories I have to lean towards a 386 version (be it Ubuntu or Debian), I will do so without any problem...
And well, nothing more; I would love to hear your valuable opinion :)
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-12/plugin-guifications-en-pidgin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-19/programando-una-mica2.html
---
<file_sep>---
layout: page
title: Users
group: navigation
js: [filter_by_url]
---
<table>
{% tablerow item in site.authors cols: 3 %}
{% assign author = item[1] %}
<div class="user item" id="{{ item[0] }}">
<h2>
{% include custom/author_photo %}
<span class="author-name">{{ author.name }}</span>
</h2>
<div class="author-details">
<p>
See <a href="/blogs.html#{{item[0]}}">{{author.name}}'s blog.</a>
</p>
<ul>
{% if author.twitter %}
<li>twitter.com/ <a href="https://twitter.com/{{author.twitter}}">{{author.twitter}}</a></li>
{% endif %}
{% if author.github %}
<li>github.com/ <a href="https://github.com/{{author.github}}">{{author.github}}</a></li>
{% endif %}
{% if author.bitbucket %}
<li>bitbucket.com/ <a href="https://bitbucket.com/{{author.bitbucket}}">{{author.bitbucket}}</a></li>
{% endif %}
</ul>
</div>
</div>
{% endtablerow %}
</table>
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2007-08-09/twitter-crysol.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2012-03-29/separacin-silbica-en-libreoffice.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-09-01/sobre-frikis-en-general.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-07-21/crear-una-imagen-emdebian-para-tu-friendlyarm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /script/2009-01-07/gimp-recortar-una-imagen-en-lnea-de-rdenes.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-02-03/kit-de-desarrollo-completo-para-psp-actualizado.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-07-05/crear-sistemas-de-ficheros-xfs.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-03-24/gestin-sencillita-de-tu-coleccin-de-pelculas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-06-07/pycurl-utilizando-autenticacin-y-cookies-desde-python.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-03-05/openoffice-org-la-suite-de-productividad-ofimtica-de-telefnica.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-07-28/la-sdae-desarroll-el-mp3-cuando-los-modem-iban-a-9-600-voltios.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2005-11-06/guacamole-top.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-09-08/trucos-tiles-para-la-tabletpc-hp-compaq-tc1100.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-04-16/glib-io-channels-con-c.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-08-19/ayuda-con-instalacin-de-ubuntu-a-medias.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2008-12-04/ayuda-con-mquina-virtual-para-ubuntu.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-05-17/rs232-en-el-pic-uart-por-software.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /art/2009-05-27/quin-dijo-que-con-inkscape-no-se-poda.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2011-04-26/1-semana-de-obras-libres-del-13-al-19-de-mayo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-01-27/darwin-streaming-server-para-dispositivos-mviles.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-03-04/quieres-consultar-redcampus.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-12-13/merry-chrismas-and-a-happy-gnu-year.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-05-05/debian-live-personalizada-en-una-lnea.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-03-17/objetivos-de-la-web.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-06-25/gnu-emacs-puesta-a-punto.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-02-20/usar-la-librera-libxml2.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2010-03-26/cmake-instalar-un-paquete.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-07-12/ssh-en-el-chumby.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-12-24/averigua-tu-ip-externa-pblica.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-07-19/gstreamer-python-tu-propio-reproductor-multimedia-en-minutos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-03-25/depurar-un-programa-c-c.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-03-06/gestin-de-los-ups.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2013-07-02/pastel-de-chocolate.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-06-15/aviso-a-los-autores-de-recetas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2006-01-07/una-razn-ms-para-utilizar-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-02-17/divide-y-vencers.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-02-19/propuesta-proponiendo-actividades-2-parte.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-10-24/gnu-windows-vs-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-01-29/desarrollo-de-aplicaciones-para-psp-slim-y-psp-fat-con-fw-actuales.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /event/2008-09-11/nueva-edicin-concurso-universitario-de-software-libre-de-clm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2013-05-22/fallo-de-pdflatex-en-debian-sid.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2010-07-14/ingsoft.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-06-02/crysol-org.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-01-22/instalar-debian-desde-red-con-pxe-nunca-fue-tan-fcil.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2006-10-25/macarrones-enfadados.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-05-10/soy-famoso.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-06-07/scanner-hp-scanjet-3200c.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2013-04-11/emacs-pills-compilation-feedback-with-colors.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /script/2008-08-27/python-merge-lists.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-10-13/crear-un-paquete-debian-con-scripts-de-inicio.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-08-28/configuracin-de-mdem-usb-huawei-en-gnu-linux-huawei-linux.html
---
<file_sep>---
migrated: node/652
layout: post
title: Is software a product?
created: 1177406038
author: darkgnu
comments: true
category: opinion
---
Emphatically <b>YES</b>. Let me explain:
<!--break-->
Any of us can walk into Eroski and, in the computing section, find a pile of boxes with CDs and DVDs full of proprietary programs that cost a fortune. You buy one of them and... what do you get? Some would say software... wrong! What you have actually bought is the <em>company's permission to use THAT software</em>, that is, a <em>licence</em>. The box with the CD comes <em>as a freebie</em>. How many proprietary programs can you download straight from their website as a <em>demo</em>? If you pay for a licence (or steal one, that is, keygen it) you get permission to use it without nag screens and at "full performance". What does this mean? Is the software the product? Or is it the usage <em>licence</em>, the permission to use it?
If I buy a notebook, we all know the product is the notebook. As such I become the owner of that notebook and can do whatever I like with it: give it away, tear it up, study it, draw in it, etc. Whereas if I bought a permission to draw in it... what could I do? Draw in it... and that's that.
Now these free software reds come along and say: no, no... software must be free (as in freedom, not gratis). Come on, these guys are nuts! And what do they live on? Easy: selling their software and offering services around it. Under the free software model a program becomes a product: the licence is the same for all the software, you are the lord and owner of that product and you can do whatever you please with it. A company could create a free program and sell it; a buyer could purchase it and, since they become its <em>owner</em>, they could redistribute it, improve it (the company would also hand over its complete source code), study it... etc.
But none of this matters; people still find selling licences more normal than selling software. Even when the licences are abusive; licences such as "you may use this software for anything you want except for..." (but I bought it precisely for that!), "if you use this you may not speak ill of us" (I thought freedom of expression was a constitutional right) and before long "to access the expert menus you have to send us photos of your sister in a bikini" (and why not? Who sets any limits on them?).
Two kinds of people will read this: those who already know how this works (and who will therefore have wasted a few minutes) and those who live deceived and believe themselves free because they can steal licences with impunity... who obviously will not care to learn that they are being deceived and handled at the whim of a few.
Greetings to the latter :sick:
<file_sep>---
layout: refresh
refresh_to_post_id: /2010-10-29/creada-la-pgina-rplica-en-espaol-de-auctex-para-gnu-emacs.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-06-29/prcticas-de-programacin-infames-1-parte.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-09-27/debian-en-el-acer-aspire-revo.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-10-24/ms-eficaz-que-la-porra-es-el-miedo-a-la-porra.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-01-26/migracin-de-repositorios-subversion.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-08-31/gnu-emacs-configurar-indentacin.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-08-03/osd-escribiendo-en-el-escritorio.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-01-24/virtualizacin-fcil-con-xen3.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-11-19/bookcrossing-tambin-en-ciudad-real.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-06-06/inhabilitar-touchpad-synaptics-mientras-escribes.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2005-10-02/saluditos.html
---
<file_sep>---
migrated: node/1519
layout: post
title: A project. Will you collaborate?
created: 1306694726
author: Marot77
comments: true
category: opinion
---
Hi, a while ago I thought of a project: creating HelpDesk software built entirely from scratch under the GPL licence, but that project already exists in too many communities, so I have been mulling it over during this time, through several OS changes... etc.
<!--break-->
I thought of several things, such as a social network for MMO players (World of Warcraft, Regnum... etc.), an educational social network (a teacher connects in the afternoon and users post their questions about the exercises they have that same afternoon; I thought the state might be interested in that), right up to my final idea, which seemed good to me: designing a home entry system based on a PIN, that is, there would be a keypad, you type in your house's PIN and the door opens. It sounds very utopian, but it would not hurt to study it... etc. Anyone in?
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-08-05/truco-tonto-engaar-a-wget.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-12-15/configuracin-de-thunderbird-icedove-para-el-correo-de-la-uclm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-05-04/debian-tdd.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-08-01/aplicaciones-portables-entre-psp-y-gnu-linux-con-sdl.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2006-01-10/magnatuneasy-vuelve-a-la-vida.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2012-03-05/instalar-debian-en-asus-zenbook-ux31e.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-10-31/git-el-control-de-versiones-definitivo-por-ahora.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2008-03-22/iphone-e-ipod-touch-con-mono.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-12-22/suicidios-de-discos-duros.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-08-18/solucionar-sincronizacin-audio-video-de-flash.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-02-25/google-gears-works-on-iceweasel-debian-ubuntu.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-03-22/plataforma-anti-soporte-gratuito-a-microsoft.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-09-10/jugando-con-lego-mindstorms-nxt.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2005-11-14/10-000-visitas.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2008-02-27/actualizar-firmware-de-xport.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2006-02-21/van-a-cambiar-los-escritorios-3d-la-manera-de-usar-el-ordenador.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /cooking/2012-01-28/mojo-picn.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-05-30/mini-tutorial-sobre-la-recursividad.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /humor/2009-07-15/si-zp-dice-que-bill-gates-lo-dice.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2005-11-04/grub-protegido-con-contrasea.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-12-13/usar-una-particin-real-con-virtualbox.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-06-17/afina-tus-discos-con-hdparm.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2006-11-09/error-al-abrir-algunas-paginas-webs.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2008-08-27/la-importancia-de-cifrar-tu-vida.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2009-03-10/el-autntico-emblema-de-informtica.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-05-19/hasta-luego-hd-dvd.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2007-02-25/gnu-linux-en-el-vaticano.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2008-03-01/nexuiz-2-4-en-la-calle.html
---
<file_sep>---
migrated: node/672
layout: post
title: 10 reasons not to use "Linux"
created: 1178661661
author: aledelgal
comments: true
category: opinion
---
<a href="http://www.sahw.com/wp/archivos/2006/10/20/10-razones-para-no-usar-linux/">Leerlo aquí</a>
<file_sep>---
layout: refresh
refresh_to_post_id: /opinion/2007-05-28/humor-informtico.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2011-05-13/vdeos-de-las-jornadas-de-software-libre-de-ciudad-real-15-al-18-de-marzo-de-2011.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /tale/2005-12-09/mapa-conceptual-del-software-libre.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2007-05-17/la-consola-de-la-fonera-ds275.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /new/2011-12-28/reconciliacin-android-y-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2011-05-14/pruebas-web-con-selenium-y-atheist.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-12-06/cmo-indicar-a-x-org-qu-tarjeta-de-vdeo-tienes.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2009-07-24/configurar-htc-magic-g2-con-android-en-debian-gnu-linux.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /enquiry/2011-12-07/filtro-de-contenidos.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /2007-02-13/cuando-el-gestor-de-claves-de-gnome-se-pone-pesadito.html
---
<file_sep>---
layout: refresh
refresh_to_post_id: /recipe/2006-08-25/motorola-v3-y-debian-gnu-linux.html
---
| 59887417f56bda0dd0cb9a9f1fd5d37f88b3bc38 | [
"Ruby",
"HTML",
"Markdown",
"JavaScript",
"Shell"
] | 1,218 | Markdown | CRySoL/web.source | 97d57e181686f87ebfc177a65e21dda4cb63b9b4 | 4958aee965eaff87c295126a4998c10b5fb6000b | |
refs/heads/master | <file_sep>from flask import Flask, render_template, jsonify, request
from pymongo import MongoClient # import pymongo (the package has to be installed first, right?)
app = Flask(__name__)
client = MongoClient('mongodb://sparta:[email protected]', 27017) # MongoDB runs on port 27017.
db = client.dbsparta # creates (or reuses) a db named 'dbsparta'.
## The part that serves the HTML
@app.route('/')
def home():
return render_template('Project_2.html')
@app.route('/hosworks/delete', methods=['POST'])
def deleterow():
    # 1. Put the day_give and name_give values sent by the client into local variables.
day_receive = request.form['day_give']
name_receive = request.form['name_give']
    # 2. Use delete_one to remove the hosworks document whose day and name match.
db.hosworks.delete_one({'day': day_receive, 'name': name_receive})
return jsonify({'result': 'success'})
## The part that acts as the API
@app.route('/hos', methods=['POST'])
def write_hos():
name_receive = request.form['name_give']
address_receive = request.form['address_give']
number_receive = request.form['number_give']
person_receive = request.form['person_give']
tel_receive = request.form['tel_give']
email_receive = request.form['email_give']
    # Build the document to insert into the DB
hos = {
'name': name_receive,
'address': address_receive,
'number': number_receive,
'person': person_receive,
'tel': tel_receive,
'email': email_receive,
}
    # Save the document into the hos collection
db.hos.insert_one(hos)
    # Return success status & message
return jsonify({'result': 'success'})
## The part that acts as the API
@app.route('/hosworks', methods=['POST'])
def write_hosworks():
    # Get the day the client sent as day_give
day_receive = request.form['day_give']
    # Get the works value the client sent as works_give
works_receive = request.form['works_give']
    # Get the etc value the client sent as etc_give
etc_receive = request.form['etc_give']
name_receive = request.form['name_give']
    # Build the document to insert into the DB
hosworks = {
'day': day_receive,
'works': works_receive,
'etc': etc_receive,
'name': name_receive,
}
    # Save the document into the hosworks collection
db.hosworks.insert_one(hosworks)
    # Return success status & message
return jsonify({'result': 'success'})
# @app.route('/hosworks', methods=['GET'])
# def read_hosworks():
# # 1. Fetch all records from the DB
# hosworks = list(db.hosworks.find({}, {'_id': 0}).sort("day", -1))
# # 2. Return success status & the list
# return jsonify({'result': 'success', 'hosworks': hosworks})
@app.route('/findhos', methods=['GET'])
def find_hos():
keyword = request.args.get('keyword')
    # 1. Fetch all matching records from the DB
hos = list(db.hos.find({'name': keyword}, {'_id': 0}))
hosworks = list(db.hosworks.find({'name': keyword}, {'_id': 0},).sort("day", -1))
    # 2. Return success status & both lists
return jsonify({'result': 'success', 'hos': hos, 'hosworks': hosworks})
if __name__ == '__main__':
app.run('0.0.0.0', port=5000, debug=True) | b2a393da9577d926f7489850d63c7993f7d24947 | [
"Python"
] | 1 | Python | gscjj44/my_project | 49f11c6b0ff7857abff0fb384a6dc99f6dd402a3 | 32be6c38269f57d775b599798f44dd97809fd270 | |
refs/heads/master | <file_sep>package com.poojashree.springboot.controller;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
import com.poojashree.springboot.dao.AlianDao;
import com.poojashree.springboot.model.Alian;
@Controller
@ComponentScan("com.poojashree.springboot")
public class AlianController {
@Autowired
AlianDao dao;
@RequestMapping("/")
public String home() {
return "home.jsp";
}
@RequestMapping("/addAlian")
public String addAlian(Alian alian) {
dao.save(alian);
return "home.jsp";
}
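	// Fetches the Alian with the given id and shows it in showalian.jsp; falls back to an empty Alian when the id is unknown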
@RequestMapping("/getAlian")
public ModelAndView getAlian(@RequestParam int aid)
{
ModelAndView mv=new ModelAndView("showalian.jsp");
Alian alian=dao.findById(aid).orElse(new Alian());
mv.addObject(alian);
return mv;
}
}
<file_sep>package com.poojashree.springboot.dao;
import org.springframework.data.repository.CrudRepository;
import com.poojashree.springboot.model.Alian;
public interface AlianDao extends CrudRepository<Alian, Integer>
{
}
| fc0fac8f2a7e167c8625a9c79b1495e0ea0a50b0 | [
"Java"
] | 2 | Java | poojashree1234/Vesify | 57061c1ec7e7e11508ea6fd4dc539d4400a23aba | 4e7c99312d3edc7120bd78149bcd2706d8a2f488 | |
refs/heads/master | <repo_name>SalvAna/state_React3<file_sep>/simpson-quotes/src/App.js
import React, { Component } from "react";
import logo from "./logo.svg";
import "./App.css";
import Quote from "./Quote";
class App extends Component {
constructor(props) {
super(props);
this.state = {
working : true
};
}
handleClick = () => {
this.setState({ working: !this.state.working});
};
render() {
let newLogo = this.state.working ? "App-logo" : "App-logo-new";
    let working = this.state.working; // expose the state flag to the JSX below
return (
<div className="App">
<header className="App-header">
<img src={logo} className={newLogo} alt="logo" />
<h1 className="App-title">Homer! Are you working?</h1>
</header>
<button onClick={this.handleClick}
className={working}>Change the logo
</button>
<figure className={working} />
<Quote/>
</div>
);
}
}
export default App;
//id="App-logo-new"
//{working.toUpperCase()}
| db5e2a3caad9200ac3cc49f8d77cd46bcd4c0bc7 | [
"JavaScript"
] | 1 | JavaScript | SalvAna/state_React3 | b0f4c4105bc87fd5e22668a2b0e7ee21acb3b369 | 7ad89df05bc35b017ada7acce623a84808245251 | |
refs/heads/master | <repo_name>kmurata08/pubusub-sample<file_sep>/mailpubsub/requirements.txt
astroid==2.2.4
autopep8==1.4.3
cachetools==3.1.0
certifi==2018.11.29
chardet==3.0.4
Click==7.0
Flask==1.0.2
google-api-core==1.8.0
google-auth==1.6.3
google-cloud-pubsub==0.39.1
googleapis-common-protos==1.5.8
grpc-google-iam-v1==0.11.4
grpcio==1.19.0
idna==2.8
isort==4.3.12
itsdangerous==1.1.0
Jinja2==2.11.3
lazy-object-proxy==1.3.1
MarkupSafe==1.1.1
mccabe==0.6.1
protobuf==3.7.0
pyasn1==0.4.5
pyasn1-modules==0.2.4
pycodestyle==2.5.0
pylint==2.3.1
python-dotenv==0.10.1
pytz==2018.9
requests==2.21.0
rsa==4.7
six==1.12.0
typed-ast==1.3.2
urllib3==1.24.1
Werkzeug==0.15.3
wrapt==1.11.1
<file_sep>/mailpubsub/utils/sendgrid_utils.py
import sendgrid
from sendgrid.helpers.mail import Email, Content, Mail
from dotenv import load_dotenv
import os
import textwrap
def loadenv():
dotenv_path = os.path.join(os.path.dirname(
os.path.dirname(__file__)), '.env')
load_dotenv(dotenv_path)
def get_api_key():
loadenv()
return os.environ.get('SENDGRID_API_KEY')
def send_mail(to_email, subject, message):
sg = sendgrid.SendGridAPIClient(apikey=get_api_key())
content_message = get_content_message(to_email, message)
mail = Mail(
Email('<EMAIL>'),
subject,
Email(to_email),
Content('text/plain', content_message))
response = sg.client.mail.send.post(request_body=mail.get())
print(response.status_code)
if response.status_code >= 400:
return False
return True
def get_content_message(to_email, message):
content = textwrap.dedent('''
{email}様
フォームに入力いただきありがとうございます。
{email}様の一言は、{message}です。
''').format(email=to_email, message=message).strip()
return content
<file_sep>/mailpubsub/main.py
from flask import Flask, render_template, request, redirect, url_for
from utils.pubsub_utils import push_to_topic, pull_from_subscriber
from utils.sendgrid_utils import send_mail
import json
app = Flask(__name__)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/mail/push', methods=['POST'])
def mail_push():
push_data = {
'email': request.form['email'],
'subject': request.form['subject'],
'message': request.form['message']
}
push_msg = json.dumps(push_data)
    # Push the message to Pub/Sub in JSON format
push_to_topic(push_msg)
return redirect(url_for('mail_push_done', message="mail jobを登録しました!"))
@app.route('/mail/push/done')
def mail_push_done():
message = request.args['message']
return render_template('notify.html', message=message)
@app.route('/mail/pull')
def mail_pull():
def pub_callback(msg):
data = json.loads(msg.data)
        # Send the email via SendGrid
send_mail(data['email'], data['subject'], data['message'])
msg.ack()
print('Pulled message.')
    # Pull the message from Pub/Sub and handle it in the callback
pull_from_subscriber(pub_callback)
return redirect(url_for('index'))
if __name__ == '__main__':
app.debug = True
app.run(host='0.0.0.0')
<file_sep>/mailpubsub/utils/pubsub_utils.py
from google.cloud import pubsub_v1
from dotenv import load_dotenv
import os
def loadenv():
dotenv_path = os.path.join(os.path.dirname(
os.path.dirname(__file__)), '.env')
load_dotenv(dotenv_path)
def get_project_id():
loadenv()
return os.environ.get('PROJECT_ID')
def get_topic_name():
loadenv()
return os.environ.get('TOPIC_NAME')
def get_subscription_name():
loadenv()
return os.environ.get('SUBSCRIPTION_NAME')
def push_to_topic(msg):
project_id = get_project_id()
topic_name = get_topic_name()
publisher = pubsub_v1.PublisherClient()
topic_path = publisher.topic_path(project_id, topic_name)
data = msg.encode('utf-8')
publisher.publish(topic_path, data=data)
print('Published message.')
def pull_from_subscriber(callback):
project_id = get_project_id()
subscription_name = get_subscription_name()
subscriber = pubsub_v1.SubscriberClient()
subscription_path = subscriber.subscription_path(
project_id, subscription_name)
subscriber.subscribe(subscription_path, callback=callback)
| 4d98dd6a9f386f0d4fa8a5c331792cdce38d202b | [
"Python",
"Text"
] | 4 | Text | kmurata08/pubusub-sample | 1447dc9456ed51659d567761f120da72d876a18e | ea4d32a82f79e4be4c9ba0b4cd74b143a695080e | |
refs/heads/master | <file_sep>var request = require('request');
var querystring = require('querystring');
var fs = require('fs');
var cmd = require('commander');
var cheerio = require('cheerio');
var url = 'http://poe.trade/search';
var data = {
"league": "Perandus+Flashback",
"type": "",
"base": "",
"name": "WILL BE SET",
"dmg_min": "",
"dmg_max": "",
"aps_min": "",
"aps_max": "",
"crit_min": "",
"crit_max": "",
"dps_min": "",
"dps_max": "",
"edps_min": "",
"edps_max": "",
"pdps_min": "",
"pdps_max": "",
"armour_min": "",
"armour_max": "",
"evasion_min": "",
"evasion_max": "",
"shield_min": "",
"shield_max": "",
"block_min": "",
"block_max": "",
"sockets_min": "",
"sockets_max": "",
"link_min": "",
"link_max": "",
"sockets_r": "",
"sockets_g": "",
"sockets_b": "",
"sockets_w": "",
"linked_r": "",
"linked_g": "",
"linked_b": "",
"linked_w": "",
"rlevel_min": "",
"rlevel_max": "",
"rstr_min": "",
"rstr_max": "",
"rdex_min": "",
"rdex_max": "",
"rint_min": "",
"rint_max": "",
"mod_name": "",
"mod_min": "",
"mod_max": "",
"group_type": "And",
"group_min": "",
"group_max": "",
"group_count": "1",
"q_min": "",
"q_max": "",
"level_min": "",
"level_max": "",
"ilvl_min": "",
"ilvl_max": "",
"rarity": "",
"seller": "",
"thread": "",
"identified": "",
"corrupted": "",
"online": "x",
"buyout": "",
"altart": "",
"capquality": "x",
"buyout_min": "",
"buyout_max": "",
"buyout_currency": "",
"crafted": "",
"enchanted": ""
};
var userInput;
cmd
.version('beta')
.option('-i, --item [string]', 'Search item')
.parse(process.argv);
if (cmd.item) {
userInput = cmd.item.trim();
data.name = readyString(userInput);
console.log('Searching for ', data.name);
getSearchResults(data, function (link) {
request(link, grabTheMoney);
});
} else {
console.log("Input an item silly!!!");
return;
}
function getSearchResults (formData, fn) {
request({
headers: {
'User-Agent': 'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Cookie': 'color=white;league=Perandus%20Flashback',
'Accept': '/',
'Connection': 'keep-alive',
'Content-Type': 'application/x-www-form-urlencoded',
'contentLength': querystring.stringify(data).length
},
uri: url,
body: stringifyData(formData),
method: 'POST'
}, function (err, httpResponse, data) {
handleData(data, fn)
});
}
function handleData (data, fn) {
var $ = cheerio.load(data);
fn($('a').attr('href'));
}
function stringifyData (data) {
var stringified = '';
Object.keys(data).forEach(function (key) {
stringified += key + '=' + data[key] + '&';
});
return stringified.substring(0, stringified.length - 1);
}
function capitalizeFirstLetter(string) {
return string.charAt(0).toUpperCase() + string.slice(1);
}
function readyString(str) {
var tokens = str.split(' ');
tokens.forEach(function (word,i) {
tokens[i] = capitalizeFirstLetter(word);
});
return tokens.join('+');
}
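// Scrape the search results page: every element with the "currency" class is a price tag for a listed item.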
function grabTheMoney (err, httpResponse, data) {
var $ = cheerio.load(data);
var $elem = $('.currency').each(function (i, item) {
displayItem($(this));
});
}
function displayItem (item) {
var itemName = getItemName(item);
var currencyType = getCurrencyType(item);
var costAmount = item.text();
console.log(itemName + ' ' +costAmount + ' ' + currencyType);
}
function getItemName (item) {
var ctx = item.parentsUntil('.item');
return ctx.find('h5 a').first().text();
}
function getCurrencyType (item) {
var classes = item.attr('class');
return classes.split('currency-')[1].split(' ')[0];
}
/* http://currency.poe.trade/api-get-offers?
league=Perandus%20Flashback
var temp = 'league=Perandus+Flashback&type=&base=&name=Tabula+Rasa+Simple+Robe&dmg_min=&dmg_max=&aps_min=&aps_max=&crit_min=&crit_max=&dps_min=&dps_max=&edps_min=&edps_max=&pdps_min=&pdps_max=&armour_min=&armour_max=&evasion_min=&evasion_max=&shield_min=&shield_max=&block_min=&block_max=&sockets_min=&sockets_max=&link_min=&link_max=&sockets_r=&sockets_g=&sockets_b=&sockets_w=&linked_r=&linked_g=&linked_b=&linked_w=&rlevel_min=&rlevel_max=&rstr_min=&rstr_max=&rdex_min=&rdex_max=&rint_min=&rint_max=&mod_name=&mod_min=&mod_max=&group_type=And&group_min=&group_max=&group_count=1&q_min=&q_max=&level_min=&level_max=&ilvl_min=&ilvl_max=&rarity=&seller=&thread=&identified=&corrupted=&online=x&buyout=&altart=&capquality=x&buyout_min=&buyout_max=&buyout_currency=&crafted=&enchanted=';
"body: temp" in requestoptions
*/<file_sep># What is this?
This is a command line tool for fetching the prices of fictional items in a game called Path of Exile.
The tool uses node as a web scraper and uses http://poe.trade for its data.
## Usage
Windows: Execute `price.bat`
Other: `node price.js -i "item name"`
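For example, running `node price.js -i "tabula rasa"` prints one line per listing in the form `<item name> <price> <currency>` (something like `Tabula Rasa Simple Robe 15 chaos`; the exact listings and prices depend on whatever is currently up on poe.trade).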
## Requirements
Made in JavaScript so it needs node and npm.
`npm install` prior to using the tool.
# Why?
My friend wanted a quick way of looking up an item price from his desktop and this is the quickest way I could think of. He didn't mind installing node.
# Other stuff
Coded in spaghetti code, tightly coupled, very brittle and will likely fall over if any html changes on the server. | 6ade15be9b0bdb2d3675f4784c0597323a8325f6 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | AaronAcerboni/poe-item-prices | f3aae772819f93b46b6ee3ef43973cf0361b8eaa | ac1d5f51d552597f70c0f89c6f0584d13e4c2bd0 | |
refs/heads/master | <repo_name>TainaRegina/Netflix<file_sep>/CURSO ANDROID UDEMY/KotlinRecylcerView/app/src/main/java/com/example/kotlinrecylcerview/LiveAdapter.kt
package com.example.kotlinrecylcerview
import android.app.DownloadManager
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.recyclerview.widget.RecyclerView
import com.example.kotlinrecylcerview.models.Live
import kotlinx.android.synthetic.main.res_item_live.view.*
class LiveAdapter(private val onItemClick : (Live) -> Unit) : RecyclerView.Adapter<RecyclerView.ViewHolder>() {
private var items : List<Live> = ArrayList()
    //inflates the item layout XML into a ViewHolder
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): RecyclerView.ViewHolder {
return LiveViewHolder(
LayoutInflater.from(parent.context).inflate(R.layout.res_item_live, parent, false)
)
}
    // binds the inflated XML views to the RecyclerView data
override fun onBindViewHolder(holder: RecyclerView.ViewHolder, position: Int) {
when (holder){
is LiveViewHolder ->{
holder.bind(items[position], onItemClick)
}
}
}
    //counts the items the list has
override fun getItemCount(): Int {
return items.size
}
    //sets the items of the list
fun setDataSet(lives : List<Live>) {
this.items = lives
}
class LiveViewHolder constructor(
itemView : View
): RecyclerView.ViewHolder(itemView){
private val liveTitle = itemView.textTitulo
private val autorLive = itemView.textAutor
private val banerLive = itemView.imageBaner
fun bind(live: Live, onItemClick: (Live) -> Unit) {
liveTitle.text = live.title
autorLive.text = live.autor
            // TO LOAD THE IMAGE FROM ITS URL
            // val requestOptions = RequestOptions()
            //     .placeholder(R.drawable.rocket)
            //     .error(R.drawable.rocket)
            //
            // Glide.with(itemView.context)
            //     .applyDefaultRequestOptions(requestOptions)
            //     .load(live.tumbnailURL)
            //     .into(banerLive)
            // HANDLING THE CLICK
itemView.setOnClickListener{
onItemClick(live)
}
}
}
}<file_sep>/aad-main/app/src/test/java/br/com/programadordeelite/gdc/codelab/debugging/calc/CalculatorTest.kt
package br.com.programadordeelite.gdc.codelab.debugging.calc
import org.hamcrest.CoreMatchers.`is`
import org.hamcrest.CoreMatchers.equalTo
import org.junit.Assert.*
import org.junit.Before
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
// +-------------------------------------------------------------------------------------+
// | If you have any trouble running the tests, this link may help you fix it             |
// +-------------------------------------------------------------------------------------+
// https://stackoverflow.com/questions/47926382/how-to-configure-shorten-command-line-method-for-whole-project-in-intellij
@RunWith(JUnit4::class)
class CalculatorTest {
private lateinit var calculator: Calculator
@Before
fun setUp() {
calculator = Calculator()
}
@Test
fun add() = assertThat(calculator.add(1.0, 1.0), `is`(equalTo(2.0)))
@Test
fun sub() = assertThat(calculator.sub(1.0, 1.0), `is`(equalTo(0.0)))
@Test
fun div() = assertThat(calculator.div(1.0, 1.0), `is`(equalTo(1.0)))
@Test
fun mul() = assertThat(calculator.mul(2.0, 1.5), `is`(equalTo(3.0)))
@Test
fun add_one_positive_number_to_one_negative_number() = assertThat(calculator.add(-1.0, 2.0), `is`(equalTo(1.0)))
@Test
fun `add to floats with decimal values to each other` () = assertThat(calculator.add(1.111, 1.111), `is`(equalTo(2.222)))
@Test
fun subWorksWithNegativeResult() = assertThat(calculator.sub(1.0, 17.0), `is`(equalTo(-16.0)))
@Test
fun divTwoNumbersThroughZero_ShouldReturn_PositiveInfinity() = assertThat(calculator.div(32.0, 0.0), `is`(equalTo(Double.POSITIVE_INFINITY)))
@Test
fun divTwoNumbersZeroNegative() = assertThat(calculator.div(-32.0, 0.0), `is`(equalTo(Double.NEGATIVE_INFINITY)))
@Test
fun divTwoNumbersNegative() = assertThat(calculator.div(32.0, -2.0), `is`(equalTo(-16.0)))
// +--------------------------------------------------------------------------+
// | Best practice: split the tests into blocks of positive and negative cases |
// +--------------------------------------------------------------------------+
@Test
fun positive__divide_two_numbers_should_succeed() = assertThat(calculator.div(3.0, 2.0), `is`(equalTo(1.5)))
@Test
fun negative__divide_number_through_zero_should_return_infinity() = assertThat(calculator.div(10.0, 0.0), `is`(equalTo(Double.POSITIVE_INFINITY)))
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/core/workmanager/BlurViewModel.kt
package br.com.programadordeelite.gdc.codelab.core.workmanager
import android.app.Application
import android.net.Uri
import androidx.lifecycle.AndroidViewModel
import androidx.lifecycle.LiveData
import androidx.work.*
// ViewModel that holds the data, mediates between the workers and observes their states
class BlurViewModel(application: Application) : AndroidViewModel(application) {
internal var imageUri: Uri? = null
internal var outputUri: Uri? = null
private val workManager = WorkManager.getInstance(application)
internal val outputWorkInfos: LiveData<List<WorkInfo>>
init {
        // Get a LiveData from WorkManager so the view can observe the work state changes
outputWorkInfos = workManager.getWorkInfosByTagLiveData(TAG_OUTPUT)
}
internal fun cancelWork() = workManager.cancelUniqueWork(IMAGE_MANIPULATION_WORK_NAME)
    // creates the input data for the worker
private fun createInputDataForUri(): Data {
val builder = Data.Builder()
imageUri?.let {
builder.putString(KEY_IMAGE_URI, imageUri.toString())
}
return builder.build()
}
/**
     * Creates the chain of work requests to free up space, apply the blur and save the image
     * @param blurLevel the blur level reflecting the user's radio button selection
*/
internal fun applyBlur(blurLevel: Int) {
        // 1) Add the cleanup worker to remove temporary files
var continuation = workManager
.beginUniqueWork(
IMAGE_MANIPULATION_WORK_NAME,
ExistingWorkPolicy.REPLACE,
OneTimeWorkRequest.from(CleanupWorker::class.java)
)
        // 2) add the blur task as many times as requested
for (i in 0 until blurLevel) {
val blurBuilder = OneTimeWorkRequestBuilder<BlurWorker>()
// Input the Uri if this is the first blur operation
// After the first blur operation the input will be the output of previous
// blur operations.
if (i == 0) {
blurBuilder.setInputData(createInputDataForUri())
}
continuation = continuation.then(blurBuilder.build())
}
        // Create a charging prerequisite (constraint)
val constraints = Constraints.Builder()
.setRequiresCharging(true)
.build()
        // 3) create the third task to save the file, but only while the device is charging
val save = OneTimeWorkRequestBuilder<SaveImageToFileWorker>()
.setConstraints(constraints)
.addTag(TAG_OUTPUT)
.setInputData(createInputDataForUri())
.build()
continuation = continuation.then(save)
        // Finally, put everything into a sequential queue and start the work
continuation.enqueue()
}
private fun uriOrNull(uriString: String?): Uri? {
return if (!uriString.isNullOrEmpty()) Uri.parse(uriString) else null
}
internal fun setImageUri(uri: String?) {
imageUri = uriOrNull(uri)
}
internal fun setOutputUri(outputImageUri: String?) {
outputUri = uriOrNull(outputImageUri)
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/usernavigation/tab/TabPagerAdapter.kt
package br.com.programadordeelite.gdc.codelab.userinterface.usernavigation.tab
import androidx.fragment.app.Fragment
import androidx.fragment.app.FragmentManager
import androidx.fragment.app.FragmentStatePagerAdapter
// OLD school implementation
class TabPagerAdapter(private val numOfTabs: Int, private val tabViewModel: TabViewModel, fm: FragmentManager) : FragmentStatePagerAdapter(fm) {
override fun getCount(): Int = numOfTabs
override fun getItem(position: Int): Fragment {
return when (position) {
0 -> TabOneFragment(tabViewModel)
1 -> TabTwoFragment(tabViewModel)
2 -> TabThreeFragment(tabViewModel)
else -> TabOneFragment(tabViewModel)
}
}
override fun getPageTitle(position: Int): CharSequence {
return "OBJECT ${(position + 1)}"
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/debugging/calc/CalcFragment.kt
package br.com.programadordeelite.gdc.codelab.debugging.calc
import android.os.Bundle
import android.util.Log
import android.view.View
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentCalcBinding
import timber.log.Timber
import java.lang.IllegalArgumentException
// https://developer.android.com/studio/command-line/adb.html
class CalcFragment : Fragment(R.layout.fragment_calc) {
private val calculator by lazy { Calculator() }
private lateinit var binding: FragmentCalcBinding
private val logTag by lazy { CalcFragment::class.java.simpleName }
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentCalcBinding.bind(view)
binding.operationAddBtn.setOnClickListener { compute(Calculator.Operator.ADD) }
binding.operationSubBtn.setOnClickListener { compute(Calculator.Operator.SUB) }
binding.operationDivBtn.setOnClickListener { compute(Calculator.Operator.DIV) }
binding.operationMulBtn.setOnClickListener { compute(Calculator.Operator.MUL) }
}
private fun compute(operation: Calculator.Operator) {
try {
val operandOne = binding.operandOneEditText.text.toString().toDouble()
val operandTwo = binding.operandTwoEditText.text.toString().toDouble()
binding.operationResultTextView.setText(
when (operation) {
Calculator.Operator.ADD -> calculator.add(operandOne, operandTwo).toString()
Calculator.Operator.SUB -> calculator.sub(operandOne, operandTwo).toString()
Calculator.Operator.MUL -> calculator.mul(operandOne, operandTwo).toString()
Calculator.Operator.DIV -> try {
val division = calculator.div(operandOne, operandTwo)
if(division.isFinite()) division.toString() else throw IllegalArgumentException("Illegal Division")
} catch (iae: IllegalArgumentException) {
                        Log.e(logTag, "IllegalArgumentException", iae) // to exemplify the use of Log
getString(R.string.computationError)
}
}
)
} catch (nfe: NumberFormatException) {
            Timber.e(nfe, "NumberFormatException") // to exemplify the use of Timber
binding.operationResultTextView.setText(getString(R.string.computationError))
return
}
}
}<file_sep>/Firebase-Authentication-Android-aula-3/app/src/main/java/br/com/alura/aluraesporte/repository/FirebaseAuthRepository.kt
package br.com.alura.aluraesporte.repository
import android.util.Log
import androidx.lifecycle.LiveData
import androidx.lifecycle.MutableLiveData
import com.google.firebase.auth.*
import java.lang.IllegalArgumentException
private const val TAG = "FirebaseAuthRepository"
class FirebaseAuthRepository(private val firebaseAuth: FirebaseAuth) {
private fun desloga(firebaseAuth: FirebaseAuth) {
firebaseAuth.signOut()
}
private fun verificaUsuario(firebaseAuth: FirebaseAuth) {
val usuarioFirebase: FirebaseUser? = firebaseAuth.currentUser
if (usuarioFirebase != null) {
} else {
}
}
private fun autenticaUsuario(firebaseAuth: FirebaseAuth) {
firebaseAuth.signInWithEmailAndPassword("<EMAIL>", "<PASSWORD>")
.addOnSuccessListener {
}.addOnFailureListener {
}
}
fun cadastra(email: String, senha: String): LiveData<Resource<Boolean>> {
val liveData = MutableLiveData<Resource<Boolean>>()
try {
val tarefa =
firebaseAuth.createUserWithEmailAndPassword(email, senha)
tarefa.addOnSuccessListener {
Log.i(TAG, "cadastra: cadastro sucedido")
liveData.value = Resource(true)
}
tarefa.addOnFailureListener {exception ->
Log.e(TAG, "cadastra: cadastro falhou", exception)
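                // Map known Firebase auth failures to user-facing error messages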
val mensagemErro: String = when(exception){
is FirebaseAuthWeakPasswordException -> "Senha precisa de pelo menos 6 dígitos"
is FirebaseAuthInvalidCredentialsException -> "E-mail inválido"
is FirebaseAuthUserCollisionException -> "E-mail já cadastrado"
else -> "Erro desconhecido"
}
liveData.value = Resource(false, mensagemErro)
}
} catch (e: IllegalArgumentException) {
liveData.value = Resource(false, "E-mail ou senha não ser vazio")
}
return liveData
}
}<file_sep>/AppContatos/app/src/main/java/com/example/appcontatos/MainActivity.kt
package com.example.appcontatos
import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.view.Menu
import android.view.MenuItem
import android.widget.Adapter
import androidx.constraintlayout.widget.ConstraintLayout
import androidx.recyclerview.widget.LinearLayoutManager
import androidx.recyclerview.widget.RecyclerView
class MainActivity : AppCompatActivity() {
private lateinit var recyclerView: RecyclerView
private val contatoAdapter = ContatoAdapter()
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
recyclerView = findViewById(R.id.recyclerView)
recyclerView.layoutManager = LinearLayoutManager(this)
recyclerView.adapter = contatoAdapter
val contatos = listOf(
Contato("Contato 1", "99999999", "email"),
Contato("Contato 2", "99999993", "email"),
Contato("Contato 3", "99999997", "email"),
Contato("Contato 4", "99999995", "email"),
Contato("Contato 5", "99999966", "email")
)
contatoAdapter.upDateItems(contatos)
}
override fun onCreateOptionsMenu(menu: Menu?): Boolean {
menuInflater.inflate(R.menu.main_menu, menu)
return true
}
    override fun onOptionsItemSelected(item: MenuItem): Boolean {
        return when (item.itemId) {
            R.id.menu_cadastrar -> {
                // Open the registration screen when the menu item is selected
                val intent = Intent(this, CadastroActivity::class.java)
                startActivity(intent)
                true
            }
            else -> super.onOptionsItemSelected(item)
        }
    }
}<file_sep>/white-label-firebase-tutorial-master/app/src/main/java/br/com/douglasmotta/whitelabeltutorial/domain/usecase/GetProductsUseCase.kt
package br.com.douglasmotta.whitelabeltutorial.domain.usecase
import br.com.douglasmotta.whitelabeltutorial.domain.model.Product
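// Use case contract for fetching the current product list; concrete implementations choose the data source.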
interface GetProductsUseCase {
suspend operator fun invoke(): List<Product>
}<file_sep>/Firebase-Authentication-Android-aula-3/app/src/main/java/br/com/alura/aluraesporte/ui/fragment/CadastroUsuarioFragment.kt
package br.com.alura.aluraesporte.ui.fragment
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.fragment.app.Fragment
import androidx.lifecycle.Observer
import androidx.navigation.fragment.findNavController
import br.com.alura.aluraesporte.R
import br.com.alura.aluraesporte.ui.viewmodel.CadastroUsuarioViewModel
import br.com.alura.aluraesporte.ui.viewmodel.ComponentesVisuais
import br.com.alura.aluraesporte.ui.viewmodel.EstadoAppViewModel
import com.google.android.material.snackbar.Snackbar
import kotlinx.android.synthetic.main.cadastro_usuario.*
import org.koin.android.viewmodel.ext.android.sharedViewModel
import org.koin.android.viewmodel.ext.android.viewModel
class CadastroUsuarioFragment : Fragment() {
private val controlador by lazy {
findNavController()
}
private val estadoAppViewModel: EstadoAppViewModel by sharedViewModel()
private val viewModel: CadastroUsuarioViewModel by viewModel()
override fun onCreateView(
inflater: LayoutInflater,
container: ViewGroup?,
savedInstanceState: Bundle?
): View? {
return inflater.inflate(
R.layout.cadastro_usuario,
container,
false
)
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
estadoAppViewModel.temComponentes = ComponentesVisuais()
cadastro_usuario_botao_cadastrar.setOnClickListener {
cadastro_usuario_email.error = null
cadastro_usuario_senha.error = null
cadastro_usuario_confirma_senha.error = null
val email = cadastro_usuario_email.editText?.text.toString()
val senha = cadastro_usuario_senha.editText?.text.toString()
val confirmaSenha = cadastro_usuario_confirma_senha.editText?.text.toString()
var valido = true
if(email.isBlank()){
cadastro_usuario_email.error = "E-mail é necessário"
valido = false
}
if(senha.isBlank()){
cadastro_usuario_senha.error = "Senha é necessária"
valido = false
}
if(senha != confirmaSenha){
cadastro_usuario_confirma_senha.error = "Senhas diferentes"
valido = false
}
if(valido){
viewModel.cadastra(email, senha).observe(viewLifecycleOwner, Observer {
it?.let {recurso ->
if(recurso.dado){
Snackbar.make(
view,
"Cadastro realizado com sucesso",
Snackbar.LENGTH_SHORT
).show()
controlador.popBackStack()
} else {
val mensagemErro = recurso.erro ?: "Ocorreu uma falha no cadastro"
Snackbar.make(
view,
mensagemErro,
Snackbar.LENGTH_SHORT
).show()
}
}
})
}
}
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/usernavigation/tab/TabHostFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.usernavigation.tab
import android.os.Bundle
import android.view.View
import androidx.fragment.app.Fragment
import androidx.fragment.app.viewModels
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentTabHostBinding
import com.google.android.material.tabs.TabLayoutMediator
// See doc: https://developer.android.com/training/animation/vp2-migration
// See doc: https://developer.android.com/topic/libraries/architecture/livedata
class TabHostFragment : Fragment(R.layout.fragment_tab_host) {
private lateinit var binding: FragmentTabHostBinding
private val viewModel: TabViewModel by viewModels()
// Outra maneira de definir o view model
// private lateinit var viewModel: TabViewModel
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
binding = FragmentTabHostBinding.bind(view)
// outra maneira de inicializar o view model
// viewModel = ViewModelProvider(this).get(TabViewModel::class.java)
// setup tabs
val tabLayout = binding.tabLayout
val titles = arrayOf(R.string.tab_label1, R.string.tab_label2, R.string.tab_label3)
val pager = binding.pager
pager.adapter = TabPagerAdapter2(titles.size, this)
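        // TabLayoutMediator keeps the TabLayout and ViewPager2 in sync and sets each tab's title in the callback below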
TabLayoutMediator(tabLayout, pager) { tab, position ->
tab.text = getString(titles[position])
}.attach()
// Old School way before ViewPager2
// tabLayout.tabGravity = TabLayout.GRAVITY_FILL
// tabLayout.addTab(tabLayout.newTab().setText(R.string.tab_label1))
// tabLayout.addTab(tabLayout.newTab().setText(R.string.tab_label2))
// tabLayout.addTab(tabLayout.newTab().setText(R.string.tab_label3))
//
//
// pager.adapter = TabPagerAdapter(tabLayout.tabCount, viewModel, childFragmentManager)
// pager.addOnPageChangeListener(TabLayout.TabLayoutOnPageChangeListener(tabLayout))
// tabLayout.addOnTabSelectedListener(object : TabLayout.OnTabSelectedListener {
// override fun onTabSelected(tab: TabLayout.Tab?) {
// tab?.let { pager.currentItem = it.position }
// }
// override fun onTabUnselected(tab: TabLayout.Tab?) = Unit
// override fun onTabReselected(tab: TabLayout.Tab?) = Unit
// })
// tabLayout.setupWithViewPager(pager)
}
fun getTabViewModel(): TabViewModel = viewModel
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/MainFragment.kt
package br.com.programadordeelite.gdc
import android.os.Bundle
import android.view.View
import br.com.programadordeelite.gdc.codelab.util.navTo
import br.com.programadordeelite.gdc.codelab.util.toast
import br.com.programadordeelite.gdc.databinding.FragmentMainBinding
/** Main Menu Study Guide */
class MainFragment : androidx.fragment.app.Fragment(R.layout.fragment_main) {
private lateinit var binding: FragmentMainBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
// +-------------------------------------------------------------------------------------+
// | Orientação do app: Portrait(em pé) or Landscape(deitado) or Unspecified (os dois) |
// +-------------------------------------------------------------------------------------+
// if you not define it in the manifest, you could also do it that way programmatically
//requireActivity().requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_PORTRAIT
binding = FragmentMainBinding.bind(view)
binding.codelabToastSnake.setOnClickListener { navTo(R.id.toastSnakeFragment) }
binding.codelabNotification.setOnClickListener { navTo(R.id.notificationFragment) }
binding.codelabWorkManager.setOnClickListener { navTo(R.id.selectImageFragment) }
binding.codelabMaterialComponents.setOnClickListener { navTo(R.id.materialComponentsFragment) }
binding.codelabInteractiveUi.setOnClickListener { navTo(R.id.interactiveUiFragment) }
binding.codelabActivitiesIntents.setOnClickListener { navTo(R.id.sendFragment) }
binding.codelabRecyclerviewWithPaging.setOnClickListener{ navTo(R.id.recyclerViewPagingFragment) }
binding.codelabAppSettings.setOnClickListener{ navTo(R.id.appSettingsFragment) }
binding.codelabRecyclerview.setOnClickListener { navTo(R.id.recyclerViewFragment) }
binding.codelabAccessibility.setOnClickListener { navTo(R.id.accessibilityFragment) }
binding.codelabCreateCustomView.setOnClickListener { navTo(R.id.customViewFragment) }
binding.codelabUserNavigationTab.setOnClickListener { navTo(R.id.tabHostFragment) }
binding.codelabUserNavigationDrawer.setOnClickListener { navTo(R.id.drawerFragment) }
binding.codelabMenuPickers.setOnClickListener { navTo(R.id.menuFragment) }
binding.codelabThemesTouches.setOnClickListener { navTo(R.id.themeFragment) }
binding.codelabThemesTouchesBaterry.setOnClickListener { navTo(R.id.batteryFragment) }
binding.codelabRoomWithView.setOnClickListener { navTo(R.id.wordFragment) }
binding.codelabCustomTheme.setOnClickListener { toast(getString(R.string.duplicated_dynamic, "Day / Night")) }
binding.codelabDrawableStyleThemes.setOnClickListener { toast(getString(R.string.duplicated_dynamic, "Day / Night")) }
binding.codelabRoomLivedataViewmodel.setOnClickListener { toast(getString(R.string.duplicated)) }
binding.codelabRepository.setOnClickListener { toast(getString(R.string.duplicated)) }
binding.codelabSharedPreferences.setOnClickListener { navTo(R.id.sharedPrefFragment) }
binding.codelabAnimPulseExplode.setOnClickListener { navTo(R.id.pulseExplodeFragment) }
binding.codelabViewmodelViewmodelfactory.setOnClickListener { navTo(R.id.titleFragment) }
binding.codelabAndroidStudioDebugger.setOnClickListener { navTo(R.id.calcFragment) }
binding.codelabAddLogStatements.setOnClickListener { toast(getString(R.string.duplicated_dynamic, "Debugger")) }
// +-----------------------------------------------------------------+
// | Codelab: Navegação entre activities (no fragments for show case) |
// +-----------------------------------------------------------------+
binding.codelabUserNavigation.setOnClickListener { navTo(R.id.showTabsActivity) }
// +-----------------------------------------------------------------+
// | Espresso Cheat Sheet (spike muito bom para quando for testar) |
// +-----------------------------------------------------------------+
// https://android.github.io/android-test/downloads/espresso-cheat-sheet-2.1.0.pdf
// Melhores práticas de teste >> https://developer.android.com/training/testing
binding.codelabUnitTests.setOnClickListener { toast("Veja >> Pacote: test") }
binding.codelabTestingJunitMockito.setOnClickListener { toast("Veja >> Pacote: androidTest") }
binding.codelabEspressoUiTesting.setOnClickListener { toast("Veja >> Pacote: AndroidTest") }
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/datamanagement/roomwithview/WordFragment.kt
package br.com.programadordeelite.gdc.codelab.datamanagement.roomwithview
import android.os.Bundle
import android.view.View
import androidx.fragment.app.Fragment
import androidx.fragment.app.setFragmentResultListener
import androidx.fragment.app.viewModels
import androidx.navigation.fragment.findNavController
import androidx.recyclerview.widget.LinearLayoutManager
import br.com.programadordeelite.gdc.MainApplication
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.codelab.datamanagement.roomwithview.NewWordFragment.Companion.BUNDLE_KEY_WORD
import br.com.programadordeelite.gdc.codelab.datamanagement.roomwithview.NewWordFragment.Companion.BUNDLE_REQUEST_KEY
import br.com.programadordeelite.gdc.codelab.util.navTo
import br.com.programadordeelite.gdc.databinding.FragmentWordBinding
// +-----------------------------------------------------------------+
// | TELA QUE EXIBIRA AS PALAVRAS SALVAS EM NOSSO BANCO DE DADOS |
// +-----------------------------------------------------------------+
class WordFragment : Fragment(R.layout.fragment_word) {
private lateinit var binding: FragmentWordBinding
// OBTER VIEW MODEL ATRAVÉS DE EXTENCÕES DE KOTLIN
private val wordViewModel: WordViewModel by viewModels {
// FACTORY CRIA NOSSO MODELO INJETANDO O REPOSITÓRIO QUE CRIAMOS
WordViewModelFactory((requireActivity().application as MainApplication).repository)
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentWordBinding.bind(view)
val adapter = WordListAdapter()
binding.recyclerview.adapter = adapter
        // PODE SER DEFINIDO NO XML TAMBÉM SE PREFERIR, MAS QUERIA TE MOSTRAR MAIS UMA FORMA DE FAZER
binding.recyclerview.layoutManager = LinearLayoutManager(requireContext())
binding.fab.setOnClickListener { navTo(R.id.newWordFragment) }
        wordViewModel.allWords.observe(viewLifecycleOwner, { words ->
// Atualize a cópia em cache das palavras no adaptador.
words?.let { adapter.submitList(it) }
})
        // DEFINIR O QUE SERÁ RETORNADO COMO PARÂMETRO QUANDO ESTA TELA FOR FECHADA
setFragmentResultListener(BUNDLE_REQUEST_KEY) { _, bundle ->
val result = bundle.getString(BUNDLE_KEY_WORD, "vazio")
wordViewModel.insert(Word(result))
}
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/materialcomponents/ProductGridFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.materialcomponents
import android.os.Bundle
import android.view.View
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentProductGridBinding
class ProductGridFragment : Fragment(R.layout.fragment_product_grid) {
private lateinit var binding: FragmentProductGridBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)
        binding = FragmentProductGridBinding.bind(view)
    }
}<file_sep>/white-label-firebase-tutorial-master/app/src/main/java/br/com/douglasmotta/whitelabeltutorial/config/di/ConfigModule.kt
package br.com.douglasmotta.whitelabeltutorial.config.di
import br.com.douglasmotta.whitelabeltutorial.config.Config
import br.com.douglasmotta.whitelabeltutorial.config.ConfigImpl
import dagger.Binds
import dagger.Module
import dagger.hilt.InstallIn
import dagger.hilt.android.components.ViewModelComponent
@Module
@InstallIn(ViewModelComponent::class)
interface ConfigModule {
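    // @Binds tells Hilt to inject ConfigImpl whenever a Config is requested (available to ViewModels via ViewModelComponent)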
@Binds
fun bindConfig(config: ConfigImpl): Config
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/core/workmanager/BlurWorker.kt
package br.com.programadordeelite.gdc.codelab.core.workmanager
import android.content.Context
import android.graphics.BitmapFactory
import android.graphics.ImageDecoder
import android.net.Uri
import android.os.Build
import android.provider.MediaStore
import androidx.annotation.RequiresApi
import androidx.work.Worker
import androidx.work.WorkerParameters
import br.com.programadordeelite.gdc.R
import timber.log.Timber
// worker que vai aplicar o blur
class BlurWorker(private val ctx: Context, params: WorkerParameters) : Worker(ctx, params) {
@RequiresApi(Build.VERSION_CODES.P)
override fun doWork(): Result {
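        // Runs on a background thread managed by WorkManager; Result.success()/failure() reports the outcome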
val appContext = applicationContext
makeStatusNotification("Blurring image", appContext)
return try {
val resourceUri = inputData.getString(KEY_IMAGE_URI)
val pic = MediaStore.Images.Media.getBitmap(ctx.contentResolver, Uri.parse(resourceUri))
val output = blurBitmap(pic, appContext)
// Write bitmap to a temp file
val outputUri = writeBitmapToFile(appContext, output)
makeStatusNotification("Output is $outputUri", appContext)
Result.success()
} catch (throwable: Throwable) {
Timber.e(throwable, "Error applying blur")
Result.failure()
}
}
}<file_sep>/aad-main/app/build.gradle
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
apply plugin: 'kotlin-android-extensions'
// ATENCAO, NAO FUNCIONA, POR ISSO, JOGA EXCECAO:
// Class 'ActionGameToScore' is not abstract and does not implement abstract member public abstract val actionId: Int defined in androidx.navigation.NavDirections
//apply plugin: 'androidx.navigation.safeargs.kotlin' // Codelabs -> ViewModel and ViewModelFactory
// POR ISSO OPTEI POR ESSA SOLUCAO, ATE ELES ENTREGAREM UM FIX
apply plugin: 'androidx.navigation.safeargs' // Codelabs -> ViewModel and ViewModelFactory
// +-----------------------------------------------------------------+
// | Annotation processor for Room |
// +-----------------------------------------------------------------+
apply plugin: 'kotlin-kapt'
android {
compileSdkVersion 31
buildToolsVersion "29.0.3"
defaultConfig {
applicationId "br.com.programadordeelite.gdc"
minSdkVersion 23
targetSdkVersion 31
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
// +-----------------------------------------------------------------+
// | Workmanager |
// +-----------------------------------------------------------------+
renderscriptTargetApi 11
renderscriptSupportModeEnabled true
vectorDrawables {
useSupportLibrary true
}
// The following argument makes the Android Test Orchestrator run its
// "pm clear" command after each test invocation. This command ensures
// that the app's state is completely cleared between tests.
testInstrumentationRunnerArguments clearPackageData: 'true'
}
signingConfigs {
release {
storeFile file("release-key.keystore")
storePassword '<PASSWORD>'
keyAlias 'alias'
keyPassword '<PASSWORD>'
}
}
// +-----------------------------------------------------------------+
// | Build types you'll encounter in every projects (Build Variants) |
// +-----------------------------------------------------------------+
buildTypes {
release {
signingConfig signingConfigs.release
minifyEnabled true
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
// debug don't need to be specified, only if you want to change something
debug {
pseudoLocalesEnabled true // para exibir idiomas fakes no seu dispositivo
minifyEnabled false // para nao compactar o código durante desenvolvimento
debuggable true // para poder usar break points e depurar valores
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
/**
* The `initWith` property allows you to copy configurations from other build types,
* then configure only the settings you want to change. This one copies the debug build
* type, and then changes the manifest placeholder and application ID.
*/
staging {
initWith debug
manifestPlaceholders = [hostName:"internal.softsuit.com"]
applicationIdSuffix ".debugStaging"
}
}
// +-----------------------------------------------------------------+
// | Java Compile Options |
// +-----------------------------------------------------------------+
compileOptions {
sourceCompatibility JavaVersion.VERSION_11
targetCompatibility JavaVersion.VERSION_11
}
kotlinOptions {
        jvmTarget = "11" // keep in sync with the Java 11 compile options above
}
// +-----------------------------------------------------------------+
// | exclude atomic functions module from package prevent warnings |
// +-----------------------------------------------------------------+
packagingOptions {
exclude 'META-INF/atomicfu.kotlin_module'
}
// +-----------------------------------------------------------------+
// | Enables view binding |
// +-----------------------------------------------------------------+
buildFeatures {
viewBinding true
dataBinding true
}
useLibrary("android.test.runner")
}
dependencies {
implementation fileTree(dir: "libs", include: ["*.jar"])
implementation "org.jetbrains.kotlin:kotlin-stdlib:1.5.10"
implementation 'androidx.core:core-ktx:1.6.0'
implementation 'androidx.appcompat:appcompat:1.3.1'
// +-----------------------------------------------------------------+
// | Layouting |
// +-----------------------------------------------------------------+
implementation 'androidx.constraintlayout:constraintlayout:2.1.0'
implementation 'androidx.legacy:legacy-support-v4:1.0.0'
// +-----------------------------------------------------------------+
// | Workmanager |
// +-----------------------------------------------------------------+
implementation "androidx.work:work-runtime-ktx:2.6.0-rc01"
implementation "com.github.bumptech.glide:glide:4.10.0"
implementation "com.jakewharton.timber:timber:4.7.1"
implementation "androidx.lifecycle:lifecycle-extensions:2.2.0"
// +-----------------------------------------------------------------+
// | com.android... are old, using material instead |
// +-----------------------------------------------------------------+
// implementation "com.android.support:design:26.1.0"
// +-----------------------------------------------------------------+
// | TabExperience - UPDATE |
// +-----------------------------------------------------------------+
implementation "androidx.viewpager2:viewpager2:1.0.0"
// +-----------------------------------------------------------------+
// | Material Components |
// +-----------------------------------------------------------------+
implementation 'com.google.android.material:material:1.5.0-alpha02'
implementation 'com.android.volley:volley:1.2.0'
implementation 'com.google.code.gson:gson:2.8.6'
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.5.10"
androidTestImplementation 'androidx.test:core:1.4.0'
androidTestImplementation 'androidx.test:runner:1.4.0'
// +-----------------------------------------------------------------+
// | startFragmentForResult (setFragmentResultListener) |
// +-----------------------------------------------------------------+
implementation "androidx.fragment:fragment-ktx:1.3.6"
// +-----------------------------------------------------------------+
// | Codelab App Settings |
// +-----------------------------------------------------------------+
implementation 'androidx.preference:preference-ktx:1.1.1'
// +-----------------------------------------------------------------+
// | Room with view |
// +-----------------------------------------------------------------+
implementation "androidx.lifecycle:lifecycle-viewmodel-ktx:2.3.1"
implementation "androidx.lifecycle:lifecycle-livedata-ktx:2.3.1"
implementation "androidx.lifecycle:lifecycle-common-java8:2.3.1"
implementation "androidx.lifecycle:lifecycle-runtime-ktx:2.3.1"
// +-----------------------------------------------------------------+
// | Coroutines |
// +-----------------------------------------------------------------+
implementation "org.jetbrains.kotlinx:kotlinx-coroutines-android:1.5.0"
implementation "org.jetbrains.kotlinx:kotlinx-coroutines-core:1.5.0"
implementation "androidx.room:room-runtime:2.3.0"
implementation "androidx.room:room-ktx:2.3.0"
kapt "androidx.room:room-compiler:2.3.0"
implementation "androidx.activity:activity-ktx:1.3.1"
// +-----------------------------------------------------------------+
// | Paging 3 Library |
// +-----------------------------------------------------------------+
implementation("androidx.paging:paging-runtime-ktx:3.0.1")
// alternatively - without Android dependencies for tests
//testImplementation("androidx.paging:paging-common-ktx:3.0.0")
// optional - RxJava2 support
//implementation("androidx.paging:paging-rxjava2-ktx:3.0.0")
// optional - RxJava3 support
//implementation("androidx.paging:paging-rxjava3:3.0.0")
// optional - Guava ListenableFuture support
//implementation("androidx.paging:paging-guava:3.0.0")
// +-----------------------------------------------------------------+
// | Retrofit - Paging 3 Library |
// +-----------------------------------------------------------------+
implementation "com.squareup.retrofit2:retrofit:2.9.0"
implementation "com.squareup.retrofit2:converter-gson:2.9.0"
implementation "com.squareup.retrofit2:retrofit-mock:2.9.0"
implementation "com.squareup.okhttp3:logging-interceptor:4.9.0"
// +-----------------------------------------------------------------+
// | Accessibility |
// +-----------------------------------------------------------------+
androidTestImplementation "androidx.test.espresso:espresso-accessibility:3.4.0"
androidTestImplementation "androidx.test.espresso:espresso-intents:3.4.0" // deprecated
androidTestImplementation "androidx.test:core-ktx:1.4.0" // use this instead now
androidTestImplementation "androidx.test.ext:junit-ktx:1.1.3" // use this instead now
// +-----------------------------------------------------------------+
// | Navigation |
// +-----------------------------------------------------------------+
def nav_version = "2.3.5"
// Kotlin
implementation "androidx.navigation:navigation-fragment-ktx:$nav_version" //Codelabs -> ViewModel and ViewModelFactory
implementation "androidx.navigation:navigation-ui-ktx:$nav_version" //Codelabs -> ViewModel and ViewModelFactory
// Feature module Support
implementation "androidx.navigation:navigation-dynamic-features-fragment:$nav_version"
// Jetpack Compose Integration
implementation "androidx.navigation:navigation-compose:2.4.0-alpha07"
// +-----------------------------------------------------------------+
// | Testing deps |
// +-----------------------------------------------------------------+
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation "androidx.room:room-testing:2.3.0"
androidTestImplementation "androidx.arch.core:core-testing:2.1.0"
androidTestImplementation ("androidx.test.espresso:espresso-core:3.4.0", {
exclude group: 'com.android.support', module: 'support-annotations'
})
androidTestImplementation "androidx.test.ext:junit:1.1.3"
// Testing Navigation
androidTestImplementation "androidx.navigation:navigation-testing:$nav_version"
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/datamanagement/sharedprefs/SharedPrefFragment.kt
package br.com.programadordeelite.gdc.codelab.datamanagement.sharedprefs
import android.content.Context.MODE_PRIVATE
import android.content.SharedPreferences
import android.os.Bundle
import android.view.View
import androidx.annotation.ColorRes
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentSharedPrefBinding
class SharedPrefFragment : Fragment(R.layout.fragment_shared_pref) {
companion object {
const val COUNT_KEY = "COUNT_KEY"
const val COLOR_KEY = "COLOR_KEY"
const val SHARED_PREFS = "com.softsuit.codelab.datamanagement.sharedprefs"
}
private var mCount = 0
@ColorRes private var mColor = R.color.black_text_color
private val mPreferences: SharedPreferences by lazy { requireActivity().getSharedPreferences(SHARED_PREFS, MODE_PRIVATE) }
private lateinit var binding: FragmentSharedPrefBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentSharedPrefBinding.bind(view)
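        // Restore the count and color persisted in onPause (0 means nothing has been stored yet)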
mPreferences.getInt(COUNT_KEY, 0).let { mCount = it }
mPreferences.getInt(COLOR_KEY, 0).let { mColor = it }
if (mCount != 0) binding.countText.text = String.format("%s", mCount)
if (mColor != 0) binding.countText.setTextColor(getColorFromResourceId(mColor))
binding.reset.setOnClickListener { reset() }
binding.blackButton.setOnClickListener {
mColor = R.color.design_default_color_on_secondary
updateUi(R.color.design_default_color_on_secondary)
}
binding.greenButton.setOnClickListener {
mColor = R.color.design_default_color_secondary
updateUi(R.color.design_default_color_secondary)
}
binding.blueButton.setOnClickListener {
mColor = R.color.titleColor
updateUi(R.color.titleColor)
}
binding.redButton.setOnClickListener {
mColor = R.color.design_default_color_error
updateUi(R.color.design_default_color_error)
}
binding.count.setOnClickListener {
binding.countText.text = (++mCount).toString()
}
}
private fun updateUi(@ColorRes colorId: Int) {
binding.countText.setTextColor(getColorFromResourceId(colorId))
}
override fun onPause() {
super.onPause()
// NA HORA DO "APERREIO" DA PROVA, DA UM SWITCH PRA CÁ, LEMBRA E VOLTA! :)
mPreferences.let {
val editor = it.edit()
editor.putInt(COUNT_KEY, mCount)
editor.putInt(COLOR_KEY, mColor)
editor.apply()
}
}
private fun reset() {
mCount = 0
binding.countText.text = String.format("%s", mCount)
        mColor = R.color.black_text_color
        binding.countText.setTextColor(getColorFromResourceId(mColor))
// NA HORA DO "APERREIO" DA PROVA, DA UM SWITCH PRA CÁ, LEMBRA E VOLTA! :)
mPreferences.let {
val editor = it.edit()
editor.clear()
editor.apply()
}
}
    private fun getColorFromResourceId(@ColorRes colorId: Int): Int {
        // Resolve the resource to an actual color int; fall back to the default text color when nothing was stored (0)
        val resolvedId = if (colorId != 0) colorId else R.color.black_text_color
        return requireContext().getColor(resolvedId)
    }
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/menu/OrderFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.menu
import android.os.Bundle
import android.view.ContextMenu
import android.view.MenuItem
import android.view.View
import androidx.appcompat.app.AlertDialog
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.codelab.util.snake
import br.com.programadordeelite.gdc.codelab.util.toast
import br.com.programadordeelite.gdc.databinding.FragmentOrderBinding
class OrderFragment : Fragment(R.layout.fragment_order) {
private lateinit var binding: FragmentOrderBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentOrderBinding.bind(view)
setHasOptionsMenu(true)
registerForContextMenu(binding.title)
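        // A long press on the title view opens the context menu inflated in onCreateContextMenu below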
binding.alertButton.setOnClickListener {
AlertDialog
.Builder(requireContext())
.setTitle("Alerta")
.setMessage("Olá!")
.setPositiveButton("OK") { _, _ -> toast("Pressed OK") }
.setNegativeButton("Cancel") { _, _ -> snake(it,"Pressed Cancel") }
.show()
}
binding.datePickerButton.setOnClickListener {
DatePickerFragment { result -> binding.selectedDateText.text = result }
.show(childFragmentManager,"datePicker")
}
binding.timePickerButton.setOnClickListener {
TimePickerFragment { result -> binding.selectedTimeText.text = result }
.show(childFragmentManager,"timePicker")
}
}
override fun onCreateContextMenu(
menu: ContextMenu,
v: View,
menuInfo: ContextMenu.ContextMenuInfo?
) {
super.onCreateContextMenu(menu, v, menuInfo)
requireActivity().menuInflater.inflate(R.menu.menu_main, menu)
}
override fun onContextItemSelected(item: MenuItem): Boolean {
when (item.itemId) {
R.id.action_status -> {
toast(getString(R.string.action_status_message))
return true
}
R.id.action_favorites -> {
toast(getString(R.string.action_favorites_message))
return true
}
R.id.action_contact -> {
toast(getString(R.string.action_contact_message))
return true
}
else -> Unit
}
return super.onContextItemSelected(item)
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/animation/PulseExplodeFragment.kt
package br.com.programadordeelite.gdc.codelab.animation
import android.os.Bundle
import android.view.View
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.codelab.util.navTo
import br.com.programadordeelite.gdc.codelab.util.setVisible
import br.com.programadordeelite.gdc.databinding.FragmentPulseExplodeBinding
class PulseExplodeFragment : Fragment(R.layout.fragment_pulse_explode) {
private lateinit var binding: FragmentPulseExplodeBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentPulseExplodeBinding.bind(view)
// ==================================================
// PULSE EXPLODE ANIMATION -------------------
// ==================================================
startPulseAnim(binding.buttonPulseBackgroundAnimation)
binding.buttonPulse.setOnClickListener {
binding.buttonPulse.setVisible(false)
startExplodeAnim(binding.buttonPulseBackgroundAnimation, ::showNextScreen)
}
}
private fun showNextScreen() {
navTo(R.id.pulseExplodeDestinationFragment_with_custom_animation)
}
private fun startPulseAnim(view: View) = scaleDown(view)
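    // scaleUp and scaleDown trigger each other via withEndAction, producing an endless pulse until another animation replaces it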
private fun scaleUp(view: View) {
view.animate()
.scaleX(SCALE_UP_VALUE)
.scaleY(SCALE_UP_VALUE)
.setDuration(PULSE_DURATION)
.withEndAction { kotlin.run { scaleDown(view) } }
}
private fun scaleDown(view: View) {
view.animate()
.scaleX(SCALE_DOWN_VALUE)
.scaleY(SCALE_DOWN_VALUE)
.setDuration(PULSE_DURATION)
.withEndAction { kotlin.run { scaleUp(view) } }
}
private fun startExplodeAnim(view: View, endAction: () -> Unit) = shrink(view, endAction)
private fun shrink(view: View, endAction: () -> Unit) {
// if any previous animation is running, make sure it is canceled first
view.animation?.let {
it.cancel()
it.reset()
}
view.animate()
.scaleX(SHRINK_VALUE)
.scaleY(SHRINK_VALUE)
.setDuration(SHRINK_DURATION)
.withEndAction { kotlin.run { explode(view, endAction) } }
}
private fun explode(view: View, endAction: () -> Unit) {
view.animate()
.scaleX(EXPLODE_VALUE)
.scaleY(EXPLODE_VALUE)
.setDuration(EXPLODE_DURATION)
.withEndAction { kotlin.run { endAction() } }
}
companion object AnimProps {
private const val PULSE_DURATION = 1500L
private const val SHRINK_DURATION = 600L
private const val EXPLODE_DURATION = 300L
private const val SCALE_DOWN_VALUE = 0.9f
private const val SCALE_UP_VALUE = 1f
private const val SHRINK_VALUE = 0.3f
private const val EXPLODE_VALUE = 4f
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/util/SharedPrefExt.kt
package br.com.programadordeelite.gdc.codelab.util
import android.content.Context
import androidx.annotation.StyleRes
const val GLOBAL_SHARED_PREFS = "br.com.programadordeelite.global.sharedprefs"
const val GLOBAL_APP_STYLE = "GLOBAL_APP_STYLE"
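// Convenience extensions that persist and read simple values from a single app-wide SharedPreferences file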
fun Context.putStyleIntSharedPref(key:String, @StyleRes value:Int){
val sharedPref = getSharedPreferences(GLOBAL_SHARED_PREFS, Context.MODE_PRIVATE)
sharedPref?.edit()?.putInt(key, value)?.apply()
}
fun Context.putStringSharedPref(key:String, value:String){
val sharedPref = getSharedPreferences(GLOBAL_SHARED_PREFS, Context.MODE_PRIVATE)
sharedPref?.edit()?.putString(key, value)?.apply()
}
fun Context.putBooleanSharedPref(key:String, value:Boolean){
val sharedPref = getSharedPreferences(GLOBAL_SHARED_PREFS, Context.MODE_PRIVATE)
sharedPref?.edit()?.putBoolean(key, value)?.apply()
}
fun Context.getIntSharedPref(key:String): Int{
val sharedPref = getSharedPreferences(GLOBAL_SHARED_PREFS, Context.MODE_PRIVATE)
return sharedPref?.getInt(key, -1) ?: -1
}
fun Context.getStringSharedPref(key:String): String{
val sharedPref = getSharedPreferences(GLOBAL_SHARED_PREFS, Context.MODE_PRIVATE)
return sharedPref?.getString(key, "") ?: ""
}
fun Context.getBooleanSharedPref(key:String): Boolean{
val sharedPref = getSharedPreferences(GLOBAL_SHARED_PREFS, Context.MODE_PRIVATE)
return sharedPref?.getBoolean(key, false) ?: false
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/datamanagement/appsettings/SettingsFragment.kt
package br.com.programadordeelite.gdc.codelab.datamanagement.appsettings
import android.os.Bundle
import androidx.preference.PreferenceFragmentCompat
import br.com.programadordeelite.gdc.R
const val KEY_PREF_SIGNATURE = "signature"
const val KEY_PREF_REPLY = "reply"
const val KEY_PREF_SYNC = "sync"
const val KEY_PREF_ATTACHMENT = "attachment"
class SettingsFragment : PreferenceFragmentCompat() {
override fun onCreatePreferences(savedInstanceState: Bundle?, rootKey: String?) {
setPreferencesFromResource(R.xml.root_preferences, rootKey)
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/menu/TimePickerFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.menu
import android.app.Dialog
import android.app.TimePickerDialog
import android.os.Bundle
import android.text.format.DateFormat
import android.widget.TimePicker
import androidx.fragment.app.DialogFragment
import java.util.*
class TimePickerFragment(val callback: (result: String) -> Unit) : DialogFragment(), TimePickerDialog.OnTimeSetListener {
override fun onCreateDialog(savedInstanceState: Bundle?): Dialog {
val c: Calendar = Calendar.getInstance()
val hour = c[Calendar.HOUR_OF_DAY]
val minute = c[Calendar.MINUTE]
return TimePickerDialog(requireContext(), this, hour, minute, DateFormat.is24HourFormat(activity))
}
override fun onTimeSet(time: TimePicker?, hour: Int, minute: Int) {
        // Zero-pad so e.g. 9:05 is shown as "09:05" rather than "9 : 5"
        callback(String.format("%02d:%02d", hour, minute))
}
}<file_sep>/AppContatos/app/src/main/java/com/example/appcontatos/ContatoViewHolder.kt
package com.example.appcontatos
import android.view.View
import android.widget.TextView
import androidx.recyclerview.widget.RecyclerView
class ContatoViewHolder (view: View): RecyclerView.ViewHolder(view) {
private val textViewNome = itemView.findViewById<TextView>(R.id.textViewNome)
private val textViewTelefone = itemView.findViewById<TextView>(R.id.textViewTelefone)
fun bind(item: Contato) {
textViewNome.text = item.nome
textViewTelefone.text = item.telefone
}
}<file_sep>/aad-main/README.md
# Google Developer Certification 🥇 (Android Associated Certification)
🇧🇷 **Preparatório** para Certificação de desenvolvedor do Google 🏁 **EM APENAS 23 DIAS** (Android Associate Certification) - um guia passo a passo completo com vídeos no YouTube passando por todas as seções oficiais de cada codelab, PORÉM TUDO EM PORTUGUÊS 🇧🇷 E EM **KOTLIN!** ⚽😀
### ⚙️ **SETUP PROJETO**
| Titulo | 📺 Youtube Link |
| :------------- |:-------------:|
| Criando o projeto e fazendo todo o setup usando navigation graph | <a href="https://youtu.be/NjhT6ZA62LQ" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/google_certification_android_associated_developer_setup_mini.png" width="100" height="56"></a> |
### 🧪 **CODELAB: CORE**
| Titulo | 📺 Youtube Link |
| :------------- |:-------------:|
| Alertas e Mensagens com ações (toast & snake), extenções Kotlin e NavGraph | <a href="https://youtu.be/9vfXfq1RfTo" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/google_certification_android_associated_developer_toast_and_snake_alertas_e_mensagens_mini.png" width="100" height="56"></a>|
| Como disparar notificações de push personalizadas e com ações | <a href="https://youtu.be/iDlRsZ-hAU8" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BPASSO-A-PASSO%5D-Como-Criar-ANDROID-PUSH-NOTIFICATIONS-PERSONALIZADAS-sem-Google-Cloud-Message_mini.png" width="100" height="56"></a>|
| Como reagir a eventos do sistema (BroadCastReceiver) | <a href="https://youtu.be/vGwr9XZ8xDY" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BPASSO-A-PASSO%5D-Broadcastreceiver-Android-Studio-Tutorial-Notifica%C3%A7%C3%B5es-Actions-Tarefas-Background_mini.png" width="100" height="56"></a>|
| Como traduzir seu aplicativo para vários idiomas | <a href="https://youtu.be/XHT5RXsp8uM" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BAPP-INTERNACIONAL%5D-Como-Criar-Um-Aplicativo-Para-Celular-Com-Suporte-A-V%C3%A1rios-Idiomas-ANDROID-STUDIO_mini.png" width="100" height="56"></a> |
| Como executar tarefas em Segundo Plano - Parte 1 | <a href="https://youtu.be/5AGWzq9JpYo" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BWORKMANAGER%5D-Como-Executar-Tarefas-(Threads)-Em-Segundo-Plano-(Background)-em-uma-Android-App-parte1_mini.png" width="100" height="56"></a> |
| Como executar tarefas em Segundo Plano - Parte 2 | <a href="https://youtu.be/MJpeoRopmgw" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BWORKMANAGER%5D-Como-Executar-Tarefas-(Threads)-Em-Segundo-Plano-(Background)-em-uma-Android-App-parte2_mini.png" width="100" height="56"></a> |
### 🧪 **CODELAB: USER INTERFACE(INTERAÇÃO COM O USUÁRIO)**
| Titulo | 📺 Youtube Link |
| :------------- |:-------------:|
| Como criar textos, campos, botões e reagir a EVENTOS DE CLICK | <a href="https://youtu.be/qE5lZRSrgxo" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BTEXTOS-CAMPOS-BOT%C3%95ES%5D-Como-manipular-textos-campos-bot%C3%B5es-reagindo-a-eventos-de-click-android_mini.png" width="100" height="56"></a> |
| Como criar um app layout profissional em 10min | <a href="https://youtu.be/XBUbvKczRRI" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BINCR%C3%8DVEL%5D-Como-criar-um-APP-PROFISSIONAL-do-zero-em-10-minutos-no-ANDROID-STUDIO-usando-KOTLIN_mini.png" width="100" height="56"></a> |
| Como abrir uma segunda tela (Activity/Fragment) e navegar entre telas | <a href="https://youtu.be/5gqNUeNi9es" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BF%C3%81CIL%5D-Como-abrir-uma-segunda-tela-Activity-ou-Fragment-e-navegar-entre-elas-no-Android-Studio_mini.png" width="100" height="56"></a> |
| Como criar listas dinâmicas com RecyclerView | <a href="https://youtu.be/Da3PCGnIagE" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BPASSO-A-PASSO%5D-Como-Criar-listas-personalizadas-com-Recyclerview-no-Android-Studio-Tutorial_mini.png" width="100" height="56"></a> |
| Como criar listas infinitas com paginação fazendo requests na internet | <a href="https://youtu.be/lAAlxi2IH0U" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BLISTAS-INFINITAS%5D-Como-Criar-Listas-fluidas-com-Recyclerview-usando-Paging-3-Library-Android_mini.png" width="100" height="56"></a> |
| Como desenvolver aplicativos com acessibilidade - Guia completo | <a href="https://youtu.be/riAGnGv5aAs" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BAcessibilidade%5D-Como-desenvolver-aplicativos-acess%C3%ADveis-ativar-narrador-de-voz-Sistema-Android_mini.png" width="100" height="56"></a> |
| Como implementar a navegação de abas e guias usando TabLayout e ViewPager | <a href="https://youtu.be/SGazP_G4ek0" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BFACIL%5D-Como-implementar-o-Tablayout-abas-usando-Fragments-e-ViewPager-Guia-no-Android-Studio_mini.png" width="100" height="56"></a> |
| Como criar views e componentes personalizados com Custom Views | <a href="https://youtu.be/fpNEife2cYo" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BANDROID-CUSTOM-VIEW-TUTORIAL%5D-Como-criar-componentes-personalizados-no-android-usando-KOTLIN_mini.png" width="100" height="56"></a> |
| Como criar a navegação de hambuger lateral (Navigation Drawer) usando fragments | <a href="https://youtu.be/X5B8Q6q22FU" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BDO%20ZERO%5D-Como-criar-ou-implementar-o-navigation-drawer-com-fragments-no-Android-Studio-kotlin_mini.png" width="100" height="56"></a> |
| Como implementar TIME OU DATEPICKER (Data e Hora) com Fragments em KOTLIN no Android Studio | <a href="https://youtu.be/QJj3T8H8myI" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BDATA-E-HORA%5D-Como-implementar-TIME-OU-DATEPICKER-com-Fragments-em-KOTLIN-no-Android-Studio-2021_mini.png" width="100" height="56"></a> |
| Como criar ou alterar ÍCONE PADRÃO do aplicativo no Android Studio KOTLIN | <a href="https://youtu.be/JMvXUjISoFo" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BCRIANDO-%C3%8DCONES%5D-Como-criar-ou-alterar-%C3%8DCONES-PADR%C3%83O-do-app-no-Android-Studio-din%C3%A2micamente-KOTLIN_mini.png" width="100" height="56"></a> |
### 🧪 **CODELAB: DATA MANAGEMENT(ROOM E ARMAZENAMENTO DE DADOS)**
| Titulo | 📺 Youtube Link |
| :------------- |:-------------:|
| Como usar o ViewModel com o banco de dados Room e LiveData no android studio Kotlin STUDIO | <a href="https://youtu.be/EqkqNjY7uGs" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BMVVM%5D-Como-usar-o-ViewModel-com-o-banco-de-dados-Room-e-LiveData-no-android-studio-Kotlin_mini.png" width="100" height="56"></a> |
| Como USAR SHARED PREFERENCES e PERSISTIR na MEMÓRIA? KOTLIN ANDROID | <a href="https://youtu.be/ScctmGl47tc" target="_blank"><img src="https://github.com/treslines/aad/blob/main/app/src/main/mini/%5BSALVAR-PREFER%C3%8ANCIAS%5D-Como-USAR-SHARED-PREFERENCES-e-PERSISTIR-na-MEM%C3%93RIA-KOTLIN-ANDROID-STUDIO_mini.png" width="100" height="56"></a> |
### 🧪 **CODELAB: DEGUBBING(CORRIGINDO BUGS DO SEU APP)**
- EM ANDAMENTO
### 🧪 **CODELAB: TESTING(APRENDA A TESTAR SEU APP > INTERFACE GRÁFICA e CÓDIGO)**
- EM ANDAMENTO
### 🧪 **CODELAB: ANIMATION(ANIMAÇÕES COM KOTLIN)**
- EM ANDAMENTO
<file_sep>/AulaMvvM/app/src/main/java/com/example/aulamvvm/MainActivity.kt
package com.example.aulamvvm
import android.content.Intent
import android.net.Uri
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.util.Log
import android.widget.Toast
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProvider
import com.example.aulamvvm.adapters.MainAdapter
import com.example.aulamvvm.databinding.ActivityMainBinding
import com.example.aulamvvm.databinding.ResItemLiveBinding
import com.example.aulamvvm.repositories.MainRepository
import com.example.aulamvvm.rest.RetrofitService
import com.example.aulamvvm.viewmodel.main.MainViewModel
import com.example.aulamvvm.viewmodel.main.MainViewModelFactory
class MainActivity : AppCompatActivity() {
private lateinit var binding: ActivityMainBinding
lateinit var viewModel: MainViewModel
private val retrofitService = RetrofitService.getInstance()
private val adapter = MainAdapter {
openLink(it.link)
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
binding = ActivityMainBinding.inflate(layoutInflater)
setContentView(binding.root)
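        // The factory supplies MainRepository (wrapping the Retrofit service) to MainViewModel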
viewModel = ViewModelProvider(this, MainViewModelFactory(MainRepository(retrofitService))).get(
MainViewModel::class.java
)
binding.recyclerview.adapter = adapter
}
override fun onStart() {
super.onStart()
viewModel.liveList.observe(this, Observer { lives ->
Log.i("Taina", "livelist.observe")
adapter.setLiveList(lives)
})
viewModel.errorMessage.observe(this, Observer { message ->
Toast.makeText(this, message, Toast.LENGTH_SHORT).show()
})
}
override fun onResume() {
super.onResume()
Log.i("Taina", "onResponse")
viewModel.getAllLives()
}
private fun openLink(link: String) {
val browserIntent = Intent(Intent.ACTION_VIEW, Uri.parse(link))
startActivity(browserIntent)
}
}
<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/util/FragmentExt.kt
package br.com.programadordeelite.gdc.codelab.util
import android.content.Intent
import android.os.Bundle
import android.view.View
import android.widget.Toast
import androidx.annotation.IdRes
import androidx.annotation.StyleRes
import androidx.appcompat.app.AppCompatDelegate
import androidx.fragment.app.Fragment
import androidx.navigation.fragment.findNavController
import br.com.programadordeelite.gdc.R
import com.google.android.material.snackbar.Snackbar
fun Fragment.navTo(@IdRes dest: Int) = findNavController().navigate(dest)
fun Fragment.navTo(@IdRes dest: Int, args: Bundle) = findNavController().navigate(dest, args)
fun Fragment.toast(msg: String) = Toast.makeText(requireContext(), msg, Toast.LENGTH_SHORT).show()
fun Fragment.snake(view: View, msg: String) = Snackbar.make(view, msg, Snackbar.LENGTH_SHORT).show()
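// Starts the given Activity from a Fragment, optionally attaching [args] as a Bundle extra under [name]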
fun Fragment.startActivity(clazz: Class<*>, name: String = "", args: Bundle = Bundle()){
val intent = Intent(requireContext(), clazz).apply {
        if (name.isNotEmpty() && !args.isEmpty) {
            putExtra(name, args)
}
}
requireActivity().startActivity(intent)
}
<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/usernavigation/drawer/DrawerFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.usernavigation.drawer
import android.os.Bundle
import android.view.View
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentDrawerBinding
class DrawerFragment : Fragment(R.layout.fragment_drawer) {
private lateinit var binding: FragmentDrawerBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)
        binding = FragmentDrawerBinding.bind(view)
    }
}<file_sep>/white-label-firebase-tutorial-master/app/src/main/java/br/com/douglasmotta/whitelabeltutorial/domain/usecase/CreateProductUseCase.kt
package br.com.douglasmotta.whitelabeltutorial.domain.usecase
import android.net.Uri
import br.com.douglasmotta.whitelabeltutorial.domain.model.Product
interface CreateProductUseCase {
suspend operator fun invoke(description: String, price: Double, imageUri: Uri): Product
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/datamanagement/viewmodelfactory/TitleFragment.kt
package br.com.programadordeelite.gdc.codelab.datamanagement.viewmodelfactory
import android.os.Bundle
import android.view.View
import androidx.fragment.app.Fragment
import androidx.navigation.fragment.findNavController
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentTitleBinding
class TitleFragment : Fragment(R.layout.fragment_title) {
private lateinit var binding: FragmentTitleBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentTitleBinding.bind(view)
binding.playGameButton.setOnClickListener {
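            // TitleFragmentDirections is generated by the Safe Args plugin from the action declared in the navigation graph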
findNavController().navigate(TitleFragmentDirections.actionTitleToGame())
}
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/usernavigation/tab/TabThreeFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.usernavigation.tab
import android.os.Bundle
import android.view.View
import androidx.fragment.app.Fragment
import androidx.fragment.app.viewModels
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProvider
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentTabThreeBinding
class TabThreeFragment(val viewModel: TabViewModel) : Fragment(R.layout.fragment_tab_three) {
private lateinit var binding: FragmentTabThreeBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentTabThreeBinding.bind(view)
// Criar um observador que atualiza a UI
val tabContentObserver = Observer<String> { newContent ->
binding.contentTitle.text = viewModel.getCurrent(newContent)
}
viewModel.getContentObserver().observe(viewLifecycleOwner, tabContentObserver)
binding.btnTabThree.setOnClickListener {
viewModel.getContentObserver().setValue("Tab Three!")
}
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/util/ViewExt.kt
package br.com.programadordeelite.gdc.codelab.util
import android.view.View
import android.view.animation.Animation
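// Starts [anim] on the view and invokes [onEnd] once it finishes, hiding the AnimationListener boilerplate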
fun View.startAnimation(anim: Animation, onEnd: () -> Unit) {
anim.setAnimationListener(object : Animation.AnimationListener {
override fun onAnimationEnd(anim: Animation?) = onEnd()
override fun onAnimationRepeat(anim: Animation?) = Unit
override fun onAnimationStart(anim: Animation?) = Unit
})
this.startAnimation(anim)
}
fun View.setVisible(show: Boolean) {
if (show) this.visibility = View.VISIBLE else this.visibility = View.GONE
}<file_sep>/Firebase-Authentication-Android-aula-3/app/src/main/java/br/com/alura/aluraesporte/ui/viewmodel/CadastroUsuarioViewModel.kt
package br.com.alura.aluraesporte.ui.viewmodel
import androidx.lifecycle.LiveData
import androidx.lifecycle.ViewModel
import br.com.alura.aluraesporte.repository.FirebaseAuthRepository
import br.com.alura.aluraesporte.repository.Resource
class CadastroUsuarioViewModel(private val repository: FirebaseAuthRepository) : ViewModel(){
fun cadastra(email: String, senha: String): LiveData<Resource<Boolean>> {
return repository.cadastra(email, senha)
}
}<file_sep>/CURSO ANDROID UDEMY/KotlinRecylcerView/app/src/main/java/com/example/kotlinrecylcerview/DataSource.kt
package com.example.kotlinrecylcerview
import com.example.kotlinrecylcerview.models.Live
class DataSource {
companion object {
fun createDataSet(): ArrayList<Live> {
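            // Hard-coded demo data; the thumbnail points to a local file path and would normally be a hosted image URL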
val list = ArrayList<Live>()
list.add(
Live(
"Live 001 da Tainá na twitch",
"Tainá Regina",
"file:///C:/Users/tainasantos/Downloads/banner%20para%20Twitch.png",
"https://www.twitch.tv/tainadmissivel"
)
)
list.add(
Live(
"Live 002 da Tainá na twitch",
"Tainá Regina",
"file:///C:/Users/tainasantos/Downloads/banner%20para%20Twitch.png",
"https://www.twitch.tv/tainadmissivel"
)
)
list.add(
Live(
"Live 003 da Tainá na twitch",
"Tainá Regina",
"file:///C:/Users/tainasantos/Downloads/banner%20para%20Twitch.png",
"https://www.twitch.tv/tainadmissivel"
)
)
list.add(
Live(
"Live 004 da Tainá na twitch",
"Tainá Regina",
"file:///C:/Users/tainasantos/Downloads/banner%20para%20Twitch.png",
"https://www.twitch.tv/tainadmissivel"
)
)
return list
}
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/customview/Password.kt
package br.com.programadordeelite.gdc.codelab.userinterface.customview
import android.content.Context
import android.text.Editable
import android.text.TextWatcher
import android.util.AttributeSet
import androidx.annotation.LayoutRes
import androidx.annotation.StyleableRes
import androidx.constraintlayout.widget.ConstraintLayout
import br.com.programadordeelite.gdc.R
import com.google.android.material.button.MaterialButton
import com.google.android.material.textfield.TextInputLayout
import kotlinx.android.synthetic.main.item_password.view.*
import kotlin.properties.Delegates
abstract class Password @JvmOverloads constructor(
context: Context,
attrs: AttributeSet? = null,
defStyleAttr: Int = 0,
defStyleRes: Int = R.style.CustomComponents_TextInputLayout,
@LayoutRes layoutRes: Int = R.layout.item_password,
@StyleableRes styleableRes: IntArray = R.styleable.PasswordItem
) : ConstraintLayout(context, attrs, defStyleAttr, defStyleRes) {
private val emptyString = ""
private val defaultMinLength = 4
private val defaultMaxLength = 25
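    // Delegates.observable runs the handler whenever the property is reassigned, keeping the inflated views in sync with the attributes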
var passwordForgottenButtonEnabled: Boolean by Delegates.observable(false) { _, _, new ->
pwdButton.visibility = if (new) VISIBLE else GONE
pwdLayout.endIconMode = if (!new) TextInputLayout.END_ICON_PASSWORD_TOGGLE else TextInputLayout.END_ICON_NONE
}
var passwordForgottenButtonText: String by Delegates.observable(initialValue = emptyString) { _, old, new ->
if (old != new) pwdButton.text = new
}
var passwordCounterEnabled: Boolean by Delegates.observable(false) { _, _, new ->
pwdLayout.isCounterEnabled = new
}
var passwordCounterMaxLength: Int by Delegates.observable(defaultMaxLength) { _, _, new ->
pwdLayout.counterMaxLength = new
}
var passwordHintText: String by Delegates.observable(initialValue = resources.getString(R.string.input_password_hint_text)) { _, old, new ->
if (old != new) pwdLayout.hint = new
}
var passwordErrorText: String = resources.getString(R.string.input_password_error_text_default)
var passwordCounterMinLength: Int = defaultMinLength
var passwordSuccessTextEnabled: Boolean by Delegates.observable(false) { _, _, new ->
pwdLayout.isHelperTextEnabled = new
}
var passwordSuccessText: String by Delegates.observable(resources.getString(R.string.input_helper_text_blank_space)) { _, old, new ->
if (pwdEdit.text != null && pwdEdit.text.toString().length > passwordCounterMinLength) {
if (old != new) pwdLayout.helperText = new
}
}
val passwordForgottenButton: MaterialButton?
get() = pwdButton
init {
inflate(context, layoutRes, this)
context.theme.obtainStyledAttributes(
attrs,
styleableRes,
defStyleAttr,
defStyleRes
).apply {
try {
passwordForgottenButtonText = getString(R.styleable.PasswordItem_passwordForgottenButtonText)
?: emptyString
passwordHintText = getString(R.styleable.PasswordItem_passwordHintText)
?: resources.getString(R.string.input_password_hint_text)
passwordForgottenButtonEnabled = getBoolean(R.styleable.PasswordItem_passwordForgottenButtonEnabled, false)
passwordCounterEnabled = getBoolean(R.styleable.PasswordItem_passwordCounterEnabled, false)
passwordCounterMaxLength = getInt(R.styleable.PasswordItem_passwordCounterMaxLength, defaultMaxLength)
passwordCounterMinLength = getInt(R.styleable.PasswordItem_passwordCounterMinLength, defaultMinLength)
passwordSuccessTextEnabled = getBoolean(R.styleable.PasswordItem_passwordSuccessTextEnabled, false)
val blankSpace = resources.getString(R.string.input_helper_text_blank_space)
val customHelperText = getString(R.styleable.PasswordItem_passwordSuccessText)
passwordSuccessText = customHelperText ?: blankSpace
val customErrorText = getString(R.styleable.PasswordItem_passwordErrorText)
val defaultErrorText = resources.getString(R.string.input_password_error_text_default)
passwordErrorText = customErrorText ?: defaultErrorText
} finally {
recycle()
}
}
pwdEdit.setOnFocusChangeListener { _, hasFocus ->
if (!hasFocus) {
if (pwdEdit.text.toString().length in passwordCounterMinLength..passwordCounterMaxLength) {
pwdLayout.isErrorEnabled = false
if (passwordSuccessTextEnabled) pwdLayout.helperText = passwordSuccessText
} else {
if (pwdEdit.text != null && pwdEdit.text.toString().isEmpty()) {
showErrorHelperText()
}
}
}
}
pwdEdit.addTextChangedListener(object : TextWatcher {
override fun beforeTextChanged(chars: CharSequence?, start: Int, count: Int, after: Int) = Unit
override fun onTextChanged(chars: CharSequence?, start: Int, before: Int, count: Int) = Unit
override fun afterTextChanged(editable: Editable?) {
val passwordLayout = pwdLayout
val passwordForgottenButton = pwdButton
if (editable.isNullOrEmpty()) {
if (passwordForgottenButton.visibility != VISIBLE) passwordForgottenButton.visibility = VISIBLE
if (passwordLayout.endIconMode != TextInputLayout.END_ICON_NONE) passwordLayout.endIconMode = TextInputLayout.END_ICON_NONE
} else {
if (passwordForgottenButton.visibility != GONE) passwordForgottenButton.visibility = GONE
if (passwordLayout.endIconMode != TextInputLayout.END_ICON_PASSWORD_TOGGLE) passwordLayout.endIconMode = TextInputLayout.END_ICON_PASSWORD_TOGGLE
if (pwdEdit.text.toString().length in passwordCounterMinLength..passwordCounterMaxLength) {
passwordLayout.isErrorEnabled = false
if (passwordSuccessTextEnabled) passwordLayout.helperText = passwordSuccessText
} else {
showErrorHelperText()
}
}
}
})
}
private fun showErrorHelperText() {
val defaultErrorText = resources.getString(R.string.input_password_error_text_default)
pwdLayout.error = if (passwordErrorText.isNotEmpty()) passwordErrorText else defaultErrorText
pwdLayout.isErrorEnabled = true
pwdLayout.helperText = emptyString
}
}<file_sep>/white-label-firebase-tutorial-master/app/src/main/java/br/com/douglasmotta/whitelabeltutorial/domain/usecase/GetProductUseCaseImpl.kt
package br.com.douglasmotta.whitelabeltutorial.domain.usecase
import br.com.douglasmotta.whitelabeltutorial.data.ProductRepository
import br.com.douglasmotta.whitelabeltutorial.domain.model.Product
import javax.inject.Inject
class GetProductUseCaseImpl @Inject constructor(
private val productRepository: ProductRepository
) : GetProductsUseCase {
override suspend fun invoke(): List<Product> {
return productRepository.getProducts()
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/core/notification/NotificationFragment.kt
package br.com.programadordeelite.gdc.codelab.core.notification
import android.app.NotificationChannel
import android.app.NotificationManager
import android.app.PendingIntent
import android.content.BroadcastReceiver
import android.content.Context
import android.content.Intent
import android.content.IntentFilter
import android.graphics.BitmapFactory
import android.graphics.Color
import android.os.Bundle
import android.view.View
import androidx.core.app.NotificationCompat
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import kotlinx.android.synthetic.main.fragment_notification.*
private const val NOTIFICATION_ID = 0
private const val PRIMARY_CHANNEL_ID = "primary_notification_channel"
private const val ACTION_UPDATE = "ACTION_UPDATE_NOTIFICATION"
private const val ACTION_CANCEL = "ACTION_CANCEL_NOTIFICATION"
private const val ACTION_DELETE_ALL = "ACTION_DELETED_NOTIFICATIONS"
class NotificationFragment : Fragment(R.layout.fragment_notification) {
private lateinit var notificationManager: NotificationManager
private val notificationReceiver = NotificationReceiver()
private val dynamicReceiver = DynamicReceiver()
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
setupUiButtonListeners()
setupUiButtonStates(enableNotify = true, enableUpdate = false, enableCancel = false)
createNotificationChannel()
registerNotificationReceiver()
registerDynamicReceiver(dynamicReceiver)
}
private fun setupUiButtonListeners() {
// Não estou usando o binding aqui propositalmente para vcs aprenderem mais uma coisa
notify.setOnClickListener { sendNotification() } // enviar notificação padrão
update.setOnClickListener { updateNotification() } // customizar / personalizar
cancel.setOnClickListener { cancelNotification() } // remover da barra de status
// snake.setOnClickListener{ }
}
private fun setupUiButtonStates( // assegurar o estado inicial dos botões
enableNotify: Boolean,
enableUpdate: Boolean,
enableCancel: Boolean
) {
notify.isEnabled = enableNotify
update.isEnabled = enableUpdate
cancel.isEnabled = enableCancel
}
// A partir do android 8.0 (api 26) temos que definir o canal para que o usuário possa
// eventualmente desabilitar as notificações do aplicativo através das configurações
private fun createNotificationChannel() {
notificationManager =
requireActivity().getSystemService(Context.NOTIFICATION_SERVICE) as NotificationManager
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.O) {
// Isso aqui é que aparece nas configurações do aparelho la no seu aplicativo
val notificationChannel = NotificationChannel(
PRIMARY_CHANNEL_ID,
"Mascot Notification", NotificationManager.IMPORTANCE_HIGH
)
notificationChannel.enableVibration(true)
notificationChannel.description = "Notification from Mascot"
// serão exibidas se o telefone der suporte a essas coisas
notificationChannel.enableLights(true)
notificationChannel.lightColor = Color.RED
// criar o canal com as propriedades definidas
notificationManager.createNotificationChannel(notificationChannel)
} else {
// sua tarefinha de casa :) caso vc tenha um aparelho inferior a api 26
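            // Dica: em versões anteriores à API 26 não existem canais de notificação;
            // o channelId passado ao NotificationCompat.Builder é simplesmente ignorado,
            // então nenhuma criação de canal é necessária aqui.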
}
}
private fun cancelNotification() {
notificationManager.cancel(NOTIFICATION_ID)
setupUiButtonStates(enableNotify = true, enableUpdate = false, enableCancel = false)
}
private fun updateNotification() {
// personalização dinamica da notificação adicionando um icone
val androidImage = BitmapFactory.decodeResource(resources, R.drawable.ic_notification)
// atualizando o estilo e o titulo
val notification = getNotificationBuilder()
.setStyle(
NotificationCompat.BigPictureStyle()
.bigPicture(androidImage)
.setBigContentTitle("Notificação atualizada!")
)
// atualizar a notificação atual
notificationManager.notify(NOTIFICATION_ID, notification.build())
// e habilitar o botão de cancelamento
setupUiButtonStates(enableNotify = false, enableUpdate = false, enableCancel = true)
}
private fun sendNotification() {
val builder = getNotificationBuilder()
createNotificationAction(builder, NOTIFICATION_ID, ACTION_UPDATE, "Atualize")
createNotificationAction(builder, NOTIFICATION_ID, ACTION_CANCEL, "Remover")
val deleteAllAction = Intent(ACTION_DELETE_ALL) // remove com slide left/right ou lixeira
val deletedAction = PendingIntent.getBroadcast(
requireContext(),
NOTIFICATION_ID,
deleteAllAction,
PendingIntent.FLAG_ONE_SHOT
)
builder.setDeleteIntent(deletedAction)
notificationManager.notify(NOTIFICATION_ID, builder.build())
        // como neste passo aqui a notificação foi enviada, eu desabilito o botão de enviar
        // e habilito os botões de customização e cancelamento
setupUiButtonStates(enableNotify = false, enableUpdate = true, enableCancel = true)
}
private fun createNotificationAction(
builder: NotificationCompat.Builder,
notificationId: Int,
actionId: String,
actionTitle: String
) {
val updateActionFilter = Intent(actionId) // for broadcast receiver
val updateAction = PendingIntent.getBroadcast(
requireContext(),
notificationId,
updateActionFilter,
PendingIntent.FLAG_ONE_SHOT
)
builder.addAction(
// mudanças nas notificação desde o Android N
// esse icone nao aparece mais e esta presente apenas para manter compatibilidade
// em aparelhos antigos. Em compensação se ganhou mais espaço para os titulos
            // https://android-developers.googleblog.com/2016/06/notifications-in-android-n.html
R.drawable.ic_android,
actionTitle,
updateAction
)
}
private fun getNotificationBuilder(): NotificationCompat.Builder {
val notificationIntent = Intent(requireContext(), NotificationFragment::class.java)
val notificationPendingIntent = PendingIntent.getActivity(
requireContext(),
NOTIFICATION_ID, notificationIntent, PendingIntent.FLAG_UPDATE_CURRENT
)
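        // Obs.: em apps com targetSdk 31+ (Android 12) é obrigatório combinar
        // PendingIntent.FLAG_IMMUTABLE ou FLAG_MUTABLE ao criar qualquer PendingIntent.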
return NotificationCompat.Builder(requireContext(), PRIMARY_CHANNEL_ID)
.setContentTitle("Você recebeu uma notificação!")
.setContentText("Valeu, já vou me inscrever no canal!")
.setSmallIcon(R.drawable.ic_notification_update)
.setContentIntent(notificationPendingIntent)
.setPriority(NotificationCompat.PRIORITY_HIGH)
.setDefaults(NotificationCompat.DEFAULT_ALL)
.setAutoCancel(false)
}
private fun registerNotificationReceiver() {
val notificationActionFilters = IntentFilter()
notificationActionFilters.addAction(ACTION_UPDATE)
notificationActionFilters.addAction(ACTION_DELETE_ALL)
notificationActionFilters.addAction(ACTION_CANCEL)
requireActivity().registerReceiver(notificationReceiver, notificationActionFilters)
}
// for broadcast receiver
override fun onDestroy() {
requireActivity().unregisterReceiver(notificationReceiver)
super.onDestroy()
}
inner class NotificationReceiver : BroadcastReceiver() {
override fun onReceive(context: Context, intent: Intent) {
// Update the notification
when (intent.action) {
ACTION_UPDATE -> updateNotification()
ACTION_CANCEL -> {
notificationManager.cancel(NOTIFICATION_ID)
setupUiButtonStates(
enableNotify = true,
enableUpdate = false,
enableCancel = false
)
}
ACTION_DELETE_ALL -> setupUiButtonStates(
enableNotify = true,
enableUpdate = false,
enableCancel = false
)
}
}
}
// API level 26 a maioria dos broadcastreceiver sao declarados dinamicamente
private fun registerDynamicReceiver(dynamicReceiver: BroadcastReceiver) {
IntentFilter(Intent.ACTION_AIRPLANE_MODE_CHANGED).also {
requireActivity().registerReceiver(dynamicReceiver, it)
}
}
private fun unregisterDynamicReceiver() {
requireActivity().unregisterReceiver(dynamicReceiver)
}
override fun onStop() {
super.onStop()
unregisterDynamicReceiver()
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/core/notification/DynamicReceiver.kt
package br.com.programadordeelite.gdc.codelab.core.notification
import android.content.BroadcastReceiver
import android.content.Context
import android.content.Intent
import android.widget.Toast
class DynamicReceiver: BroadcastReceiver() {
override fun onReceive(context: Context?, intent: Intent?) {
if(intent?.action == Intent.ACTION_BOOT_COMPLETED) {
            Toast.makeText(context, "app reiniciou com sucesso", Toast.LENGTH_SHORT).show()
}
if(intent?.action == Intent.ACTION_AIRPLANE_MODE_CHANGED){
Toast.makeText(context, "modo avião foi alterado", Toast.LENGTH_LONG).show()
}
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/core/workmanager/SelectImageFragment.kt
package br.com.programadordeelite.gdc.codelab.core.workmanager
import android.Manifest
import android.app.Activity
import android.content.ContentResolver
import android.content.Intent
import android.content.pm.PackageManager
import android.net.Uri
import android.os.Bundle
import android.provider.MediaStore
import android.view.View
import android.widget.Toast
import androidx.activity.result.ActivityResultLauncher
import androidx.activity.result.contract.ActivityResultContract
import androidx.activity.result.contract.ActivityResultContracts
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import androidx.core.os.bundleOf
import androidx.fragment.app.Fragment
import androidx.navigation.fragment.findNavController
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.codelab.util.navTo
import br.com.programadordeelite.gdc.databinding.FragmentSelectImageBinding
import timber.log.Timber
class SelectImageFragment : Fragment(R.layout.fragment_select_image) {
// ATUALIZADO
companion object {
private const val KEY_PERMISSIONS_GRANTED = "KEY_PERMISSIONS_GRANTED"
private const val KEY_PERMISSIONS_REQUEST_COUNT = "KEY_PERMISSIONS_REQUEST_COUNT"
private const val MAX_NUMBER_REQUEST_PERMISSIONS = 2
}
private var permissionRequestCount: Int = 0
private lateinit var binding: FragmentSelectImageBinding
private lateinit var launcher: ActivityResultLauncher<String>
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
// deve ser definido ou no onAttach ou onCreate
launcher = registerForActivityResult(ActivityResultContracts.GetContent()){
handleImageRequestResult(it)
}
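        // O contrato GetContent abre o seletor de conteúdo do sistema e devolve a Uri
        // do item escolhido; o tipo MIME é definido na chamada launch("image/*") mais abaixo.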
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentSelectImageBinding.bind(view)
// recuperando o ultimo estado caso o usuário rotacione o aparelho
savedInstanceState?.let {
permissionRequestCount = it.getInt(KEY_PERMISSIONS_REQUEST_COUNT, 0)
userHasPermission = it.getBoolean(KEY_PERMISSIONS_GRANTED, false)
}
        // assegure-se de que o usuário tem as permissões necessárias
requestPermissionsOnlyTwice(userHasPermission)
// abrir o file chooser
binding.selectImage.setOnClickListener {
launcher.launch("image/*")
}
}
private var userHasPermission = false
private val permissions = arrayOf(
Manifest.permission.READ_EXTERNAL_STORAGE,
Manifest.permission.WRITE_EXTERNAL_STORAGE
)
private fun requestPermissionsOnlyTwice(hasPermissionsAlready: Boolean) {
if(!hasPermissionsAlready){
if (permissionRequestCount < MAX_NUMBER_REQUEST_PERMISSIONS) {
permissionRequestCount += 1
// NOVA API: PRESTA A ATENCÃO AGORA
val permissionChecker = registerForActivityResult(
ActivityResultContracts.RequestMultiplePermissions()
) { acceptedPermissions ->
val permissionsIdentified = acceptedPermissions.all{it.key in permissions}
val permissionsGrant = acceptedPermissions.all{it.value == true}
if(permissionsIdentified && permissionsGrant) {
permissionRequestCount = 0
userHasPermission = true
}
}
if(!userHasPermission){
permissionChecker.launch(permissions)
}
} else {
Toast.makeText(
requireContext(),
R.string.set_permissions_in_settings,
Toast.LENGTH_LONG
).show()
binding.selectImage.isEnabled = false
}
}
}
private fun handleImageRequestResult(uri: Uri) {
// navegar para proxima etapa onde exibimos as opções de blur
navTo(R.id.blurFragment, bundleOf(Pair(KEY_IMAGE_URI, uri.toString())))
//findNavController().navigate(R.id.blurFragment, bundleOf(Pair(KEY_IMAGE_URI, uri.toString())))
}
// salvar estados em caso que o usuario rotacione o aparelho
override fun onSaveInstanceState(outState: Bundle) {
super.onSaveInstanceState(outState)
outState.putInt(KEY_PERMISSIONS_REQUEST_COUNT, permissionRequestCount)
outState.putBoolean(KEY_PERMISSIONS_GRANTED, userHasPermission)
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/menu/MenuFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.menu
import android.os.Bundle
import android.view.Menu
import android.view.MenuInflater
import android.view.MenuItem
import android.view.View
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.codelab.util.navTo
import br.com.programadordeelite.gdc.codelab.util.toast
import br.com.programadordeelite.gdc.databinding.FragmentMenuBinding
class MenuFragment : Fragment(R.layout.fragment_menu) {
private lateinit var binding: FragmentMenuBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
setHasOptionsMenu(true)
}
override fun onCreateOptionsMenu(menu: Menu, inflater: MenuInflater) {
inflater.inflate(R.menu.menu_main, menu)
super.onCreateOptionsMenu(menu, inflater)
}
override fun onOptionsItemSelected(item: MenuItem): Boolean {
when (item.itemId) {
R.id.action_order -> {
navTo(R.id.action_menuFragment_to_orderFragment)
return true
}
R.id.action_status -> {
toast(getString(R.string.action_status_message))
return true
}
R.id.action_favorites -> {
toast(getString(R.string.action_favorites_message))
return true
}
R.id.action_contact -> {
toast(getString(R.string.action_contact_message))
return true
}
else -> Unit
}
return super.onOptionsItemSelected(item)
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/themes/ThemeFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.themes
import android.content.pm.ActivityInfo
import android.os.Bundle
import android.view.Menu
import android.view.MenuInflater
import android.view.MenuItem
import android.view.View
import androidx.appcompat.app.AppCompatDelegate
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentThemeBinding
class ThemeFragment : Fragment(R.layout.fragment_theme) {
private var scoreTeam1 = 0
private var scoreTeam2 = 0
private val stateScore1 = "Team 1 Score"
private val stateScore2 = "Team 2 Score"
private lateinit var binding: FragmentThemeBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentThemeBinding.bind(view)
// possibilitar mudanças de orientações nessa tela
requireActivity().requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED
setHasOptionsMenu(true) // importante para que o menu seja exibido
// recuperar valores em rotacões ou quando o tema recriar o app
if (savedInstanceState != null) {
scoreTeam1 = savedInstanceState.getInt(stateScore1)
scoreTeam2 =savedInstanceState.getInt(stateScore2)
binding.score1.text = scoreTeam1.toString()
binding.score2.text = scoreTeam2.toString()
}
binding.decreaseButtonTeam1.setOnClickListener {
binding.score1.text = (--scoreTeam1).toString()
}
binding.decreaseButtonTeam2.setOnClickListener {
binding.score2.text = (--scoreTeam2).toString()
}
binding.increaseButtonTeam1.setOnClickListener {
binding.score1.text = (++scoreTeam1).toString()
}
binding.increaseButtonTeam2.setOnClickListener {
binding.score2.text = (++scoreTeam2).toString()
}
}
override fun onSaveInstanceState(outState: Bundle) {
outState.putInt(stateScore1, scoreTeam1)
outState.putInt(stateScore2, scoreTeam2)
super.onSaveInstanceState(outState)
}
override fun onCreateOptionsMenu(menu: Menu, inflater: MenuInflater) {
requireActivity().menuInflater.inflate(R.menu.theme_menu, menu)
val nightMode = AppCompatDelegate.getDefaultNightMode()
if (nightMode == AppCompatDelegate.MODE_NIGHT_YES) {
menu.findItem(R.id.night_mode).setTitle(R.string.day_mode)
} else {
menu.findItem(R.id.night_mode).setTitle(R.string.night_mode)
}
super.onCreateOptionsMenu(menu, inflater)
}
override fun onOptionsItemSelected(item: MenuItem): Boolean {
if (item.itemId == R.id.night_mode) {
val nightMode = AppCompatDelegate.getDefaultNightMode()
if (nightMode == AppCompatDelegate.MODE_NIGHT_YES) {
AppCompatDelegate.setDefaultNightMode(AppCompatDelegate.MODE_NIGHT_NO)
} else {
AppCompatDelegate.setDefaultNightMode(AppCompatDelegate.MODE_NIGHT_YES)
}
// Recreate the activity for the theme change to take effect.
requireActivity().recreate()
}
return true
}
override fun onDetach() {
super.onDetach()
// assegurar que tudo esta como era antes
requireActivity().requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_PORTRAIT
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/customview/PasswordItem.kt
package br.com.programadordeelite.gdc.codelab.userinterface.customview
import android.content.Context
import android.util.AttributeSet
import br.com.programadordeelite.gdc.R
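/**
 * @JvmOverloads gera os construtores (Context), (Context, AttributeSet) e
 * (Context, AttributeSet, Int) exigidos para inflar a view a partir do XML;
 * o estilo padrão aplicado é CustomComponents_TextInputLayout.
 */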
open class PasswordItem @JvmOverloads constructor(
context: Context,
attrs: AttributeSet? = null,
defStyleAttr: Int = 0,
defStyleRes: Int = R.style.CustomComponents_TextInputLayout
) : Password(context, attrs, defStyleAttr, defStyleRes)<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/usernavigation/tab/TabViewModel.kt
package br.com.programadordeelite.gdc.codelab.userinterface.usernavigation.tab
import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.ViewModel
class TabViewModel : ViewModel() {
var currentTabContent: String = "Current on Tab One!"
var currentTab: String = "Tab One!"
private val currentTabObserver: MutableLiveData<String> = MutableLiveData()
    // A lógica fica programada no ViewModel; a view geralmente só é atualizada,
    // separando responsabilidades: SoC (separation of concerns)
fun getCurrent(tab: String): String {
if (currentTab != tab) {
currentTabContent = "Toque anterior: $currentTab\nÚltimo toque: $tab"
currentTab = tab
}
return currentTabContent
}
fun getContentObserver(): MutableLiveData<String> = currentTabObserver
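    // Os fragments das abas observam currentTabObserver; quando um clique chama
    // setValue(...), o Observer registrado usa getCurrent(...) para montar o texto exibido.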
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/MainApplication.kt
package br.com.programadordeelite.gdc
import android.app.Application
import br.com.programadordeelite.gdc.codelab.datamanagement.roomwithview.WordRepository
import br.com.programadordeelite.gdc.codelab.datamanagement.roomwithview.WordRoomDatabase
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.SupervisorJob
import timber.log.Timber
class MainApplication : Application() {
// NÃO HÁ NECESSIDADE DE CANCELAR ESTE ESCOPO, POIS ELE SERÁ DESTRUÍDO COM O PROCESSO
private val applicationScope = CoroutineScope(SupervisorJob())
// CRIACAO DO NOSSO BANCO DE DADOS LAZY (SO SERA INSTANCIADO QUANDO FOR USADO PELA PRIMEIRA VEZ)
private val database by lazy { WordRoomDatabase.getDatabase(this@MainApplication, applicationScope) }
// DEFINICAO DO NOSSO REPOSITORIO A NIVEL DE APLICACAO PARA FICAR DISPONIVEL EM TODO LUGAR
val repository by lazy { WordRepository(database.wordDao()) }
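    // Exemplo de acesso (esboço): em uma Activity/Fragment,
    //   (requireActivity().application as MainApplication).repository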
override fun onCreate() {
super.onCreate()
if (BuildConfig.DEBUG) {
Timber.plant(Timber.DebugTree()) // Timber é o "novo" Logger recomendado pela google
}
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/recyclerview/paging/GithubService.kt
package br.com.programadordeelite.gdc.codelab.userinterface.recyclerview.paging
import okhttp3.OkHttpClient
import okhttp3.logging.HttpLoggingInterceptor
import okhttp3.logging.HttpLoggingInterceptor.Level
import retrofit2.Retrofit
import retrofit2.converter.gson.GsonConverterFactory
import retrofit2.http.GET
import retrofit2.http.Query
const val IN_QUALIFIER = "in:name,description"
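// Search qualifier appended to the user query (presumably as "<query> in:name,description"
// by the paging source) so GitHub matches only repository names and descriptions.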
/**
* Github API communication setup via Retrofit.
*/
interface GithubService {
/**
* Get repos ordered by stars.
*/
@GET("search/repositories?sort=stars")
suspend fun searchRepos(
@Query("q") query: String,
@Query("page") page: Int,
@Query("per_page") itemsPerPage: Int
): RepoSearchResponse
companion object {
private const val BASE_URL = "https://api.github.com/"
// factory method para criar serviços
fun create(): GithubService {
val logger = HttpLoggingInterceptor()
logger.level = Level.BASIC
val client = OkHttpClient.Builder()
.addInterceptor(logger)
.build()
return Retrofit.Builder()
.baseUrl(BASE_URL)
.client(client)
.addConverterFactory(GsonConverterFactory.create())
.build()
.create(GithubService::class.java)
}
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/usernavigation/tab/TabOneFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.usernavigation.tab
import android.os.Bundle
import android.view.View
import androidx.fragment.app.Fragment
import androidx.fragment.app.viewModels
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProvider
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentTabOneBinding
class TabOneFragment (val viewModel: TabViewModel): Fragment(R.layout.fragment_tab_one) {
private lateinit var binding: FragmentTabOneBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentTabOneBinding.bind(view)
// Criar um observador que atualiza a UI
val tabContentObserver = Observer<String> { newContent ->
binding.contentTitle.text = viewModel.getCurrent(newContent)
}
viewModel.getContentObserver().observe(viewLifecycleOwner, tabContentObserver)
binding.btnTabOne.setOnClickListener {
viewModel.getContentObserver().setValue("Tab one!")
}
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/recyclerview/paging/Repo.kt
package br.com.programadordeelite.gdc.codelab.userinterface.recyclerview.paging
import com.google.gson.annotations.SerializedName
/**
* Immutable model class for a Github repo that holds all the information about a repository.
* Objects of this type are received from the Github API, therefore all the fields are annotated
* with the serialized name.
* This class also defines the Room repos table, where the repo [id] is the primary key.
*/
data class Repo(
@field:SerializedName("id") val id: Long,
@field:SerializedName("name") val name: String,
@field:SerializedName("full_name") val fullName: String,
@field:SerializedName("description") val description: String?,
@field:SerializedName("html_url") val url: String,
@field:SerializedName("stargazers_count") val stars: Int,
@field:SerializedName("forks_count") val forks: Int,
@field:SerializedName("language") val language: String?
)<file_sep>/AppContatos/app/src/main/java/com/example/appcontatos/ContatoAdapter.kt
package com.example.appcontatos
import android.view.LayoutInflater
import android.view.ViewGroup
import androidx.recyclerview.widget.RecyclerView
class ContatoAdapter: RecyclerView.Adapter<ContatoViewHolder>() {
private var items = listOf<Contato>()
fun upDateItems(newItems: List<Contato>) {
items = newItems
notifyDataSetChanged()
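        // notifyDataSetChanged() redesenha a lista inteira; para listas maiores,
        // DiffUtil/ListAdapter recalculariam apenas as diferenças entre as listas.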
}
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ContatoViewHolder {
val view = LayoutInflater
.from(parent.context)
.inflate(R.layout.item_contato, parent, false)
return ContatoViewHolder(view)
}
override fun getItemCount() : Int = items.size
override fun onBindViewHolder(holder: ContatoViewHolder, position: Int) {
holder.bind(items[position])
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/datamanagement/viewmodelfactory/ScoreFragment.kt
package br.com.programadordeelite.gdc.codelab.datamanagement.viewmodelfactory
import android.os.Bundle
import android.view.View
import androidx.fragment.app.Fragment
import androidx.lifecycle.ViewModelProvider
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentScoreBinding
class ScoreFragment : Fragment(R.layout.fragment_score) {
private lateinit var binding: FragmentScoreBinding
private lateinit var viewModel: ScoreViewModel
private lateinit var viewModelFactory: ScoreViewModelFactory
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentScoreBinding.bind(view)
// obtem argumentos passados pela action de gameFragment
viewModelFactory = ScoreViewModelFactory(ScoreFragmentArgs.fromBundle(requireArguments()).score)
// com o factory obtenho o view model
viewModel = ViewModelProvider(this, viewModelFactory).get(ScoreViewModel::class.java)
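        // O factory é necessário porque ScoreViewModel recebe o score no construtor;
        // o provider padrão não sabe passar argumentos de construtor personalizados.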
binding.scoreText.text = viewModel.score.toString()
}
}<file_sep>/AppContatos/app/src/main/java/com/example/appcontatos/CadastroActivity.kt
package com.example.appcontatos
import android.app.DatePickerDialog
import android.os.Bundle
import android.widget.DatePicker
import androidx.appcompat.app.AppCompatActivity
import com.google.android.material.textfield.TextInputEditText
import java.text.SimpleDateFormat
import java.util.*
// Tela de cadastro: o campo dataCompra abre um DatePickerDialog com um intervalo
// limitado de datas e preenche o campo com a data escolhida.
class CadastroActivity : AppCompatActivity(), DatePickerDialog.OnDateSetListener {

    private lateinit var dataCompra: TextInputEditText

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_cadastro)

        dataCompra = findViewById(R.id.dataCompra)

        dataCompra.setOnClickListener {
            val todayDate: Calendar = Calendar.getInstance()
            val ano: Int = todayDate.get(Calendar.YEAR)
            val mes: Int = todayDate.get(Calendar.MONTH)
            val dia: Int = todayDate.get(Calendar.DAY_OF_MONTH)

            // A própria activity implementa OnDateSetListener e é passada como listener
            val dateDialog = DatePickerDialog(this, this, ano, mes, dia)

            // Janela de seleção (mesmos valores do código original): 7 dias para trás
            // e, a partir daí, 10 dias para a frente
            todayDate.add(Calendar.DATE, -7)
            dateDialog.datePicker.minDate = todayDate.timeInMillis
            todayDate.add(Calendar.DATE, 10)
            dateDialog.datePicker.maxDate = todayDate.timeInMillis

            dateDialog.show()
        }
    }

    // Callback chamado quando o usuário confirma a data no diálogo
    override fun onDateSet(view: DatePicker?, ano: Int, mes: Int, dia: Int) {
        val calendario = Calendar.getInstance()
        calendario.set(ano, mes, dia)
        val sdf = SimpleDateFormat("dd-MM-yyyy", Locale.US)
        dataCompra.setText(sdf.format(calendario.time))
    }
}
<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/datamanagement/roomwithview/RoomRepositoryViewModel.kt
package br.com.programadordeelite.gdc.codelab.datamanagement.roomwithview
import android.content.Context
import androidx.annotation.WorkerThread
import androidx.lifecycle.*
import androidx.room.*
import androidx.sqlite.db.SupportSQLiteDatabase
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.launch
// +----------------------------------------------------------------------+
// | >>> 1° <<< ENTITY: DEFINICÃO DA NOSSA TABELA POR MEIO DE ANOTACÕES |
// +----------------------------------------------------------------------+
@Entity(tableName = "word_table")
class Word(@PrimaryKey @ColumnInfo(name = "word") val word: String)
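// Cada propriedade vira uma coluna; como a própria palavra é a chave primária,
// inserções repetidas são descartadas pelo OnConflictStrategy.IGNORE definido no DAO.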
// +-------------------------------------------------------------------------------------+
// | >>> 2° <<< DAO: DATA ACCESS OBJECT - OBJETO QUE REALIZA OPERACÕES NO BANCO DE DADOS |
// | ESPECIFICA O QUE PODEREMOS ALTERA NA NOSSA TABELA E FACILITA O USO |
// +-------------------------------------------------------------------------------------+
@Dao
interface WordDao {
@Query("SELECT * FROM word_table ORDER BY word ASC")
fun getAlphabetizedWords(): Flow<List<Word>> // Note: Estamos usando flow (o video de Paging Lib 3 explica)
@Insert(onConflict = OnConflictStrategy.IGNORE)
suspend fun insert(word: Word)
@Query("DELETE FROM word_table") // que mágica é essa?
suspend fun deleteAll()
}
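// Exemplo de uso (esboço): as funções suspend devem ser chamadas de dentro de uma coroutine,
//   scope.launch { wordDao.insert(Word("nova palavra")) }
// e o Flow pode ser coletado com wordDao.getAlphabetizedWords().collect { lista -> ... }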
// +-----------------------------------------------------------------------------------------------+
// | >>> 3° <<< REPOSITORY: MEDIADOR DE ACESSO A DADOS LOCAIS OU REMOTO CASO EXISTAM VÁRIAS FONTES |
// +-----------------------------------------------------------------------------------------------+
// Declare o DAO como uma propriedade privada no construtor. Passe apenas o DAO
// em vez do banco de dados inteiro, porque você só precisamos acessar o DAO
class WordRepository(private val wordDao: WordDao) {
// Room executa todas as consultas em um thread separado.
// Flow observado notificará o observador quando os dados forem alterados.
val allWords: Flow<List<Word>> = wordDao.getAlphabetizedWords()
// Por padrão, o Room executa consultas suspensas fora do thread principal, portanto,
// não precisamos implementar qualquer outra coisa para garantir que não estamos fazendo
// um trabalho de banco de dados de longa duração fora da Thread principal.
@Suppress("RedundantSuspendModifier")
@WorkerThread
suspend fun insert(word: Word) {
wordDao.insert(word)
}
}
// +-----------------------------------------------------------------------+
// | >>> 4° <<< ROOM - NOSSO BANCO DE DADOS COM AS DEFINICÕES DE TABELAS |
// +-----------------------------------------------------------------------+
@Database(entities = arrayOf(Word::class), version = 1, exportSchema = false)
abstract class WordRoomDatabase : RoomDatabase() {
abstract fun wordDao(): WordDao
// +----------------------------------------------------------------------------------+
// | CALLBACK - SERA USADA PARA INICIALIZAR O NOSSO BANCO DE DADOS NA HORA DA CRIACÃO |
// +----------------------------------------------------------------------------------+
private class WordDatabaseCallback(private val scope: CoroutineScope) : RoomDatabase.Callback() {
override fun onCreate(db: SupportSQLiteDatabase) {
super.onCreate(db)
INSTANCE?.let { database ->
scope.launch {
populateDatabase(database.wordDao()) // SO PARA EXEMPLIFICAR COMO PRE-POPULAR UM BANCO DE DADOS
}
}
}
suspend fun populateDatabase(wordDao: WordDao) {
// APAGAMOS TUDO QUE ESTEJA NO BANCO PRIMEIRO (NÃO FACA ISSO EM PRODUCÃO)
wordDao.deleteAll()
// ADICIONAMOS ALGUNS VALORES ALEATÓRIOS PARA TER ALGO PARA EXIBIR
wordDao.insert(Word("Olá, não se esqueça de se inscrever no canal"))
wordDao.insert(Word("Vai me agradecer na hora da prova! :)"))
}
}
companion object {
// +------------------------------------------------------------------------------------+
// | SINGLETON - PREVINE QUE MULTIPLAS INSTANCIAS DO BANCO SEJAM ABERTAS AO MESMO TEMPO |
// | PADRÃO DE PROJETO QUE SO DEIXA UMA INSTANCIA DO MESMO OBJETO EXISTIR |
// +------------------------------------------------------------------------------------+
@Volatile
private var INSTANCE: WordRoomDatabase? = null
fun getDatabase(context: Context, scope: CoroutineScope): WordRoomDatabase {
// SE O BANCO JA EXISTIR, RETORNE DIRETO, DO CONTRÁRIO CRIE O BANCO
return INSTANCE ?: synchronized(this) {
val instance = Room.databaseBuilder(
context.applicationContext,
WordRoomDatabase::class.java,
"word_database" // NOME DO NOSSO BANCO DE DADOS
).addCallback(WordDatabaseCallback(scope)).build()
INSTANCE = instance
instance
}
}
}
}
// +---------------------------------------------------------------------------+
// | >>> 5° <<< VIEW MODEL: ATUALIZAR DADOS E RETEM A LÓGICA DA UI EM QUESTÃO |
// +---------------------------------------------------------------------------+
class WordViewModel(private val repository: WordRepository) : ViewModel() {
// Usar LiveData e armazenar em cache o que allWords retorna tem vários benefícios:
// - Podemos colocar um observador nos dados (em vez de pesquisar as alterações) e apenas
// atualizar a IU quando os dados realmente mudam.
// - O repositório é completamente separado da IU por meio do ViewModel.
val allWords: LiveData<List<Word>> = repository.allWords.asLiveData()
/** Lançamento de uma nova co-rotina para inserir os dados de forma não bloqueadora */
fun insert(word: Word) = viewModelScope.launch {
repository.insert(word)
}
}
// +---------------------------------------------------------+
// | >>> 6° <<< MODEL FACTORY: CRIADOR DE OBJETOS COMPLEXOS |
// +---------------------------------------------------------+
class WordViewModelFactory(private val repository: WordRepository) : ViewModelProvider.Factory {
@Suppress("UNCHECKED_CAST")
override fun <T : ViewModel> create(modelClass: Class<T>): T {
if (modelClass.isAssignableFrom(WordViewModel::class.java)) return WordViewModel(repository) as T
throw IllegalArgumentException("MODELO DESCONHECIDO")
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/datamanagement/viewmodelfactory/GameFragment.kt
package br.com.programadordeelite.gdc.codelab.datamanagement.viewmodelfactory
import android.os.Bundle
import android.util.Log
import android.view.View
import androidx.fragment.app.Fragment
import androidx.lifecycle.ViewModelProvider
import androidx.navigation.fragment.NavHostFragment
import androidx.navigation.fragment.findNavController
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.codelab.util.navTo
import br.com.programadordeelite.gdc.codelab.util.toast
import br.com.programadordeelite.gdc.databinding.FragmentGameBinding
class GameFragment : Fragment(R.layout.fragment_game) {
private lateinit var binding: FragmentGameBinding
private lateinit var viewModel: GameViewModel
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentGameBinding.bind(view)
Log.i("GameFragment", "Called ViewModelProvider.get")
viewModel = ViewModelProvider(this).get(GameViewModel::class.java)
binding.correctButton.setOnClickListener { onCorrect() }
binding.skipButton.setOnClickListener { onSkip() }
binding.endGameButton.setOnClickListener { onEndGame() }
updateScoreText()
updateWordText()
}
/** Methods for button click handlers **/
private fun onSkip() {
viewModel.onSkip()
updateWordText()
updateScoreText()
}
private fun onCorrect() {
viewModel.onCorrect()
updateScoreText()
updateWordText()
}
/** Methods for updating the UI **/
private fun updateWordText() {
binding.wordText.text = viewModel.word
}
private fun updateScoreText() {
binding.scoreText.text = viewModel.score.toString()
}
private fun onEndGame() {
gameFinished()
}
/**
* Called when the game is finished
* ATENCAO: Para entender melhor directions, veja isso aqui:
* https://developer.android.com/guide/navigation/navigation-navigate#groovy
*/
private fun gameFinished() {
toast("Game has just finished")
// bacana para passar actions embora tbm possivel por bundles
val action = GameFragmentDirections.actionGameToScore()
action.score = viewModel.score
// outra maneira de navegar
NavHostFragment.findNavController(this).navigate(action)
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/usernavigation/tab/TabTwoFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.usernavigation.tab
import android.os.Bundle
import android.view.View
import androidx.fragment.app.Fragment
import androidx.fragment.app.viewModels
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProvider
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentTabTwoBinding
class TabTwoFragment(val viewModel: TabViewModel) : Fragment(R.layout.fragment_tab_two) {
private lateinit var binding: FragmentTabTwoBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentTabTwoBinding.bind(view)
// Criar um observador que atualiza a UI
val tabContentObserver = Observer<String> { newContent ->
binding.contentTitle.text = viewModel.getCurrent(newContent)
}
viewModel.getContentObserver().observe(viewLifecycleOwner, tabContentObserver)
binding.btnTabTwo.setOnClickListener {
viewModel.getContentObserver().setValue("Tab Two!")
}
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/materialcomponents/MaterialComponentsFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.materialcomponents
import android.os.Bundle
import android.text.Editable
import android.view.View
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.codelab.util.navTo
import br.com.programadordeelite.gdc.databinding.FragmentMaterialComponentsBinding
import kotlinx.android.synthetic.main.fragment_material_components.*
class MaterialComponentsFragment : Fragment(R.layout.fragment_material_components) {
private lateinit var binding: FragmentMaterialComponentsBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentMaterialComponentsBinding.bind(view) // inicializa o binding
        binding.nextButton.setOnClickListener {
            if (!isPasswordValid(binding.passwordEditText.text.toString())) {
                binding.passwordTextInput.error = getString(R.string.shr_error_password)
            } else {
                binding.passwordTextInput.error = null
                navTo(R.id.productGridFragment)
            }
        }
        // Clear the error once at least 8 characters are typed.
        binding.passwordEditText.setOnKeyListener { _, _, _ ->
            if (isPasswordValid(binding.passwordEditText.text.toString())) {
                binding.passwordTextInput.error = null
            }
            false
        }
binding.cancelButton.setOnClickListener {
binding.passwordEditText.setText("")
binding.username.setText("")
}
}
private fun isPasswordValid(text: String): Boolean = text.length >= 8
}<file_sep>/CURSO ANDROID UDEMY/KotlinRecylcerView/app/src/main/java/com/example/kotlinrecylcerview/MainActivity.kt
package com.example.kotlinrecylcerview
import android.content.Intent
import android.net.Uri
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import androidx.recyclerview.widget.LinearLayoutManager
import kotlinx.android.synthetic.main.activity_main.*
class MainActivity : AppCompatActivity() {
private lateinit var liveAdapter: LiveAdapter
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
initRecyclerView()
addDataSource()
}
private fun addDataSource() {
val dataSource = DataSource.createDataSet()
this.liveAdapter.setDataSet(dataSource)
}
private fun initRecyclerView() {
this.liveAdapter = LiveAdapter{ live ->
openLink(live.link)
}
recyclerview.apply {
layoutManager = LinearLayoutManager(this@MainActivity)
adapter = liveAdapter
}
//OU PODE FAZER DA FORMA ABAIXO
// recyclerview.layoutManager = LinearLayoutManager(this)
// recyclerview.adapter = this.liveAdapter
}
//Pega link do canal para abrir no click
private fun openLink(url : String) {
val intent = Intent(Intent.ACTION_VIEW, Uri.parse(url))
startActivity(intent)
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/animation/PulseExplodeDestinationFragment.kt
package br.com.programadordeelite.gdc.codelab.animation
import android.os.Bundle
import android.view.View
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentPulseExplodeDestinationBinding
class PulseExplodeDestinationFragment : Fragment(R.layout.fragment_pulse_explode_destination) {
private lateinit var binding: FragmentPulseExplodeDestinationBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentPulseExplodeDestinationBinding.bind(view)
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/recyclerview/paging/GithubRepository.kt
package br.com.programadordeelite.gdc.codelab.userinterface.recyclerview.paging
import androidx.paging.Pager
import androidx.paging.PagingConfig
import androidx.paging.PagingData
import kotlinx.coroutines.flow.Flow
import timber.log.Timber
/**
* Repository class that works with local and remote data sources.
*/
class GithubRepository(private val service: GithubService) {
/**
* Search repositories whose names match the query, exposed as a stream of data that will emit
* every time we get more data from the network.
*/
fun getSearchResultStream(query: String): Flow<PagingData<Repo>> {
Timber.d("GithubRepository New query: $query")
return Pager(
config = PagingConfig(pageSize = NETWORK_PAGE_SIZE, enablePlaceholders = false),
pagingSourceFactory = { GithubPagingSource(service, query) }
).flow
}
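    // pageSize controls how many items each API page loads; with enablePlaceholders = false
    // the list only shows items that have already been loaded, without placeholder rows.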
companion object {
const val NETWORK_PAGE_SIZE = 50
}
}<file_sep>/white-label-firebase-tutorial-master/app/src/main/java/br/com/douglasmotta/whitelabeltutorial/ui/products/ProductsViewModel.kt
package br.com.douglasmotta.whitelabeltutorial.ui.products
import android.util.Log
import androidx.lifecycle.LiveData
import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope
import br.com.douglasmotta.whitelabeltutorial.config.Config
import br.com.douglasmotta.whitelabeltutorial.domain.model.Product
import br.com.douglasmotta.whitelabeltutorial.domain.usecase.GetProductsUseCase
import dagger.hilt.android.lifecycle.HiltViewModel
import kotlinx.coroutines.launch
import javax.inject.Inject
@HiltViewModel
class ProductsViewModel @Inject constructor(
private val getProductsUseCase: GetProductsUseCase,
config: Config
) : ViewModel() {
private val _productsData = MutableLiveData<List<Product>>()
val productsData: LiveData<List<Product>> = _productsData
private val _addButtonVisibilityData = MutableLiveData(config.addButtonVisibility)
val addButtonVisibilityData: LiveData<Int> = _addButtonVisibilityData
fun getProducts() = viewModelScope.launch {
try {
val products = getProductsUseCase()
_productsData.value = products
} catch (e: Exception) {
Log.d("ProductsViewModel", e.toString())
}
}
}<file_sep>/AtmConsultoria/app/src/main/java/com/example/atmconsultoria/DetalheClienteActivity.kt
package com.example.atmconsultoria
import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import kotlinx.android.synthetic.main.activity_detalhe_cliente.*
import kotlinx.android.synthetic.main.activity_main.*
class DetalheClienteActivity : AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_detalhe_cliente)
buton_voltar.setOnClickListener {
val intent = Intent (this, MainActivity::class.java)
startActivity(intent)
}
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/activityintents/ReplyFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.activityintents
import android.os.Bundle
import android.view.View
import androidx.appcompat.app.AppCompatActivity
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.codelab.userinterface.usernavigation.oldschool.UserNavigationActivity
import br.com.programadordeelite.gdc.codelab.util.navTo
import br.com.programadordeelite.gdc.codelab.util.startActivity
import br.com.programadordeelite.gdc.databinding.FragmentReplyBinding
class ReplyFragment : Fragment(R.layout.fragment_reply) {
companion object {
const val SEND = "SEND"
}
private lateinit var binding: FragmentReplyBinding
private var sent: String? = null
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentReplyBinding.bind(view)
binding.buttonReply.setOnClickListener {
val args = Bundle()
args.putString(SendFragment.REPLY,binding.inputReply.text.toString())
navTo(R.id.action_replyFragment_to_sendFragment, args)
//startActivity(UserNavigationActivity::class.java)
}
// obtem a mensagem enviada pelo fragment
arguments?.let {
sent = it.getString(SEND, null)
}
// exibe a mensagem obtida
sent?.let {
binding.textMessage.text = it
}
}
override fun onResume() {
super.onResume()
(activity as AppCompatActivity).supportActionBar?.hide()
}
override fun onStop() {
super.onStop()
(activity as AppCompatActivity).supportActionBar?.show()
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/recyclerview/paging/RecyclerViewPagingFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.recyclerview.paging
import android.os.Bundle
import android.view.KeyEvent
import android.view.View
import android.view.inputmethod.EditorInfo
import androidx.fragment.app.Fragment
import androidx.lifecycle.ViewModelProvider
import androidx.lifecycle.lifecycleScope
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentRecyclerViewPagingBinding
import kotlinx.coroutines.Job
import kotlinx.coroutines.flow.distinctUntilChangedBy
import kotlinx.coroutines.flow.filter
import kotlinx.coroutines.launch
import androidx.paging.LoadState
import androidx.recyclerview.widget.DividerItemDecoration
import kotlinx.coroutines.flow.collect
// https://developer.android.com/topic/libraries/architecture/paging/v3-overview
// https://developer.android.com/codelabs/android-paging?index=..%2F..%2Findex#4
class RecyclerViewPagingFragment : Fragment(R.layout.fragment_recycler_view_paging) {
private lateinit var binding: FragmentRecyclerViewPagingBinding
private lateinit var viewModel: SearchRepositoriesViewModel
private val adapter = ReposAdapter()
private var searchJob: Job? = null
private fun search(query: String) {
// Make sure we cancel the previous job before creating a new one
searchJob?.cancel()
searchJob = lifecycleScope.launch {
viewModel.searchRepo(query).collect {
adapter.submitData(it)
}
}
}
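    // submitData() suspends until the next PagingData invalidation, which is why each new
    // search cancels the previous collection job before collecting a fresh Flow.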
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentRecyclerViewPagingBinding.bind(view)
// get the view model
viewModel = ViewModelProvider(this, Injection.provideViewModelFactory()).get(SearchRepositoriesViewModel::class.java)
// add dividers between RecyclerView's row items
val decoration = DividerItemDecoration(requireContext(), DividerItemDecoration.VERTICAL)
binding.list.addItemDecoration(decoration)
initAdapter()
val query = savedInstanceState?.getString(LAST_SEARCH_QUERY) ?: DEFAULT_QUERY
search(query)
initSearch(query)
}
override fun onSaveInstanceState(outState: Bundle) {
super.onSaveInstanceState(outState)
outState.putString(LAST_SEARCH_QUERY, binding.searchRepo.text.trim().toString())
}
private fun initAdapter() {
binding.list.adapter = adapter
}
private fun initSearch(query: String) {
binding.searchRepo.setText(query)
binding.searchRepo.setOnEditorActionListener { _, actionId, _ ->
if (actionId == EditorInfo.IME_ACTION_GO) {
updateRepoListFromInput()
true
} else {
false
}
}
binding.searchRepo.setOnKeyListener { _, keyCode, event ->
if (event.action == KeyEvent.ACTION_DOWN && keyCode == KeyEvent.KEYCODE_ENTER) {
updateRepoListFromInput()
true
} else {
false
}
}
// Scroll to top when the list is refreshed from network.
lifecycleScope.launch {
adapter.loadStateFlow
// Only emit when REFRESH LoadState for RemoteMediator changes.
.distinctUntilChangedBy { it.refresh }
// Only react to cases where Remote REFRESH completes i.e., NotLoading.
.filter { it.refresh is LoadState.NotLoading }
.collect { binding.list.scrollToPosition(0) }
}
}
private fun updateRepoListFromInput() {
binding.searchRepo.text.trim().let {
if (it.isNotEmpty()) {
search(it.toString())
}
}
}
private fun showEmptyList(show: Boolean) {
if (show) {
binding.emptyList.visibility = View.VISIBLE
binding.list.visibility = View.GONE
} else {
binding.emptyList.visibility = View.GONE
binding.list.visibility = View.VISIBLE
}
}
companion object {
private const val LAST_SEARCH_QUERY: String = "last_search_query"
private const val DEFAULT_QUERY = "Android"
}
}
<file_sep>/AppContatos/app/src/main/java/com/example/appcontatos/Contato.kt
package com.example.appcontatos
data class Contato (
val nome: String,
val telefone: String,
val email: String,
val id: Int? = null
)
fun teste() {
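    // Demonstração de data class: copy("nome2") cria uma nova instância trocando apenas
    // o primeiro parâmetro (nome) e mantendo telefone, email e id.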
val c = Contato("nome", "telefone", "email")
val c2 = c.copy("nome2")
}
<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/recyclerview/paging/ViewModelFactory.kt
package br.com.programadordeelite.gdc.codelab.userinterface.recyclerview.paging
import androidx.lifecycle.ViewModel
import androidx.lifecycle.ViewModelProvider
/**
* Factory for ViewModels
*/
class ViewModelFactory(private val repository: GithubRepository) : ViewModelProvider.Factory {
override fun <T : ViewModel> create(modelClass: Class<T>): T {
if (modelClass.isAssignableFrom(SearchRepositoriesViewModel::class.java)) {
@Suppress("UNCHECKED_CAST")
return SearchRepositoriesViewModel(repository) as T
}
throw IllegalArgumentException("Unknown ViewModel class")
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/recyclerview/paging/SearchRepositoriesViewModel.kt
package br.com.programadordeelite.gdc.codelab.userinterface.recyclerview.paging
import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope
import androidx.paging.PagingData
import androidx.paging.cachedIn
import kotlinx.coroutines.flow.Flow
/**
* ViewModel for the [SearchRepositoriesActivity] screen.
* The ViewModel works with the [GithubRepository] to get the data.
*/
class SearchRepositoriesViewModel(private val repository: GithubRepository) : ViewModel() {
private var currentQueryValue: String? = null
private var currentSearchResult: Flow<PagingData<Repo>>? = null
fun searchRepo(queryString: String): Flow<PagingData<Repo>> {
// se ja existir algo no cache para a mesma busca, apenas retorne o resultado
val lastResult = currentSearchResult
if (queryString == currentQueryValue && lastResult != null) return lastResult
currentQueryValue = queryString
val newResult: Flow<PagingData<Repo>> = repository.getSearchResultStream(queryString).cachedIn(viewModelScope)
currentSearchResult = newResult // cache/salve na memory
return newResult
}
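    // cachedIn(viewModelScope) mantém o PagingData em cache enquanto o ViewModel existir,
    // permitindo reutilizar o resultado após rotações de tela sem nova chamada à API.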
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/recyclerview/paging/Injection.kt
package br.com.programadordeelite.gdc.codelab.userinterface.recyclerview.paging
import androidx.lifecycle.ViewModelProvider
/**
* Class that handles object creation.
* Like this, objects can be passed as parameters in the constructors and then replaced for
* testing, where needed.
*/
object Injection {
/**
* Creates an instance of [GithubRepository] based on the [GithubService] and a
* [GithubLocalCache]
*/
private fun provideGithubRepository(): GithubRepository {
return GithubRepository(GithubService.create())
}
/**
* Provides the [ViewModelProvider.Factory] that is then used to get a reference to
* [ViewModel] objects.
*/
fun provideViewModelFactory(): ViewModelProvider.Factory {
return ViewModelFactory(provideGithubRepository())
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/core/toast/ToastSnakeFragment.kt
package br.com.programadordeelite.gdc.codelab.core.toast
import android.content.pm.ActivityInfo
import android.os.Bundle
import android.view.View
import android.widget.Toast
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.codelab.util.toast
import br.com.programadordeelite.gdc.databinding.FragmentToastSnakeBinding
import com.google.android.material.snackbar.Snackbar
class ToastSnakeFragment : Fragment(R.layout.fragment_toast_snake) {
private lateinit var binding: FragmentToastSnakeBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
requireActivity().requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED
binding = FragmentToastSnakeBinding.bind(view)
binding.toast.setOnClickListener {
val msg = "Minha mensagem para você!"
Toast
.makeText(requireContext(), msg, Toast.LENGTH_SHORT)
.show()
}
// Material design guideline:
// https://material.io/archive/guidelines/components/snackbars-toasts.html#snackbars-toasts-usage
binding.snake.setOnClickListener {
Snackbar.make(view, "Oi Snake", Snackbar.LENGTH_SHORT).show()
}
binding.snakeAction.setOnClickListener {
Snackbar
.make(view, "Snake with Action", Snackbar.LENGTH_SHORT)
.setAction(R.string.ok) { toast("I am a snake!") }
.show()
}
}
}
<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/recyclerview/paging/ReposAdapter.kt
package br.com.programadordeelite.gdc.codelab.userinterface.recyclerview.paging
import android.view.ViewGroup
import androidx.paging.PagingDataAdapter
import androidx.recyclerview.widget.DiffUtil
/**
* Adapter for the list of repositories.
*/
class ReposAdapter : PagingDataAdapter<Repo, RepoViewHolder>(REPO_COMPARATOR) {
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): RepoViewHolder {
return RepoViewHolder.create(parent)
}
override fun onBindViewHolder(holder: RepoViewHolder, position: Int) {
val repoItem = getItem(position)
if (repoItem != null) {
holder.bind(repoItem)
}
}
companion object {
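        // areItemsTheSame uses fullName as the item identity; areContentsTheSame relies on
        // the data class equals() to detect content changes for the diffing algorithm.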
private val REPO_COMPARATOR = object : DiffUtil.ItemCallback<Repo>() {
override fun areItemsTheSame(oldItem: Repo, newItem: Repo): Boolean =
oldItem.fullName == newItem.fullName
override fun areContentsTheSame(oldItem: Repo, newItem: Repo): Boolean =
oldItem == newItem
}
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/debugging/calc/Calculator.kt
package br.com.programadordeelite.gdc.codelab.debugging.calc
class Calculator {
enum class Operator { ADD, SUB, DIV, MUL }
fun add(firstOperand: Double, secondOperand: Double): Double = firstOperand + secondOperand
fun sub(firstOperand: Double, secondOperand: Double): Double = firstOperand - secondOperand
fun div(firstOperand: Double, secondOperand: Double): Double = firstOperand / secondOperand
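    // Obs.: divisão de Double por zero não lança exceção; o resultado é Infinity
    // (ou NaN no caso 0.0 / 0.0), conforme o padrão IEEE 754.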
fun mul(firstOperand: Double, secondOperand: Double): Double = firstOperand * secondOperand
}<file_sep>/aad-main/app/src/androidTest/java/com/progradordeelite/gdc/EspressoUiTest.kt
package com.progradordeelite.gdc
import android.content.Intent
import androidx.test.core.app.launchActivity
import androidx.test.espresso.Espresso
import androidx.test.espresso.accessibility.AccessibilityChecks
import androidx.test.espresso.action.ViewActions
import androidx.test.espresso.assertion.ViewAssertions.matches
import androidx.test.espresso.intent.rule.IntentsTestRule
import androidx.test.espresso.matcher.ViewMatchers.withId
import androidx.test.espresso.matcher.ViewMatchers.withText
import androidx.test.ext.junit.rules.activityScenarioRule
import androidx.test.ext.junit.runners.AndroidJUnit4
import androidx.test.platform.app.InstrumentationRegistry
import br.com.programadordeelite.gdc.MainActivity
import br.com.programadordeelite.gdc.R
//import com.google.android.apps.common.testing.accessibility.framework.AccessibilityCheckResult
//import com.google.android.apps.common.testing.accessibility.framework.AccessibilityCheckResultBaseUtils.matchesCheckNames
//import com.google.android.apps.common.testing.accessibility.framework.AccessibilityCheckResultUtils.matchesViews
//import org.hamcrest.Matchers.`is`
//import org.hamcrest.Matchers.allOf
import org.junit.Assert.*
import org.junit.Rule
import org.junit.Test
import org.junit.runner.RunWith
/**
* Instrumented test, which will execute on an Android device.
*
* See [testing documentation](http://d.android.com/tools/testing).
*/
@RunWith(AndroidJUnit4::class)
class EspressoUiTest {
init {
val configurator = AccessibilityChecks.enable()
// configurator.setThrowExceptionFor(AccessibilityCheckResult.AccessibilityCheckResultType.ERROR)
// configurator.setRunChecksFromRootView(true)
// configurator.setSuppressingResultMatcher(
// allOf(
// matchesCheckNames(`is`("TextContrastViewCheck")),
// matchesViews(withId(R.id.btnAdd))
// )
// )
}
@Test
fun testIncrement(){
// launch desired activity - OLD WAY NO CASO DE VC ENCONTRAR ISSO POR AI
IntentsTestRule(MainActivity::class.java).launchActivity(Intent())
Espresso.onView(withId(R.id.codelab_accessibility)).perform(ViewActions.click())
Espresso.onView(withId(R.id.btnAdd)).perform(ViewActions.click())
Espresso.onView(withId(R.id.textAdd)).check(matches(withText("Apertou 1 vez!")))
}
@Test
fun testIncrementNew(){
val scenario = launchActivity<MainActivity>()
Espresso.onView(withId(R.id.codelab_accessibility)).perform(ViewActions.click())
Espresso.onView(withId(R.id.btnAdd)).perform(ViewActions.click())
Espresso.onView(withId(R.id.textAdd)).check(matches(withText("Apertou 1 vez!")))
Espresso.onView(withId(R.id.btnAdd)).perform(ViewActions.click())
Espresso.onView(withId(R.id.textAdd)).check(matches(withText("Apertou 2 vez!")))
scenario.onActivity { activity ->
            println("if you want to call some method on the activity, do it here!") // do some stuff with the Activity
            // example: activity.onBackPressed()
}
        scenario.close() // highly recommended, otherwise the activity will remain open
}
@get:Rule
val rule = activityScenarioRule<MainActivity>()
@Test
fun testIncrementNewWithRuleEvenBetterSinceItClosesScenarioAutomatically(){
val scenario = rule.scenario
scenario.onActivity { activity ->
            println("if you want to call some method on the activity, do it here!") // do some stuff with the Activity
            // example: activity.onBackPressed()
}
Espresso.onView(withId(R.id.codelab_accessibility)).perform(ViewActions.click())
Espresso.onView(withId(R.id.textAdd)).check(matches(withText("Aperta lá no botão!")))
Espresso.onView(withId(R.id.btnAdd)).perform(ViewActions.click())
Espresso.onView(withId(R.id.textAdd)).check(matches(withText("Apertou 1 vez!")))
Espresso.onView(withId(R.id.btnAdd)).perform(ViewActions.click())
Espresso.onView(withId(R.id.textAdd)).check(matches(withText("Apertou 2 vez!")))
Espresso.onView(withId(R.id.btnAdd)).perform(ViewActions.click())
Espresso.onView(withId(R.id.textAdd)).check(matches(withText("Apertou 3 vez!")))
}
@Test
fun useAppContext() {
// Context of the app under test.
val appContext = InstrumentationRegistry.getInstrumentation().targetContext
assertEquals("br.com.programadordeelite.gdc", appContext.packageName)
}
}<file_sep>/AulaMvvM/app/src/main/java/com/example/aulamvvm/repositories/MainRepository.kt
package com.example.aulamvvm.repositories
import com.example.aulamvvm.rest.RetrofitService
class MainRepository constructor(private val retrofitService: RetrofitService) {
fun getAllLives() = retrofitService.getAllLives()
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/activityintents/SendFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.activityintents
import android.os.Bundle
import android.view.View
import androidx.appcompat.app.AppCompatActivity
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.codelab.util.navTo
import br.com.programadordeelite.gdc.databinding.FragmentSendBinding
class SendFragment : Fragment(R.layout.fragment_send) {
companion object {
const val REPLY = "REPLY"
}
private lateinit var binding: FragmentSendBinding
private var reply: String? = null
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentSendBinding.bind(view)
binding.buttonSend.setOnClickListener {
val args = Bundle()
args.putString(ReplyFragment.SEND,binding.inputSend.text.toString())
navTo(R.id.action_sendFragment_to_replyFragment, args)
}
// obtem a mensagem enviada pelo fragment
arguments?.let {
reply = it.getString(REPLY, null)
}
// exibe a mensagem obtida
reply?.let {
binding.textMessage.text = it
}
}
override fun onResume() {
super.onResume()
(activity as AppCompatActivity).supportActionBar?.hide()
}
override fun onStop() {
super.onStop()
(activity as AppCompatActivity).supportActionBar?.show()
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/themes/BatteryFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.themes
import android.os.Bundle
import android.view.View
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentBatteryBinding
class BatteryFragment : Fragment(R.layout.fragment_battery) {
private lateinit var binding: FragmentBatteryBinding
private var imageLevel = 0
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentBatteryBinding.bind(view)
binding.minusButton.setOnClickListener {
--imageLevel
if (imageLevel < 0) imageLevel = 0
            // Changing icons dynamically is SUPER SIMPLE:
            // show a game score, a quantity or a health bar,
            // or swap icons depending on the season of the year, etc.
binding.batteryLevelImage.setImageLevel(imageLevel)
}
binding.plusButton.setOnClickListener {
++imageLevel
if (imageLevel > 3) imageLevel = 3
binding.batteryLevelImage.setImageLevel(imageLevel)
}
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/datamanagement/roomwithview/NewWordFragment.kt
package br.com.programadordeelite.gdc.codelab.datamanagement.roomwithview
import android.os.Bundle
import android.text.TextUtils
import android.view.View
import androidx.core.os.bundleOf
import androidx.fragment.app.Fragment
import androidx.navigation.fragment.findNavController
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentNewWordBinding
// +-----------------------------------------------------------------+
// |     SCREEN THAT REGISTERS NEW WORDS INTO OUR DATABASE            |
// +-----------------------------------------------------------------+
class NewWordFragment : Fragment(R.layout.fragment_new_word) {
private lateinit var binding: FragmentNewWordBinding
companion object {
const val BUNDLE_KEY_WORD = "word"
const val BUNDLE_REQUEST_KEY = "requestKey"
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentNewWordBinding.bind(view)
binding.buttonSave.setOnClickListener {
            // GOOD TO KNOW THIS UTILITY CLASS EXISTS FOR MANIPULATING OR VALIDATING STRINGS
if (TextUtils.isEmpty(binding.editWord.text)) {
parentFragmentManager.setFragmentResult(BUNDLE_REQUEST_KEY, bundleOf(BUNDLE_KEY_WORD to "vazio"))
} else {
val word = binding.editWord.text.toString()
parentFragmentManager.setFragmentResult(BUNDLE_REQUEST_KEY, bundleOf(BUNDLE_KEY_WORD to word))
}
            // NAVIGATE BACK TO THE PREVIOUS SCREEN - EQUIVALENT TO PRESSING THE BACK BUTTON
findNavController().popBackStack()
}
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/recyclerview/RecyclerViewFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.recyclerview
import android.content.Context
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.view.animation.AccelerateDecelerateInterpolator
import android.view.animation.AnimationUtils
import android.widget.TextView
import androidx.core.content.ContextCompat
import androidx.core.view.isVisible
import androidx.fragment.app.Fragment
import androidx.recyclerview.widget.RecyclerView
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.codelab.util.startAnimation
import br.com.programadordeelite.gdc.databinding.FragmentRecyclerViewBinding
class RecyclerViewFragment : Fragment(R.layout.fragment_recycler_view) {
private lateinit var binding: FragmentRecyclerViewBinding
    // 0 Define your model
private var words = mutableListOf("Ricardo", "Julia", "Pedro", "Alfred")
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentRecyclerViewBinding.bind(view)
// +-----------------------------------------------------------------+
// | Cool explosion animation |
// +-----------------------------------------------------------------+
val anim = AnimationUtils.loadAnimation(requireContext(), R.anim.circle_explosion_anim).apply {
duration = 700
interpolator = AccelerateDecelerateInterpolator()
}
        // 3 Assign the adapter to your RecyclerView
binding.wordRecycler.adapter = WordListAdapter(requireActivity(), words)
binding.addWordButton.setOnClickListener {
binding.root.setBackgroundColor(0) // make sure this is set to re-trigger the anim again
words.add("+ New word " + words.size)
            (binding.wordRecycler.adapter as WordListAdapter).notifyItemInserted(words.size - 1)
            binding.wordRecycler.smoothScrollToPosition(words.size - 1)
// +-----------------------------------------------------------------+
// | Cool explosion animation |
// +-----------------------------------------------------------------+
binding.animCircle.isVisible = true
binding.animCircle.startAnimation(anim) {
// onEnd call back - do what ever you want here...
// example: change the background color
binding.root.setBackgroundColor(ContextCompat.getColor(requireContext(), R.color.colorPrimary))
}
}
}
    // 1 Create your list adapter
class WordListAdapter(context: Context, val words: MutableList<String>) : RecyclerView.Adapter<WordListAdapter.WordViewHolder>() {
private val inflater: LayoutInflater = LayoutInflater.from(context)
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): WordViewHolder {
val item = inflater.inflate(R.layout.wordlist_item, parent, false)
return WordViewHolder(item, this)
}
        // This is where the model gets "adapted" into the .xml item layout
override fun onBindViewHolder(holder: WordViewHolder, position: Int) {
holder.wordItemView.text = words[position]
}
override fun getItemCount(): Int {
return words.size
}
        // 2 Define a ViewHolder and implement the onClick for each item
inner class WordViewHolder(item: View, private val adapter: WordListAdapter) : RecyclerView.ViewHolder(item), View.OnClickListener {
val wordItemView: TextView = item.findViewById(R.id.word)
init {
wordItemView.setOnClickListener(this)
}
override fun onClick(view: View?) {
words[layoutPosition] = "Clicked! " + words[layoutPosition]
adapter.notifyDataSetChanged()
}
}
}
}<file_sep>/CURSO ANDROID UDEMY/KotlinRecylcerView/app/src/main/java/com/example/kotlinrecylcerview/models/Live.kt
package com.example.kotlinrecylcerview.models
import java.net.URL
data class Live(
var title: String,
var autor: String,
var tumbnailURL: String,
var link: String
)
<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/menu/DatePickerFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.menu
import android.app.DatePickerDialog
import android.app.Dialog
import android.os.Bundle
import android.widget.DatePicker
import androidx.fragment.app.DialogFragment
import java.util.*
class DatePickerFragment(val callback: (result: String) -> Unit) : DialogFragment(), DatePickerDialog.OnDateSetListener{
override fun onCreateDialog(savedInstanceState: Bundle?): Dialog {
val c: Calendar = Calendar.getInstance()
val year: Int = c.get(Calendar.YEAR)
val month: Int = c.get(Calendar.MONTH)
val day: Int = c.get(Calendar.DAY_OF_MONTH)
return DatePickerDialog(requireContext(), this, year, month, day)
}
override fun onDateSet(picker: DatePicker?, year: Int, month: Int, day: Int) {
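        // DatePicker months are zero-based, hence the + 1 below before formatting.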
val monthString = (month + 1).toString()
val dayString = day.toString()
val yearString = year.toString()
callback("$dayString / $monthString / $yearString")
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/usernavigation/tab/TabPagerAdapter2.kt
package br.com.programadordeelite.gdc.codelab.userinterface.usernavigation.tab
import androidx.fragment.app.Fragment
import androidx.recyclerview.widget.RecyclerView
import androidx.viewpager2.adapter.FragmentStateAdapter
import androidx.viewpager2.adapter.FragmentViewHolder
// See doc: https://developer.android.com/training/animation/vp2-migration
class TabPagerAdapter2(private val numOfTabs: Int, val host: TabHostFragment) : FragmentStateAdapter(host) {
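    // ViewPager2 pairs with FragmentStateAdapter: createFragment() replaces the old FragmentPagerAdapter#getItem().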
override fun getItemCount(): Int = numOfTabs
override fun createFragment(position: Int): Fragment {
return when (position) {
0 -> TabOneFragment(host.getTabViewModel())
1 -> TabTwoFragment(host.getTabViewModel())
2 -> TabThreeFragment(host.getTabViewModel())
else -> TabOneFragment(host.getTabViewModel())
}
}
}<file_sep>/AulaMvvM/settings.gradle
rootProject.name = "AulaMvvM"
include ':app'
<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/customview/CustomViewFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.customview
import android.os.Bundle
import android.view.View
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.codelab.util.snake
import br.com.programadordeelite.gdc.databinding.FragmentCustomViewBinding
class CustomViewFragment : Fragment(R.layout.fragment_custom_view) {
private lateinit var binding: FragmentCustomViewBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentCustomViewBinding.bind(view)
binding.pwd.passwordForgottenButton?.setOnClickListener {
snake(it, "Esqueci minha senha!")
}
binding.pwdOutlined.passwordForgottenButton?.setOnClickListener {
snake(it, "Esqueci minha senha!")
}
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/core/workmanager/BlurFragment.kt
package br.com.programadordeelite.gdc.codelab.core.workmanager
import android.content.Intent
import android.content.pm.ActivityInfo
import android.os.Bundle
import android.view.View
import androidx.fragment.app.Fragment
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProvider
import androidx.work.WorkInfo
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentBlurBinding
import com.bumptech.glide.Glide
class BlurFragment : Fragment(R.layout.fragment_blur) {
private lateinit var viewModel: BlurViewModel
private lateinit var binding: FragmentBlurBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentBlurBinding.bind(view)
        // UPDATED: obtain the ViewModel
// viewModel = ViewModelProviders.of(this).get(BlurViewModel::class.java)
viewModel = ViewModelProvider(this).get(BlurViewModel::class.java)
        // grab the URI of the selected image passed as an argument from the previous screen
val imageUriExtra = arguments?.getString(KEY_IMAGE_URI)
viewModel.setImageUri(imageUriExtra)
viewModel.imageUri?.let { imageUri ->
Glide.with(this).load(imageUri).into(binding.imageView)
}
binding.goButton.setOnClickListener { viewModel.applyBlur(blurLevel) }
// Setup view output image file button
binding.seeFileButton.setOnClickListener {
viewModel.outputUri?.let { currentUri ->
val actionView = Intent(Intent.ACTION_VIEW, currentUri)
actionView.resolveActivity(requireActivity().packageManager)?.run {
startActivity(actionView)
}
}
}
// Hookup the Cancel button
binding.cancelButton.setOnClickListener { viewModel.cancelWork() }
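        // WorkManager publishes job status as LiveData<WorkInfo>; the observer below flips the UI between the in-progress and finished states.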
viewModel.outputWorkInfos.observe(requireActivity(), workInfosObserver())
}
private fun workInfosObserver(): Observer<List<WorkInfo>> {
return Observer { listOfWorkInfo ->
            // if there is no work info yet, do nothing
if (listOfWorkInfo.isNullOrEmpty()) {
return@Observer
}
// We only care about the one output status.
// Every continuation has only one worker tagged TAG_OUTPUT
val workInfo = listOfWorkInfo[0]
if (workInfo.state.isFinished) {
                showWorkFinished() // show the button
// Normally this processing, which is not directly related to drawing views on
// screen would be in the ViewModel. For simplicity we are keeping it here.
val outputImageUri = workInfo.outputData.getString(KEY_IMAGE_URI)
                // if a blurred output file exists, show the "See File" button
if (!outputImageUri.isNullOrEmpty()) {
viewModel.setOutputUri(outputImageUri as String)
binding.seeFileButton.visibility = View.VISIBLE
}
} else {
                showWorkInProgress() // otherwise show the loading state
}
}
}
// UI control
private fun showWorkInProgress() {
with(binding) {
progressBar.visibility = View.VISIBLE
cancelButton.visibility = View.VISIBLE
goButton.visibility = View.GONE
seeFileButton.visibility = View.GONE
}
}
private fun showWorkFinished() {
with(binding) {
progressBar.visibility = View.GONE
cancelButton.visibility = View.GONE
goButton.visibility = View.VISIBLE
}
}
private val blurLevel: Int
get() =
when (binding.radioBlurGroup.checkedRadioButtonId) {
R.id.radio_blur_lv_1 -> 1
R.id.radio_blur_lv_2 -> 2
R.id.radio_blur_lv_3 -> 3
else -> 1
}
}<file_sep>/CURSO ANDROID UDEMY/KotlinRecylcerView/settings.gradle
rootProject.name = "KotlinRecylcerView"
include ':app'
<file_sep>/AppContatos/settings.gradle
rootProject.name = "AppContatos"
include ':app'
<file_sep>/AulaMvvM/app/src/main/java/com/example/aulamvvm/viewmodel/main/MainViewModel.kt
package com.example.aulamvvm.viewmodel.main
import android.util.Log
import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.ViewModel
import com.example.aulamvvm.models.Live
import com.example.aulamvvm.repositories.MainRepository
import retrofit2.Call
import retrofit2.Callback
import retrofit2.Response
class MainViewModel constructor(private val repository: MainRepository) : ViewModel() {
val liveList = MutableLiveData<List<Live>>()
val errorMessage = MutableLiveData<String>()
fun getAllLives() {
val request = repository.getAllLives()
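        // enqueue() runs the Retrofit call asynchronously and delivers the callbacks back on the main thread.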
request.enqueue(object : Callback<List<Live>> {
override fun onResponse(call: Call<List<Live>>, response: Response<List<Live>>) {
                // WHEN A RESPONSE ARRIVES
Log.i("Taina", "onResponse")
liveList.postValue(response.body())
}
override fun onFailure(call: Call<List<Live>>, t: Throwable) {
                // when a failure happens
errorMessage.postValue(t.message)
}
})
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/datamanagement/appsettings/AppSettingsFragment.kt
package br.com.programadordeelite.gdc.codelab.datamanagement.appsettings
import android.content.SharedPreferences
import android.os.Bundle
import android.view.*
import android.widget.Toast
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.codelab.util.navTo
import br.com.programadordeelite.gdc.codelab.util.toast
import br.com.programadordeelite.gdc.databinding.FragmentAppSettingsBinding
class AppSettingsFragment : Fragment(R.layout.fragment_app_settings) {
lateinit var binding: FragmentAppSettingsBinding
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentAppSettingsBinding.bind(view)
        setHasOptionsMenu(true) // IMPORTANT
}
override fun onCreateOptionsMenu(menu: Menu, inflater: MenuInflater) {
inflater.inflate(R.menu.menu_settings, menu)
super.onCreateOptionsMenu(menu, inflater)
}
override fun onOptionsItemSelected(item: MenuItem): Boolean {
when (item.itemId) {
R.id.action_settings -> {
navTo(R.id.settingsFragment)
return true
}
else -> Unit
}
return super.onOptionsItemSelected(item)
}
override fun onResume() {
super.onResume()
val sharedPref = androidx.preference.PreferenceManager.getDefaultSharedPreferences(requireContext())
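        // Read back what the settings screen stored in the default SharedPreferences.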
val assinatura = sharedPref.getString(KEY_PREF_SIGNATURE, "!") ?: ""
if(assinatura.isNotEmpty()) toast(assinatura)
val resposta = sharedPref.getString(KEY_PREF_REPLY, "!") ?: ""
if(resposta.isNotEmpty()) toast(resposta)
val sincronizacao = sharedPref.getBoolean(KEY_PREF_SYNC, false)
if(sincronizacao) toast("sincronizacao ativada")
val anexos = sharedPref.getBoolean(KEY_PREF_ATTACHMENT, false)
if(anexos) toast("anexos ativados")
}
}<file_sep>/aad-main/settings.gradle
include ':app'
rootProject.name = "gdc"<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/interactiveui/InteractiveUiFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.interactiveui
import android.content.pm.ActivityInfo
import android.os.Bundle
import android.view.View
import android.widget.Toast
import androidx.fragment.app.Fragment
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentInteractiveUiBinding
class InteractiveUiFragment : Fragment(R.layout.fragment_interactive_ui) {
private lateinit var binding: FragmentInteractiveUiBinding
private var count = 0
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
requireActivity().requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentInteractiveUiBinding.bind(view)
binding.showCount.text = count.toString()
binding.buttonToast.setOnClickListener { showToast() }
binding.buttonCount.setOnClickListener { countUp(); showCounter() }
}
private fun showToast() = Toast.makeText(requireContext(), R.string.toast_message, Toast.LENGTH_SHORT).show()
private fun countUp() = count++
private fun showCounter() {
binding.showCount.text = count.toString()
}
}<file_sep>/aad-main/app/src/main/java/br/com/programadordeelite/gdc/codelab/userinterface/accessibility/AccessibilityFragment.kt
package br.com.programadordeelite.gdc.codelab.userinterface.accessibility
import android.os.Bundle
import androidx.fragment.app.Fragment
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import br.com.programadordeelite.gdc.R
import br.com.programadordeelite.gdc.databinding.FragmentAccessibilityBinding
class AccessibilityFragment : Fragment(R.layout.fragment_accessibility) {
private lateinit var binding: FragmentAccessibilityBinding
private var count = 0
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
binding = FragmentAccessibilityBinding.bind(view)
binding.btnAdd.setOnClickListener {
count++
binding.textAdd.text = "Apertou $count vez!"
}
}
} | addb5876d5dc74123177acee1b1c730d1ca505e6 | [
"Markdown",
"Kotlin",
"Gradle"
] | 86 | Kotlin | TainaRegina/Netflix | c295296c16e5c86368d3698bd038e836c37e266c | b05f126dbcd939e55af7c8aa53ba42ff233e0026 | |
refs/heads/master | <repo_name>i0tool5/zshtemplate<file_sep>/zshrc
# -------------------
# Set up the prompt
# -------------------
autoload -Uz colors && colors
colors
PS1="%{$fg_bold[cyan]%}﹝%{$reset_color%}%{$fg_bold[blue]%}%~%{$reset_color%}%{$fg_bold[cyan]%}﹞%{$reset_color%}%{$fg_bold[white]%}▹%{$reset_color%}%{$fg[red]%}▷ "
RPROMPT="⚫%{$reset_color%}%{$bg[black]%}%{$fg[white]%}%M%{$reset_color%}"
# -------------------
# Set optinons
# -------------------
setopt histignorealldups sharehistory menucomplete correctall
# -------------------
# Use emacs keybindings even if our EDITOR is set to vi
# -------------------
bindkey -e
# -------------------
# Keep 500 lines of history within the shell and save it to ~/.zsh_history:
# -------------------
HISTSIZE=500
SAVEHIST=500
HISTFILE=~/.zsh_history
# -------------------
# Use modern completion system
# -------------------
autoload -U compinit && compinit
zstyle ':completion:*' auto-description 'specify: %d'
zstyle ':completion:*:warnings' format '%BSorry, no matches for: %d%b'
zstyle ':completion:*' format 'Completing %d'
zstyle ':completion:*' group-name ''
zstyle ':completion:*' menu select=1 _complete _ignored _approximate _correct
eval "$(dircolors -b)"
zstyle ':completion:*:default' list-colors ${(s.:.)LS_COLORS}
zstyle ':completion:*' list-colors ''
zstyle ':completion:*' list-prompt %SAt %p: Hit TAB for more, or the character to insert%s
zstyle ':completion:*' matcher-list '' 'm:{a-z}={A-Z}' 'm:{a-zA-Z}={A-Za-z}' 'r:|[._-]=* r:|=* l:|=*'
zstyle ':completion:*' verbose true
zstyle ':completion:*:*:kill:*:processes' list-colors '=(#b) #([0-9]#)*=0=01;31'
zstyle ':completion:*:kill:*' command 'ps -u $USER -o pid,%cpu,tty,cputime,cmd'
# -------------------
# Help command
# -------------------
autoload -U run-help
autoload run-help-git
autoload run-help-svn
autoload run-help-svk
alias help=run-help
# -------------------
# Exports
# -------------------
export EDITOR="vim"
# -------------------
# Window title
# -------------------
case $TERM in
termite|*xterm*|rxvt|rxvt-unicode|rxvt-256color|rxvt-unicode-256color|(dt|k|E)term)
precmd () {
print -Pn "\e]0;%n@%M %~%#\a"
}
preexec () { print -Pn "\e]0;%~%# ($1)\a" }
;;
screen|screen-256color)
precmd () {
vcs_info
print -Pn "\e]83;title \"$1\"\a"
print -Pn "\e]0;$TERM - (%L) [%n@%M]%# [%~]\a"
}
preexec () {
print -Pn "\e]83;title \"$1\"\a"
print -Pn "\e]0;$TERM - (%L) [%n@%M]%# [%~] ($1)\a"
}
;;
esac
# Aliases
alias ls='ls --color=auto'
alias grep='grep --color=auto'
alias ttime='/usr/bin/time'
| 024530949f836a12164cd045695d10ae0f6411b5 | [
"Shell"
] | 1 | Shell | i0tool5/zshtemplate | 15f4f16487d90416e114d1bfb20c30bda0c0ace2 | f46ff899eb7bf1f7361beb0827b1aab232581cc9 | |
refs/heads/master | <repo_name>camthompson/key-example<file_sep>/app/components/todo-list.js
import Ember from 'ember';
export default Ember.Component.extend({
tagName: 'ul',
actions: {
complete(todo) {
this.get('todos').removeObject(todo);
}
}
});
| b8991e8080fb959661ae0fcaba3af475338daf8c | [
"JavaScript"
] | 1 | JavaScript | camthompson/key-example | 36065f4f8c54e9f7e57c386ed808661b72617ddb | 2eb5c45fea7793593ffa9844f6ab56e2f2426bd6 | |
refs/heads/master | <repo_name>ShvedAction/RailsTimeTraker<file_sep>/test/integration/time_flow_test.rb
require 'test_helper'
class TimeFlowTest < ActionDispatch::IntegrationTest
# test "the truth" do
# assert true
# end
setup do
@track_hash_example = {
start_time: "2016-11-18T14:56:08+03:00",
total_time: 3000,
tag: "some tage",
work_type: 'developing', #link to type work
}
end
test "create temp user when try add track" do
assert_difference ->{User.count}, 1, "user count should increase" do
assert_difference ->{Track.count}, 1, "track count should increase" do
open_session do |sess|
sess.extend(UserSessionHelper)
sess.add_track @track_hash_example
assert sess.current_user.temporary?, "user should be temp"
end
end
end
end
test "in one session a track should create for current user" do
user_session = build_some_user
#get model from this session
user = user_session.current_user
assert_difference ->{user.tracks.count}, 1, "tracks count of user should increase by 1" do
user_session.add_track @track_hash_example
end
end
test "after sign up, user should be auth" do
login = "new_login"
password = "<PASSWORD>"
#Given:
user_with_login_should_not_exist login, "Given error:"
user_session = build_session_for_user login, password
user_session.sign_up
    # Verify that the user is authenticated; the check happens inside the next method. See definition.
user_session.should_success_sign_up
user_session.should_out_login_in_tag 'span'
end
private
def build_some_user login="some_login", password="<PASSWORD>"
#Given:
user_with_login_should_not_exist login, "Given error:"
user = build_session_for_user login, password
user.sign_up
#follow redirect in this method
user.should_success_sign_up
return user
end
end
<file_sep>/test/user_session_helper.rb
module UserSessionHelper
def set_login_password login, password
@login = login
@password = <PASSWORD>
end
def sign_up
post "/users", params: {user: {login: @login, password: <PASSWORD>, password_confirmation: <PASSWORD>}}
end
def log_in
post '/user/log_in', params: {user: {login: @login, password: <PASSWORD>}}
end
def add_track track_hash
post "/tracks", params: {track: track_hash}
end
def should_success_logining message=""
assert !!User.find_by(login: @login), "#{message} For success logining, user should be in database."
assert_equal current_user.id, controller.session[:current_user_id], "#{message} For success logining, session should have user_id."
end
def should_success_sign_up message=""
follow_redirect!
should_success_logining message
end
def should_out_login_in_tag tag_name, message=""
assert_select tag_name, @login, message
end
def current_user
@current_user ||= User.find session[:current_user_id]
end
end
<file_sep>/test/test_helper.rb
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
require 'rails/test_help'
require 'user_session_helper'
class ActiveSupport::TestCase
# Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
fixtures :all
# Add more helper methods to be used by all tests here...
def user_with_login_should_exist login, message = ""
assert !!User.find_by(login: login), "#{message} User with login: #{login} should exist."
end
def user_with_login_should_not_exist login, message = ""
assert_nil User.find_by(login: login), "#{message} User with login: #{login} should not exist."
end
def build_session_for_user login, password, description_case = ""
open_session do |sess|
sess.extend(UserSessionHelper)
sess.set_login_password login, password
end
end
end
<file_sep>/app/controllers/tracks_controller.rb
class TracksController < ApplicationController
before_action :set_track, only: [:show, :edit, :update, :destroy]
before_action :set_user, except: :create
before_action :set_current_user, only: [:update, :destroy, :create]
  before_action :change_only_my_tracks, only: [:update, :destroy]
# GET /users/1/tracks
# GET /users/1/tracks.json
def index
@tracks = @user.tracks.all
end
# GET /users/1/tracks/1
# GET /users/1/tracks/1.json
def show
end
# GET /users/1/tracks/new
def new
@track = Track.new
end
# GET /users/1/tracks/1/edit
def edit
end
# POST /users/1/tracks
# POST /users/1/tracks.json
def create
@track = @current_user.tracks.new(track_params)
respond_to do |format|
if @track.save
format.html { redirect_to @track, notice: 'Track was successfully created.' }
format.json { render :show, status: :created, location: @track }
else
format.html { render :new }
format.json { render json: @track.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /users/1/tracks/1
# PATCH/PUT /users/1/tracks/1.json
def update
respond_to do |format|
if @track.update(track_params)
format.html { redirect_to @track, notice: 'Track was successfully updated.' }
format.json { render :show, status: :ok, location: @track }
else
format.html { render :edit }
format.json { render json: @track.errors, status: :unprocessable_entity }
end
end
end
# DELETE /users/1/tracks/1
# DELETE /users/1/tracks/1.json
def destroy
@track.destroy
respond_to do |format|
format.html { redirect_to user_tracks_url @user, notice: 'Track was successfully destroyed.' }
format.json { head :no_content }
end
end
def track_url track
user_tracks_url track.user
end
private
# Use callbacks to share common setup or constraints between actions.
def set_track
@track = Track.find(params[:id])
end
def set_user
@user = User.find params[:user_id]
end
def set_current_user
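      # Reuse the user kept in the session when present; otherwise create a
      # temporary user on the fly and remember its id for later requests.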
if session[:current_user_id]
@current_user = User.find session[:current_user_id]
else
@current_user = User.create_temp
session[:current_user_id] = @current_user.id
end
end
    def change_only_my_tracks
      redirect_to user_tracks_url @user if @current_user.nil? || @current_user.id != @user.id
end
# Never trust parameters from the scary internet, only allow the white list through.
def track_params
params.require(:track).permit(:start_time, :end_time, :total_time, :tag, :work_type, :user_id)
end
end
<file_sep>/test/models/user_test.rb
require 'test_helper'
class UserTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
test "should not save if confirmation_password does not mach password" do
login = "login_for_incorrect_password"
#Given:
assert_nil User.find_by(login: login), "Given error: User with login #{login} should not exist."
new_user = User.new login: login, password: "<PASSWORD>", password_confirmation:"<PASSWORD>"
#When:
assert_raises ActiveRecord::RecordInvalid, "User with incorrect confirmation password have saved." do
new_user.registration!
end
#Then:
assert_nil User.find_by(login: login), "User with incorrect confirmation password have found."
end
test "should save if confirmation_password mach password" do
login = "no_exist_login"
password = "<PASSWORD>"
#Given:
assert_nil User.find_by(login: login), "Given error: User with login #{login} should not exist."
new_user = User.new login: login, password: <PASSWORD>, password_confirmation: <PASSWORD>
#When:
assert_difference 'User.count', 1, "User with correct confirmation password should saved." do
new_user.registration!
end
#Then:
assert !!User.find_by(login: login), "User with correct confirmation password should found."
end
test "should save without password if his auth_type is temp_user" do
temp_user = User.new auth_type: 'temporary'
#When:
assert_difference 'User.count', 1, "Temp user should saved." do
temp_user.save
end
#Then:
    refute_empty temp_user.login, "Temp user should have a unique login"
end
test "on new User should auth_type init with deffault auth_type is registred" do
new_user = User.new
assert new_user.registred?, "default auth_type should be registred"
end
test "after registration new user, class method 'log_in' should return user if hash of password and login match record in DB" do
login = "model_try_login"
password = "<PASSWORD> <PASSWORD>"
#Given:
assert_nil User.find_by(login: login), "Given error: User with login #{login} should not exist."
user = User.new login: login, password: <PASSWORD>, password_confirmation: <PASSWORD>
user.registration!
#When:
logining_user = User.log_in({login: login, password: <PASSWORD>})
#Then:
assert !!logining_user, "retunred value from method log_in should not be nil"
assert !!logining_user.id, "id user should not be nil"
assert_equal login, logining_user.login
end
test "class method log_in shoul return nil if wrong password" do
login = "model_try_login"
true_password = "<PASSWORD>"
wron_password = "<PASSWORD>"
#Given:
assert_nil User.find_by(login: login), "Given error: User with login #{login} should not exist."
user = User.new login: login, password: <PASSWORD>, password_confirmation: <PASSWORD>
user.registration!
#When:
logining_user = User.log_in({login: login, password: <PASSWORD>})
#Then:
assert_nil logining_user
end
test "class method create_temp should save user with no empty login" do
assert_difference -> {User.count}, 1, "user count should increase" do
temp_user = User.create_temp
refute_empty temp_user.login, "login of temp user should not be empty"
end
end
end
<file_sep>/config/routes.rb
Rails.application.routes.draw do
resources :users do
resources :tracks, except: :create
end
resources :tracks, only: :create
post "user/log_in" => "users#log_in", as: :user_log_in, defaults: {login: nil, password: nil}
end
<file_sep>/app/views/tracks/_track.json.jbuilder
json.extract! track, :id, :start_time, :end_time, :total_time, :tag, :work_type, :user_id, :created_at, :updated_at
json.url track_url(track, format: :json)<file_sep>/test/controllers/tracks_controller_test.rb
require 'test_helper'
class TracksControllerTest < ActionDispatch::IntegrationTest
setup do
@track = tracks(:one)
@user = users(:one)
end
test "should get index" do
get user_tracks_url(@track.user, @track)
assert_response :success
end
test "should get new for current user" do
get new_user_track_url(@user)
assert_response :success
end
test "should get edit" do
get edit_user_track_url(@track.user, @track)
assert_response :success
end
test "should update track" do
patch user_track_url(@track.user, @track), params: { track: { end_time: @track.end_time, start_time: @track.start_time, tag: @track.tag, total_time: @track.total_time, user_id: @track.user_id, work_type: @track.work_type } }
assert_redirected_to user_tracks_url(@user)
end
test "should destroy track" do
assert_difference('Track.count', -1) do
delete user_track_url(@track.user, @track)
end
follow_redirect!
end
end
<file_sep>/app/models/user.rb
require 'securerandom'
class User < ApplicationRecord
has_many :tracks
enum auth_type: [:registred, :temporary]
validates :password, confirmation: true, if: :registred?
after_initialize :set_default_values
before_save :genrate_unic_login, if: :temporary?
def set_default_values
self.auth_type ||= 'registred'
end
def genrate_unic_login
self.login ||= SecureRandom.uuid.gsub('-','')
end
def registration!
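    # Hashes both password fields, then registred! (the enum bang helper) persists the record.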
self.password = <PASSWORD>(password)
self.password_confirmation = <PASSWORD>.password_encrypt_method(password_confirmation)
#save
self.registred!
end
def self.log_in params
return User.find_by login: params[:login], password: <PASSWORD>(params[:password])
end
def self.create_temp
return User.create auth_type: 'temporary'
end
protected
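  # NOTE: MD5 is used here only to keep the example simple; it is not suitable
  # for real password storage (prefer bcrypt via has_secure_password).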
def self.password_encrypt_method password
return Digest::MD5.hexdigest password
end
end
<file_sep>/test/controllers/users_controller_test.rb
require 'test_helper'
class UsersControllerTest < ActionDispatch::IntegrationTest
USER_LOGIN_ALLREADY_USED = 'allready_used_login'
PASSWORD_OF_ALLREADY_USED_USER = '<PASSWORD>'
setup do
@user = User.create login: USER_LOGIN_ALLREADY_USED, password: password_encrypt_method(PASSWORD_OF_ALLREADY_USED_USER)
end
def password_encrypt_method pass
return Digest::MD5.hexdigest pass
end
  # Shared scenario in which the user must not be created
def user_should_not_create_with login, pass, confirm_pass
assert_no_difference('User.count')do
post users_url, params: {
user: {
login: login,
password: <PASSWORD>,
password_confirmation: <PASSWORD>
}
}
end
end
  # For demonstration purposes
test "should get index" do
get users_url
assert_response :success
end
test "should get new" do
get new_user_url
assert_response :success
end
test "should create user if confirmation match with password" do
login = 'login_for_new_user'
password = '<PASSWORD>'
#Given:
assert_nil User.find_by(login: login), "Given error: User with login:#{login} should not exist."
#When:
assert_difference('User.count') do
post users_url, params: {
user: {
login: login,
password: <PASSWORD>,
password_confirmation: <PASSWORD>
}
}
end
#Then:
created_user = User.last
assert_redirected_to user_url(created_user)
assert_equal login, created_user.login
end
test "should not create user if confirmation does not match with password" do
login = 'login_for_new_user_bad_confirm'
password = '<PASSWORD>'
pass_confirmation = '<PASSWORD>'
#Given:
assert_nil User.find_by(login: login), "Given error: User with login:#{login} should not exist."
#When:
user_should_not_create_with login, password, pass_confirmation
end
test "should not create user if login allready exist" do
login = USER_LOGIN_ALLREADY_USED
password = '<PASSWORD>'
pass_confirmation = '<PASSWORD>'
#Given:
assert !!User.find_by(login: login), "Given error: User with login:#{login} should exist."
#When:
user_should_not_create_with login, password, pass_confirmation
end
test "should show user" do
#Given:
assert !!@user, "Given error: User with login:#{@user.login} should exist."
get user_url(@user)
assert_response :success
end
test "after log in session should had user_id" do
login = USER_LOGIN_ALLREADY_USED
password = <PASSWORD>
password_hash = <PASSWORD>
#Given:
user = User.find_by(login: login)
assert !!user, "Given error: User with login:#{login} should exist."
assert_equal password_hash, user.password, "Given error: User with login:#{login} should have correct hash of password."
#When:
post user_log_in_url, params: {user: {login: login, password: <PASSWORD>}}
#Then:
assert_response :success
assert_equal user.id, session[:current_user_id]
end
end
<file_sep>/app/models/track.rb
class Track < ApplicationRecord
belongs_to :user
enum :work_type => [:designing, :developing, :analis, :testing, :training]
end
| 48b562beb548ef2796ea580497cd3520733b1bd2 | [
"Ruby"
] | 11 | Ruby | ShvedAction/RailsTimeTraker | 96e88c480561f29f23a757b4a956fb91525ab71f | 93572b7751008015893ba3112b63a3a7ef16e721 | |
refs/heads/master | <repo_name>maraghuram/Codechef-Solutions<file_sep>/HELPLIRA.cpp
#include <iostream>
#include <cstdio>
#include <cstdlib>
#include <limits>
#include <cfloat>
using namespace std;
const double epsilon=0.01;
double modDouble(double x)
{
if(x<0) return -x;
return x;
}
int main()
{
int N;
scanf("%d",&N);
double minArea,maxArea;
int minIndex,maxIndex;
minArea=DBL_MAX;
maxArea=DBL_MIN;
for(int i=1;i<=N;++i)
{
int x1,x2,x3,y1,y2,y3;
double area=0.0;
scanf("%d %d %d %d %d %d",&x1,&y1,&x2,&y2,&x3,&y3);
area=0.5*modDouble((((double)(x1-x3)*(y2-y1))-((x1-x2)*(y3-y1))));
if(area<=minArea) minArea=area,minIndex=i;
if(area>=maxArea) maxArea=area,maxIndex=i;
}
printf("%d %d\n",minIndex,maxIndex);
return 0;
}
<file_sep>/PNTNG.cpp
#include <bits/stdc++.h>
using namespace std;
typedef long long int ll;
typedef pair<int,ll> pil;
int main(){
ll N,M,H;
scanf("%lld %lld %lld",&N,&M,&H);
ll res=0;
ll sqrs = N*M;
vector<pil> paint(H);
for(int i=0;i<H;++i) scanf("%lld %d",&paint[i].second,&paint[i].first);
sort(paint.begin(),paint.end());
for(int i=0;i<H;++i){
ll get = min( sqrs, paint[i].second );
res = res + (get*(ll) paint[i].first);
sqrs = sqrs - get;
if( sqrs == 0 ) break;
}
if( sqrs != 0 ) printf("Impossible\n");
else printf("%lld\n",res);
return 0;
}
<file_sep>/HEADBOB.cpp
#include <bits/stdc++.h>
#define NMAX 1111
using namespace std;
char buf[NMAX];
int main(){
int T;
scanf("%d",&T);
for(int t=0;t<T;++t){
int n,flag=0;
scanf("%d\n",&n);
gets(buf);
for(int i=0;i<n;++i){
if(buf[i]=='Y'){
flag=1; break;
}
else if(buf[i]=='I'){
flag=2; break;
}
}
if(flag==1) puts("NOT INDIAN");
else if(flag==2) puts("INDIAN");
else puts("NOT SURE");
}
return 0;
}
<file_sep>/ANUGCD.cpp
#include <bits/stdc++.h>
#define NMAX 1000010
#define NLIM 1000000
#define pb push_back
#define sz size()
#define L(x) (x<<1)
#define R(x) ((x<<1)+1)
#define ALL(x) ( x.begin(),x.end() )
using namespace std;
struct node{
int val,freq;
node(){
val=freq=-1;
}
node(int x,int y){
val=x,freq=y;
}
};
vector<node> trees[NMAX];
vector<int> primes,primeFactors[NMAX],primesList[NMAX];
int flag[NMAX],N,Q,input[NMAX];
void factorise(){
memset(flag,0,sizeof(flag));
for(int i=2;i<=NLIM;++i){
if(flag[i]==0){
primes.pb(i);
primeFactors[i].pb(i);
for(int j=i<<1;j<=NLIM;j+=i) {
flag[j]=1;
primeFactors[j].pb(i);
}
}
}
}
node merge(node y,node z){
node x;
if(y.val>z.val) x=node(y.val,y.freq);
else if(z.val>y.val) x=node(z.val,z.freq);
else x=node(y.val,y.freq+z.freq);
if(x.val==-1) x.freq=-1;
return x;
}
void build(int t,int index,int i,int j){
if(i==j) {
trees[t][index]=node(input[primesList[t][i-1]-1],1);
return;
}
int mid=(i+j)>>1;
build(t,L(index),i,mid);
build(t,R(index),mid+1,j);
trees[t][index]=merge(trees[t][L(index)],trees[t][R(index)]);
}
node query(int t,int index,int i,int j,int left,int right){
if(i==left && j==right) return trees[t][index];
if(left>right || i>right || j<left) return node(-1,-1);
int mid=(i+j)>>1;
if(right<=mid) return query(t,L(index),i,mid,left,right);
else if(left>mid) return query(t,R(index),mid+1,j,left,right);
else return merge(query(t,L(index),i,mid,left,mid),query(t,R(index),mid+1,j,mid+1,right));
}
void printTree(int t,int index,int i,int j){
if(i==j){
printf("%d,%d - %d %d\n",i,j,trees[t][index].val,trees[t][index].freq);
return;
}
int mid=(i+j)>>1;
printTree(t,L(index),i,mid);
printTree(t,R(index),mid+1,j);
printf("%d,%d - %d %d\n",i,j,trees[t][index].val,trees[t][index].freq);
}
void solve1();
int main(){
solve1();
return 0;
}
void solve1(){
factorise();
scanf("%d %d",&N,&Q);
for(int i=0;i<N;++i){
scanf("%d",&input[i]);
for(int j=0;j<primeFactors[input[i]].sz;++j) primesList[primeFactors[input[i]][j]].pb(i+1);
}
for(int i=0;i<primes.sz;++i)
if(primesList[primes[i]].sz>0){
trees[primes[i]]=vector<node>(primesList[primes[i]].sz<<3);
build(primes[i],1,1,primesList[primes[i]].sz);
//printf("%d\n",primes[i]);
//printTree(primes[i],1,1,primesList[primes[i]].sz);
}
for(int i=0;i<Q;++i){
int g,x,y;
scanf("%d %d %d",&g,&x,&y);
node res;
for(int j=0;j<primeFactors[g].sz;++j){
int l,r;
if(primesList[primeFactors[g][j]].sz==0) continue;
l=lower_bound(primesList[primeFactors[g][j]].begin(),primesList[primeFactors[g][j]].end(),x)-primesList[primeFactors[g][j]].begin();
r=upper_bound(primesList[primeFactors[g][j]].begin(),primesList[primeFactors[g][j]].end(),y)-primesList[primeFactors[g][j]].begin();
//printf("-->%d,%d - %d %d\n",x,y,l,r);
if(l>r-1) continue;
++l,++r;
node temp=query(primeFactors[g][j],1,1,primesList[primeFactors[g][j]].sz,l,r-1);
if(temp.val>res.val) res=temp;
}
printf("%d %d\n",res.val,res.freq);
}
}
<file_sep>/CLPERM.cpp
// author noob333
// H + Shift C + B + E + B + E
#include <bits/stdc++.h>
#define NMAX 55
//#define DEBUG 1
using namespace std;
// Input macros
#define s(n) scanf("%d",&n)
#define sc(n) scanf("%c",&n)
#define sl(n) scanf("%lld",&n)
#define sf(n) scanf("%lf",&n)
#define ss(n) scanf("%s",n)
// Useful constants
#define INF (int)1e9
#define EPS 1e-9
// Useful hardware instructions
#define bitcount __builtin_popcount
#define gcd __gcd
// Useful container manipulation / traversal macros
#define REP(i,a,b) for(int i=a;i<b;i++)
#define RREP(i,a,b) for(int i=a;i>b;i--)
#define foreach(v, c) for( typeof( (c).begin()) v = (c).begin(); v != (c).end(); ++v)
#define all(a) a.begin(), a.end()
#define in(a,b) ( (b).find(a) != (b).end())
#define pb push_back
#define fill(a,v) memset(a, v, sizeof a)
#define sz(a) ((int)(a.size()))
#define mp make_pair
#define fi first
#define se second
// Some common useful functions
#define maX(a,b) ( (a) > (b) ? (a) : (b))
#define miN(a,b) ( (a) < (b) ? (a) : (b))
#define checkbit(n,b) ( (n >> b) & 1)
#define DREP(a) sort(all(a)); a.erase(unique(all(a)),a.end())
#define INDEX(arr,ind) (lower_bound(all(arr),ind)-arr.begin())
typedef long long ll;
typedef vector<int> vi;
typedef vector<vector<int> > vvi;
typedef vector<ll> vl;
typedef pair<int, int> ii;
typedef vector<ii> vii;
typedef set<int> si;
typedef map<string, int> msi;
#ifdef DEBUG
#define debug(args...) {dbg,args; cerr<<endl;}
#else
#define debug(args...) // Just strip off all debug tokens
#endif
struct debugger
{
template<typename T> debugger& operator , (const T& v)
{
cerr<<v<<" ";
return *this;
}
} dbg;
template <typename T1, typename T2>
inline std::ostream& operator << (std::ostream& os, const std::pair<T1, T2>& p)
{
return os << "(" << p.first << ", " << p.second << ")";
}
template<typename T>
inline std::ostream &operator << (std::ostream & os,const std::vector<T>& v)
{
bool first = true;
os << "[";
for(unsigned int i = 0; i < v.size(); i++)
{
if(!first)
os << ", ";
os << v[i];
first = false;
}
return os << "]";
}
template<typename T>
inline std::ostream &operator << (std::ostream & os,const std::set<T>& v)
{
bool first = true;
os << "[";
for (typename std::set<T>::const_iterator ii = v.begin(); ii != v.end(); ++ii)
{
if(!first)
os << ", ";
os << *ii;
first = false;
}
return os << "]";
}
template<typename T1, typename T2>
inline std::ostream &operator << (std::ostream & os,const std::map<T1, T2>& v)
{
bool first = true;
os << "[";
for (typename std::map<T1, T2>::const_iterator ii = v.begin(); ii != v.end(); ++ii)
{
if(!first)
os << ", ";
os << *ii ;
first = false;
}
return os << "]";
}
inline ll ssum(int x){
return (((ll)x)*(x+1))/2;
}
int main(){
ios::sync_with_stdio(false);
int T;
cin >> T;
while(T--){
int n,k;
cin >> n >> k;
vi a(k+2);
a[0] = 0; a[k+1]=n+1;
REP(i,1,k+1) cin >> a[i];
sort(all(a));
ll sum = 0;
REP(i,1,k+2){
int st = a[i-1]+1;
int en = a[i]-1;
if(st>sum+1 && st<=en) break;
sum += ssum(en)-ssum(st-1);
}
if((sum+1)%2) cout << "Chef"<< endl;
else cout << "Mom" << endl;
}
return 0;
}
<file_sep>/LELEMON.cpp
#include <cstdio>
#include <vector>
#include <algorithm>
typedef long long int x64;
using namespace std;
vector<vector<int> > input;
vector<int> visits;
int N,M;
inline void fastRead_int(int &x) {
register int c = getchar();
x = 0;
int neg = 0;
for(; ((c<48 || c>57) && c != '-'); c = getchar());
if(c=='-') {
neg = 1;
c = getchar();
}
for(; c>47 && c<58 ; c = getchar()) {
x = (x<<1) + (x<<3) + c - 48;
}
if(neg)
x = -x;
}
x64 solve()
{
x64 res=0;
for(int i=0;i<N;++i)
sort(input[i].begin(),input[i].end());
for(int k=0;k<M;++k)
{
if(input[visits[k]].size()>0)
{
res+=input[visits[k]][input[visits[k]].size()-1];
input[visits[k]].pop_back();
}
}
return res;
}
int main()
{
int T;
fastRead_int(T);
while(T>0)
{
fastRead_int(N);
fastRead_int(M);
input=vector<vector<int> >(N,vector<int>(0));
visits=vector<int>(M,0);
for(int i=0;i<M;++i) fastRead_int(visits[i]);
for(int i=0;i<N;++i)
{
int lim;
fastRead_int(lim);
for(int j=0;j<lim;++j)
{
int V;
fastRead_int(V);
input[i].push_back(V);
}
}
printf("%lld\n",solve());
--T;
}
return 0;
}
<file_sep>/CHEFCIRC.cpp
#include <bits/stdc++.h>
#define NMAX 502
#define eps 0.00001
using namespace std;
typedef pair<double,int> pdi;
inline double sq(double x){
return x*x;
}
double X[NMAX], Y[NMAX];
int N,M;
void solve(){
// double minX,minY,maxX,maxY;
// minX=X[0], maxX=X[0];
// minY=Y[0], maxY=Y[0];
// for(int i=1;i<N;++i){
// minX = min(minX,X[i]);
// maxX = max(maxX,X[i]);
// minY = min(minY,Y[i]);
// maxY = max(maxY,Y[i]);
// }
double maxRadius = 707106.000;
double minRadius = 0.0;
while(maxRadius-minRadius > eps){
double checkRadius = (maxRadius + minRadius)/2;
int maxInside = 0;
//cout << maxRadius << " " << minRadius << " " << checkRadius << endl;
for(int i=0;i<N;++i){
double x1=X[i], y1=Y[i];
vector<pdi> events;
//cout << x1 << "," << y1 << endl;
for(int j=0;j<N;++j){
if(i==j) continue;
double x2=X[j], y2=Y[j];
double d = sqrt(sq(x1-x2) + sq(y1-y2))/2;
if(d>checkRadius) continue;
double delta = atan2(y2-y1,x2-x1);
double phi = acos(d/checkRadius);
//cout << x2 << "," << y2 << " " << delta << " " << phi << endl;
events.push_back(pdi(delta-phi,-1));
events.push_back(pdi(delta+phi,+1));
}
sort(events.begin(),events.end());
int currentCount, maxCount;
currentCount = maxCount = 0;
for(int i=0;i<events.size();++i){
if(events[i].second == -1) ++currentCount;
else --currentCount;
maxCount = max(maxCount, currentCount);
//cout << events[i].first << ":" << events[i].second << " " << currentCount << endl;
}
maxCount++;
maxInside = max(maxInside, maxCount);
//cout << "maxOn:" << maxCount << endl;
}
//cout << "maxInside: " << maxInside << endl;
if(maxInside<M) minRadius = checkRadius + 0.01;
else maxRadius = checkRadius;
}
cout << maxRadius << endl;
}
int main(){
ios::sync_with_stdio(false);
cin >> N >> M;
for(int i=0;i<N;++i)
cin >> X[i] >> Y[i];
solve();
return 0;
}
<file_sep>/REN2013K.cpp
#include <bits/stdc++.h>
using namespace std;
typedef long long int LL;
int main(){
int T;
scanf("%d",&T);
while(T--){
LL x,y,r,res;
scanf("%lld %lld",&x,&y);
r=(max(x,y)-min(x,y))+1;
if(r%2==0){
res=r-1;
r=r/2;
res=res*r;
}
else {
res=r;
res=res*((r-1)/2);
}
printf("%lld\n",res);
}
}
<file_sep>/SEATSR.cpp
#include <bits/stdc++.h>
#define NMAX 111111
#define inf 1111111
using namespace std;
time_t start=clock();
int aCost,bCost;
char buffer[NMAX];
int editDistance(string a,string b,int k){
if(a.size() < b.size()) swap(a,b);
if(aCost==0) return 0;
//else if(bCost==0)
//return (a.size()-b.size()<=k?a.size()-b.size():-1);
int sz=b.size()+1;
//sz=max(sz,k+1);
vector<int> pre(sz,inf),dis(sz,inf);
for(int i = 0 ; i <= b.size(); i++){
pre[i] = aCost*i;
}
for(int j=1 ; j <= a.size(); j++) {
dis[0] = j*aCost;
int minIdx = max(j - k, 1);
int maxIdx = min(j + k, (int)b.size());
if(minIdx > 1){
dis[minIdx -1] = inf;
}
int minVal = inf;
for(int i = minIdx ; i <= maxIdx ; i++){
if(a[j-1] == b[i-1]){
dis[i] = pre[i-1];
} else {
dis[i] = min(pre[i]+aCost, min(dis[i-1]+aCost, pre[i-1]+bCost));
}
if(dis[i] < minVal) minVal = dis[i];
}
swap(pre,dis);
if(minVal > k) break;
}
if(pre[b.size()]<=k) return pre[b.size()];
return -1;
}
int main(){
int t;
scanf("%d",&t);
while(t--){
string x,y;
int k;
scanf("%s",buffer);
x=string(buffer);
scanf("%s",buffer);
y=string(buffer);
scanf("%d %d %d",&aCost,&bCost,&k);
//assert(!(k<aCost && k<bCost));
int res=editDistance(x,y,k);
if(res!=inf)
printf("%d\n",res);
else printf("-1\n");
}
//printf("%.6f",((double)clock()-start)/CLOCKS_PER_SEC);
return 0;
}
<file_sep>/FGFS.cpp
#include <bits/stdc++.h>
#define pb push_back
#define x first
#define y second
using namespace std;
typedef pair<int,int> pi;
typedef pair<pi,int> pii;
vector<pii> input;
bool comp(const pii &f,const pii &s){
if(f.y<s.y) return true;
if(f.y==s.y) return f.x.y<s.x.y;
return false;
}
int main(){
int T;
scanf("%d",&T);
while(T--){
int res=0;
input.clear();
int N,K;
scanf("%d %d",&N,&K);
for(int i=0;i<N;++i){
int s,f,p;
scanf("%d %d %d",&s,&f,&p);
input.pb(pii(pi(s,f),p));
}
sort(input.begin(),input.end(),comp);
int comp,prev,curr;
prev=-1;
for(int i=0;i<N;++i){
comp=input[i].y;
if(comp!=prev){
++res;
prev=comp;
curr=i;
continue;
}
if(input[i].x.x>=input[curr].x.y) ++res,curr=i;
prev=comp;
}
printf("%d\n",res);
}
return 0;
}
<file_sep>/CAOS2.cpp
#include <cstdio>
#include <cstring>
#include <climits>
#include <iostream>
using namespace std;
const int MAX=502;
char array[MAX][MAX];
int dp[MAX][MAX],prime[MAX];
int R,C;
void solve()
{
int counter=0;
for(int i=0;i<R;++i)
for(int j=0;j<C;++j)
dp[i][j]=INT_MAX;
for(int i=0;i<R;++i)
{
counter=0;
for(int j=0;j<C;++j)
{
if(array[i][j]=='#')
{
counter=0;
dp[i][j]=0;
continue;
}
dp[i][j]=min(dp[i][j],counter);
++counter;
}
}
for(int i=0;i<R;++i)
{
counter=0;
for(int j=C-1;j>=0;--j)
{
if(array[i][j]=='#')
{
counter=0;
dp[i][j]=0;
continue;
}
dp[i][j]=min(dp[i][j],counter);
++counter;
}
}
for(int j=0;j<C;++j)
{
counter=0;
for(int i=R-1;i>=0;--i)
{
if(array[i][j]=='#')
{
counter=0;
dp[i][j]=0;
continue;
}
dp[i][j]=min(dp[i][j],counter);
++counter;
}
}
for(int j=0;j<C;++j)
{
counter=0;
for(int i=0;i<R;++i)
{
if(array[i][j]=='#')
{
counter=0;
dp[i][j]=0;
continue;
}
dp[i][j]=min(dp[i][j],counter);
++counter;
}
}
int result=0;
for(int i=0;i<R;++i)
for(int j=0;j<C;++j)
result+=prime[dp[i][j]];
printf("%d\n",result);
}
int main()
{
int T;
for(int i = 2; i <= 500; i++) prime[i] = 1;
for(int i = 2; i <= 500; i++)
if(prime[i])
{
for(int j = i + i; j <= 500; j += i)
prime[j] = 0;
}
for(int i = 2; i <= 500; i++) prime[i] += prime[i - 1];
scanf("%d",&T);
while(T>0)
{
scanf("%d %d",&R,&C);
for(int i=0;i<R;++i)
scanf("%s",array[i]);
//printf("%d\n",solve());
solve();
--T;
}
return 0;
}
<file_sep>/DGCD.cpp
#include <bits/stdc++.h>
#define NMAX 55555
#define _abs(x) ( x>0?x:-x )
using namespace std;
int N,Q;
int input[NMAX];
int gcd(int a,int b){
if(b==0) return _abs(a);
return gcd(b,a%b);
}
// Begin Segment Tree Definition
struct node{
node *l, *r;
int diffG, lazy;
int first, last;
node(int a=0,int b=0,int c=0,int d=0,node *e=0,node *f=0){
first=a;last=b;diffG=c;lazy=d;l=e;r=f;
}
node(node *x){
first=x->first; last=x->last;
diffG=x->diffG; lazy=x->lazy;
l=x->l;r=x->r;
}
};
struct segTree{
node *root;
vector<int> A;
int sz;
int value;
segTree(vector<int> _A){
A = vector<int>(_A);
sz = A.size();
root = build(1,sz);
}
void update(int st,int en,int val){
value=val;
root=update(root,1,sz,st,en);
}
int query(int st,int en){
node *tmp=query(root,1,sz,st,en);
return gcd(tmp->first,tmp->diffG);
}
node* modify(node *x,int val){
x->first += val; x->last += val;
x->lazy += val;
return x;
}
node* push(node *x){
if(x->l){
if(x->lazy){
modify(x->l,x->lazy);
modify(x->r,x->lazy);
x->lazy=0;
}
}
}
void merge(node *x, node*y, node *z){
z->first=x->first;z->last=y->last;
z->diffG = gcd(x->diffG,gcd(x->last-y->first,y->diffG));
}
node* build(int i,int j){
node *tmp=new node();
if(i==j){
modify(tmp,A[i-1]);
return tmp;
}
int mid=(i+j)>>1;
tmp->l=build(i,mid);
tmp->r=build(mid+1,j);
merge(tmp->l,tmp->r,tmp);
return tmp;
}
node* update(node *root,int i,int j,int st,int en){
//node *tmp = new node(root->first,root->last,root->diffG,root->lazy,root->l,root->r);
node *tmp = new node(root);
if(i==st && j==en){
modify(tmp,value);
return tmp;
//modify(root,value);
//return root;
}
push(root); tmp->lazy=0;// push(tmp);
int mid=(i+j)>>1;
if(en<=mid) tmp->l = update(root->l,i,mid,st,en);
else if(st>mid) tmp->r = update(root->r,mid+1,j,st,en);
else{
tmp->l = update(root->l,i,mid,st,mid);
tmp->r = update(root->r,mid+1,j,mid+1,en);
}
merge(tmp->l,tmp->r,tmp);
return tmp;
}
node* query(node *root,int i,int j,int st,int en){
if(i==st && j==en) return root;
push(root);
int mid=(i+j)>>1;
if(en<=mid) return query(root->l,i,mid,st,en);
else if(st>mid) return query(root->r,mid+1,j,st,en);
else{
node *tmp=new node();
merge(query(root->l,i,mid,st,mid),query(root->r,mid+1,j,mid+1,en),tmp);
return tmp;
}
}
void debug(){
// cout << "----------------" << endl;
// _print(root,1,sz);
// cout << "----------------" << endl;
}
void _print(node *root,int i,int j){
if(i==j){
printf("%d,%d - %d %d %d %d\n",i,j,root->first,root->last,root->diffG,root->lazy);
return;
}
int mid=(i+j)>>1;
_print(root->l,i,mid);
_print(root->r,mid+1,j);
printf("%d,%d - %d %d %d %d\n",i,j,root->first,root->last,root->diffG,root->lazy);
}
};
// End of Segment Tree Definition
// -----------------------------------------------------------------------------------
// Begin LCA Functions
const int L = 19;
int p[NMAX][L];
int _log;
int ti[NMAX], to[NMAX], _tm;
int h[NMAX];
void buildLCA(){
for(_log=0; 1<<_log<N; ++_log);
++_log;
_tm = 0;
}
bool upper(int v, int u){
return v==-1 || u!=-1 && ti[v]<ti[u] && to[v]>to[u];
}
int lca(int v, int u){
if(v==u || upper(v,u)) return v;
if(upper(u,v)) return u;
for(int i=_log-1;i>=0;--i) if(!upper(p[v][i], u)) v = p[v][i];
return p[v][0];
}
int nextDownTo(int from, int downto){
for(int i=_log-1;i>=0;--i) if(upper(from, p[downto][i])) downto = p[downto][i];
return downto;
}
// End LCA Functions
//---------------------------------------------------------------------------------
// Begin Graph Routines
vector<int> adj[NMAX];
int subt[NMAX];
inline void addEdge(int u,int v){
adj[u].push_back(v);
adj[v].push_back(u);
}
int dfs(int u,int par){
ti[u] = ++_tm;
p[u][0] = par;
h[u] = (par==-1 ? 0 : h[par]+1);
subt[u]=1;
for(int i=1;i<_log;++i)
p[u][i] = (p[u][i-1]==-1 ? -1: p[p[u][i-1]][i-1]);
for(int i=0;i<adj[u].size();++i){
int v=adj[u][i];
if(v!=par) subt[u]+=dfs(v,u);
}
to[u] = ++_tm;
return subt[u];
}
// End Graph Routines
//--------------------------------------------------------------------------
// Begin HLD Functions
vector<segTree*> trees;
vector<int> chain[NMAX];
int chId[NMAX], chPos[NMAX],chHd[NMAX];
int chNum;
int q[NMAX], qn;
void hld(int u,int p){
if(chain[chNum].size()==0) chHd[chNum]=u;
chain[chNum].push_back(input[u]);
chId[u]=chNum; chPos[u]=chain[chNum].size();
int most,ind;
most = 0; ind = -1;
for(int i=0;i<adj[u].size();++i){
int v=adj[u][i];
if(v!=p && subt[v]>most) most=subt[v], ind=v;
}
if(ind!=-1) hld(ind,u);
for(int i=0;i<adj[u].size();++i){
int v=adj[u][i];
if(v!=ind && v!=p){
++chNum;
hld(v,u);
}
}
}
void getPath(int from, int to){
qn=0;
for(;;){
int fi = chId[from];
int end = chHd[fi];
if(fi==chId[to]) end = to;
q[qn++]=fi;
q[qn++]=chPos[end];
q[qn++]=chPos[from];
if(end==to) break;
from = p[end][0];
}
}
void buildSegTrees(){
for(int i=0;i<=chNum;++i){
segTree *tmp = new segTree(chain[i]);
trees.push_back(tmp);
tmp->debug();
}
}
void changePath(int u,int p,int val){
getPath(u,p);
for(int i=0;i<qn;i+=3){
int id=q[i];
trees[id]->update(q[i+1],q[i+2],val);
}
}
int queryPath(int u,int p){
int res=0;
getPath(u,p);
for(int i=0;i<qn;i+=3)
res = gcd(res, trees[q[i]]->query(q[i+1],q[i+2]));
return res;
}
// End HLD functions
//-------------------------------------------------------------------------
// Solve Problem
void solve(){
buildLCA();
dfs(1,-1);
hld(1,-1);
buildSegTrees();
char buf[3];
int u,v,val,lc;
for(int i=0;i<Q;++i){
scanf("%s %d %d",buf,&u,&v);
++u;++v;
lc = lca(u,v);
if(buf[0]=='F')
printf("%d\n",gcd(queryPath(u,lc),queryPath(v,lc)));
else{
scanf("%d",&val);
changePath(u,lc,val);
if(v!=lc) changePath(v,nextDownTo(lc,v),val);
}
// for(int i=0;i<=chNum;++i){
// cout << i << endl;
// trees[i]->debug();
// }
}
}
int main(){
int u,v;
scanf("%d",&N);
for(int i=0;i<N-1;++i){
scanf("%d %d",&u,&v);
++u;++v;
addEdge(u,v);
}
for(int i=0;i<N;++i) scanf("%d",&input[i+1]);
scanf("%d",&Q);
solve();
return 0;
}
<file_sep>/BUY1GET1.c
#include <stdio.h>
int alpha[52];
void setZero();
int main()
{
int T,i,j,cost=0;
char str[101][201] ;
scanf("%d",&T);
for(i=0;i<T;++i)
scanf("%s",str[i]);
for(i=0;i<T;++i)
{
setZero();
cost=0;
for( j=0;str[i][j]!='\0';++j)
{
if(str[i][j]<91)
++alpha[str[i][j]-65];
else
++alpha[str[i][j]-71];
}
for( j=0;j<52;++j)
{ cost+=(alpha[j]/2+alpha[j]%2);
}
printf("\n%d",cost);
}
return 0;
}
void setZero()
{
int i;
for(i=0;i<52;++i)
alpha[i]=0;
}
<file_sep>/PROSUM.cpp
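// Counts pairs (i,j) whose product is strictly greater than their sum:
// start from all N*(N-1)/2 pairs, then discard pairs containing a 0 or a 1
// and pairs made of two 2s (for all remaining values a,b >= 2, a*b > a+b).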
#include <bits/stdc++.h>
#define NMAX 111111
using namespace std;
typedef long long int LL;
LL input[NMAX];
int main(){
LL T;
scanf("%lld",&T);
while(T--){
LL N,zeroes,ones,twos;
LL res=0;
scanf("%lld",&N);
zeroes=ones=twos=0;
for(LL i=0;i<N;++i){
scanf("%lld",&input[i]);
if(input[i]==0) ++zeroes;
else if(input[i]==1) ++ones;
else if(input[i]==2) ++twos;
}
res=N*(N-1)>>1;
for(LL i=0;i<zeroes;++i) res-=(N-i-1);
N=N-zeroes;
for(LL i=0;i<ones;++i) res-=(N-i-1);
res-=(twos*(twos-1)>>1);
printf("%lld\n",res);
}
return 0;
}
<file_sep>/COMPILER.cpp
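// For each expression of '<' and '>', prints the length of the longest
// prefix that forms a balanced bracket sequence (0 if no prefix balances).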
#include <bits/stdc++.h>
#define NMAX 1111111
using namespace std;
char str[NMAX];
int main(){
int T;
scanf("%d",&T);
while(T--){
scanf("%s",str);
int len=strlen(str);
stack<int> t;
int match=0;
for(int i=0;i<len;++i){
if(str[i]=='<') t.push(1);
else if(str[i]=='>'){
if(t.empty()) break;
t.pop();
}
if(t.empty()) match=i+1;
}
printf("%d\n",match);
}
return 0;
}
<file_sep>/RECTQUER.cpp
//============================================================================
// Name        : RECTQUER.cpp
// Description : 2D prefix counts for each digit value 1..10; every query
//               reports how many distinct values occur in the sub-rectangle.
//============================================================================
#include <cstdio>
#include <cstring>
using namespace std;
#define NMAX 333
int input[NMAX][NMAX],state[NMAX][NMAX][10];
int main(){
int N;
scanf("%d",&N);
memset(state,0,sizeof(state));
for(int i=0;i<N;++i)
for(int j=0;j<N;++j){
scanf("%d",&input[i][j]);
if(i>0) for(int k=0;k<10;++k) state[i][j][k]+=state[i-1][j][k];
if(j>0) for(int k=0;k<10;++k) state[i][j][k]+=state[i][j-1][k];
if(i>0 && j>0) for(int k=0;k<10;++k) state[i][j][k]-=state[i-1][j-1][k];
state[i][j][input[i][j]-1]+=1;
}
/* printf("\n STATE SPACE");
for(int i=0;i<N;++i)
for(int j=0;j<N;++j)
{
printf("\n @ %d,%d - ",i,j);
for(int k=0;k<10;++k) printf(" %d",state[i][j][k]);
}
*/
int Q;
scanf("%d",&Q);
for(int i=0;i<Q;++i){
int x1,y1,x2,y2,res;
scanf("%d %d %d %d",&x1,&y1,&x2,&y2);
--x1;
--x2;
--y1;
--y2;
res=0;
for(int i=0;i<10;++i){
int ans=state[x2][y2][i];
if(x1>0) ans-=state[x1-1][y2][i];
if(y1>0) ans-=state[x2][y1-1][i];
if(x1>0 && y1>0) ans+=state[x1-1][y1-1][i];
if(ans>0) ++res;
}
printf("%d\n",res);
}
return 0;
}
<file_sep>/FOURSQ.cpp
#include <bits/stdc++.h>
#define NMAX 1000000
#define SMAX 100000
#define L(x) (x<<1)
#define R(x) ((x<<1)|1)
using namespace std;
typedef long long int ll;
typedef unsigned long long int ull;
const ull ten11 = 100000000000LL;
const ull ten10 = 10000000000LL;
const ull ten9 = 1000000000LL;
const int ten3 = 1000;
const int ten6 = 1000000;
vector<int> squares;
vector<int> splits[NMAX+1];
int sqRoot[NMAX+1], input[SMAX+1], fourSquares[NMAX+1];
ll MOD;
int check(int x,int depth){
if(x>NMAX) assert(false);
if(depth>3) return 0;
if(fourSquares[x] == 1 ) return 1;
int pos = upper_bound(squares.begin(),squares.end(),x)-squares.begin()-1;
if(squares[pos] == x){
fourSquares[x] = 1;
splits[x].push_back(x);
return 1;
}
while(pos>=0){
if(check(x-squares[pos], depth+1)){
if(splits[x-squares[pos]].size()<=3){
for(int i=0;i<splits[x-squares[pos]].size();++i)
splits[x].push_back(splits[x-squares[pos]][i]);
splits[x].push_back(squares[pos]);
//splits[x].push_back(squares[pos]);
//splits[x].push_back(x-squares[pos]);
fourSquares[x] = 1;
return 1;
}
}
--pos;
}
//assert(false);
return 0;
}
struct node{
ll a,b,c,d;
node(): a(0), b(0), c(0), d(0) {};
node(ll _a, ll _b, ll _c, ll _d): a(_a), b(_b), c(_c), d(_d) {};
}tree[SMAX<<3];
inline ull mulmod(ull A, ull B)
{
register int x1,y1,x2,y2;
register ull res;
x1 = A/ten3;
y1 = (A-(x1*ten3));
x2 = B/ten3;
y2 = (B-(x2*ten3));
res = ((ull)x1*x2);
if(res>=MOD) res %= MOD;
res *= ten6;
ull tmp = ((ull)x1*y2*ten3) + ((ull)x2*y1*ten3) + (y1*y2);
res += tmp;
if(res>=MOD) res %= MOD;
// register ull x1,y1,res;
// register ull x2,x3,y2,y3;
// x3 = A/ten11;
// x2 = (A-(x3*ten11))/ten10;
// x1 = (A-(x3*ten11)-(x2*ten10));
// y3 = B/ten11;
// y2 = (B-(y3*ten11))/ten10;
// y1 = (B-(y3*ten11)-(y2*ten10));
// res = (x1*y1);
// if(res>=MOD) res %= MOD;
// ull tmp = (y1*ten9);
// if(tmp>=MOD) tmp %= MOD;
// tmp *= (10*x2+100*x3);
// if(tmp>=MOD) tmp %= MOD;
// res += tmp;
// if(res>=MOD) res -= MOD;
// res += ((y1*ten9))*(10*x2+100*x3);
// res += ((x1*ten9))*(10*y2+100*y3);
//res += ((x2+10*x3)*y1*1000);
//res += ((y2+10*y3)*x1*1000)%MOD;
//res += ((((x1*y2 + y1*x2 + 10*x1*y3 + 10*y1*x3)*1000)%MOD)*10000000LL)%MOD;
// //res += ((((((x2*y2 + 10*x2*y3 + 10*x3*y2 + 100*x3*y3)*ten10)%MOD)*100000)%MOD)*100000)%MOD;
// res %= MOD;
// if ( A == 0 || B == 0 ) return 0;
// if ( 128-__builtin_clzll(A)-__builtin_clzll(B) <= 64 ) return (A*B)%MOD;
// ull u = multiply(A, B>>1);
// ull res = 0;
// u <<= 1;
// res += u;
// if ( B&1 )
// res += A;
//res %= MOD;
return res;
}
// ull hand_multiply(ull a, ull b) {
// if(b == 0 || b<=10000000) {
// return (a*b)%MOD;
// }
// else {
// ll result = (hand_multiply(a, b/10)*10)%MOD;
// ll lastDigit = b%10;
// result = (result+((a*lastDigit)))%MOD;
// return result;
// }
// }
//long long mulmod(long long A, long long B){
// if(A == 0 || LLONG_MAX/A >= B)
// return (A*B)%MOD;
//return multiply(max(A,B), min(A,B));
//}
// inline ll mulmod(ll a, ll b) {
// if(a == | LLONG_MAX/a >= b)
// return (a*b)%MOD;
// ll res = 0;
// while (a != 0) {
// if (a & 1) res = (res + b) % MOD;
// a >>= 1;
// b = (b << 1) % MOD;
// }
// return res;
// }
inline node splitToNode(int x){
vector<int> splitX = splits[x];
if(splitX.size()==1) return node(sqRoot[splitX[0]], 0, 0, 0);
else if(splitX.size()==2) return node(sqRoot[splitX[0]], sqRoot[splitX[1]], 0, 0);
else if(splitX.size()==3) return node(sqRoot[splitX[0]], sqRoot[splitX[1]], sqRoot[splitX[2]], 0);
else return node(sqRoot[splitX[0]], sqRoot[splitX[1]], sqRoot[splitX[2]], sqRoot[splitX[3]]);
}
inline node merge(node x, node y){
register ll a,b,c,d;
a = mulmod(x.a,y.a);
a += mulmod(x.b,y.b); if(a>=MOD) a-= MOD;
a += mulmod(x.c,y.c); if(a>=MOD) a-= MOD;
a += mulmod(x.d,y.d); if(a>=MOD) a-= MOD;
b = mulmod(x.a,y.b);
b -= mulmod(x.b,y.a); if(b<0) b += MOD;
b += mulmod(x.c,y.d); if(b>=MOD) b -= MOD;
b -= mulmod(x.d,y.c); if(b<0) b += MOD;
c = mulmod(x.a,y.c);
c -= mulmod(x.c,y.a); if(c<0) c += MOD;
c += mulmod(x.d,y.b); if(c>=MOD) c -= MOD;
c -= mulmod(x.b,y.d); if(c<0) c += MOD;
d = mulmod(x.a,y.d);
d -= mulmod(x.d,y.a); if(d<0) d += MOD;
d += mulmod(x.b,y.c); if(d>=MOD) d -= MOD;
d -= mulmod(x.c,y.b); if(d<0) d += MOD;
// ll a = mulmod(x.a,y.a) + mulmod(x.b,y.b) + mulmod(x.c,y.c) + mulmod(x.d,y.d);
// ll b = mulmod(x.a,y.b) - mulmod(x.b,y.a) + mulmod(x.c,y.d) - mulmod(x.d,y.c);
// ll c = mulmod(x.a,y.c) - mulmod(x.c,y.a) + mulmod(x.d,y.b) - mulmod(x.b,y.d);
// ll d = mulmod(x.a,y.d) - mulmod(x.d,y.a) + mulmod(x.b,y.c) - mulmod(x.c,y.b);
// a = a%MOD;
// if(a<0) a+= MOD;
// b = b%MOD;
// if(b<0) b+= MOD;
// c = c%MOD;
// if(c<0) c+= MOD;
// d = d%MOD;
// if(d<0) d+= MOD;
return node(a,b,c,d);
}
void init(int idx, int i, int j){
if(i==j){
tree[idx] = splitToNode(input[i-1]);
return;
}
int mid = (i+j)>>1;
init(L(idx),i,mid);
init(R(idx),mid+1,j);
tree[idx] = merge(tree[L(idx)],tree[R(idx)]);
}
void update(int idx, int i, int j, int pos, int Y){
if(i==j && i==pos){
tree[idx] = splitToNode(Y);
return;
}
int mid=(i+j)>>1;
if(pos<=mid) update(L(idx),i,mid,pos,Y);
else update(R(idx),mid+1,j,pos,Y);
tree[idx] = merge(tree[L(idx)],tree[R(idx)]);
}
node query(int idx, int i, int j, int left, int right){
if(i==left && j==right) return tree[idx];
int mid = (i+j)>>1;
if(right<=mid) return query(L(idx),i,mid,left,right);
else if(left>mid) return query(R(idx),mid+1,j,left,right);
else{
return merge(query(L(idx),i,mid,left,mid)
,query(R(idx),mid+1,j,mid+1,right));
}
}
int main(){
//ios::sync_with_stdio(false);
//clock_t start = clock();
for(int a=0;a*a<=NMAX;++a){
squares.push_back(a*a);
sqRoot[a*a]=a;
}
for(int i=0;i<=NMAX;++i){
check(i,0);
// int s=0;
// for(int j=0;j<splits[i].size();++j)
// s += splits[i][j];
// if(splits[i].size()==0 || splits[i].size()>4 || s != i)
// assert(false);
}
int T;
//cin >> T;
scanf("%d",&T);
while(T--){
int N,Q;
//cin >> N >> Q >> MOD;
scanf("%d %d %lld", &N, &Q, &MOD);
for(int i=0;i<N;++i) scanf("%d", &input[i]);
// cin >> input[i];
//for(int i=0;i<N;++i) check(input[i], 0);
init(1,1,N);
for(int i=0;i<Q;++i){
int type, u, v;
//cin >> type >> u >> v;
scanf("%d %d %d", &type, &u, &v);
if(type == 1){
//check(v, 0);
update(1,1,N,u,v);
}
else{
node res = query(1,1,N,u,v);
//cout << res.a << " " << res.b << " " << res.c << " " << res.d << endl;
printf("%lld %lld %lld %lld\n", res.a, res.b, res.c, res.d);
}
}
}
//clock_t stop = clock();
//cout << (stop - start)*1.0 / CLOCKS_PER_SEC << endl;
return 0;
}
<file_sep>/README.md
## Codechef solutions
Solutions to some of the problems on [Codechef](http://www.codechef.com).
Each solution is named after its problem code (`<problem-code>.cpp`).
I intend to add detailed solutions to some of the problems [here](http://maraghuram.github.io).
<file_sep>/RETPO.cpp
#include <bits/stdc++.h>
using namespace std;
typedef long long int ll;
inline ll abs_(ll x){
if( x>0 ) return x;
return -x;
}
int main(){
int T;
scanf("%d",&T);
while(T--){
ll x,y,res=0;
scanf("%lld %lld",&x,&y);
x=abs_(x);
y=abs_(y);
if( x==y )res=x+y;
else if(x>y){
res = y+y;
res += (((x-y-1)/2)*4)+1-((x-y)%2)+3;
}
else{
res = x+x;
res += (((y-x)/2)*4)+(y-x)%2;
}
printf("%lld\n",res);
}
return 0;
}
<file_sep>/FROGV.cpp
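// Positions are sorted and deduplicated; maxInd[i] is the furthest sorted
// position reachable from i when consecutive positions at most K apart are
// linked. Two frogs can communicate iff the higher-positioned one lies
// within that reach of the lower-positioned one.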
#include <bits/stdc++.h>
#define NMAX 111111
using namespace std;
typedef long long int ll;
ll A[NMAX];
int rank[NMAX],maxInd[NMAX];
inline void swap(int &x,int &y){
int t;
t=x,x=y,y=t;
}
int main(){
int N,P;
ll K;
vector<ll> B;
scanf("%d %lld %d",&N,&K,&P);
for(int i=0;i<N;++i){
scanf("%lld",&A[i]);
B.push_back(A[i]);
}
sort(B.begin(),B.end());
B.erase(unique(B.begin(),B.end()),B.end());
for(int i=0;i<N;++i){
rank[i]= lower_bound(B.begin(),B.end(),A[i])-B.begin();
}
int best=B.size()-1;
maxInd[best]=best;
for(int i=B.size()-2;i>=0;--i){
if(B[i+1]-B[i]>K) best=i;
maxInd[i]=best;
//cout<<B[i]<<"<<- "<<maxInd[i]<<endl;
}
for(int i=0;i<P;++i){
int u,v;
scanf("%d %d",&u,&v);
--u,--v;
int x,y;
if(rank[u]<rank[v]) x=u,y=v;
else x=v,y=u;
if(rank[y]<=maxInd[rank[x]]) printf("Yes\n");
else printf("No\n");
}
return 0;
}
<file_sep>/SPCANDY.cpp
#include <cstdio>
typedef long long int x64;
int main()
{
int T;
scanf("%d",&T);
while(T>0)
{
x64 N,K;
scanf("%lld %lld",&N,&K);
if(K==0)
printf("%d %lld\n",0,N);
else
printf("%lld %lld\n",N/K,N%K);
--T;
}
return 0;
}
<file_sep>/PLZLYKME.cpp
#include <bits/stdc++.h>
using namespace std;
typedef unsigned long long int LL;
int main(){
int T;
scanf("%d",&T);
while(T--){
LL L,D,S,C;
scanf("%llu %llu %llu %llu",&L,&D,&S,&C);
int flag=0;
LL res=S;
LL days=1;
while(1){
if(res>=L){
flag=1;
break;
}
++days;
if(days>D) break;
res=res*(C+1);
}
if(flag) printf("ALIVE AND KICKING\n");
else printf("DEAD AND ROTTING\n");
}
return 0;
}
<file_sep>/MARBLEGF.cpp
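// Fenwick (binary indexed) tree with point updates and prefix sums.
// 'S u v' prints the number of marbles in boxes u..v, 'G u v' adds v
// marbles to box u, 'T u v' removes v marbles from box u.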
#include <cstdio>
#include <cstring>
#define NMAX 1111111
typedef long long int LL;
LL tree[NMAX];
void update(int idx,LL val){
for(int i=idx;i<NMAX;i+=(i&-i))
tree[i]+=val;
}
LL read(int idx){
LL val=0;
for(int i=idx;i>0;i-=(i&-i))
val+=tree[i];
return val;
}
int main(){
int N,Q;
memset(tree,0,sizeof(tree));
scanf("%d %d",&N,&Q);
for(int i=0;i<N;++i){
int in;
scanf("%d",&in);
update(i+1,in);
}
for(int i=0;i<Q;++i){
char type[3];
int u,v;
scanf("%s %d %d",type,&u,&v);
switch(type[0]){
case 'S' : ++u,++v;
printf("%lld\n",u>1?read(v)-read(u-1):read(v));
break;
case 'G' : ++u;
update(u,v);
break;
case 'T' : ++u;
update(u,-v);
break;
}
}
return 0;
}
<file_sep>/DOWNLOAD.cpp
// By noob.
#include <cstdio>
#include <cmath>
#include <cstring>
#include <cstdlib>
#include <ctime>
#include <iostream>
#include <fstream>
#include <sstream>
#include <algorithm>
#include <string>
#include <vector>
#include <set>
#include <map>
#include <list>
#include <complex>
#pragma comment(linker, "/STACK:266777216")
using namespace std;
#define assert(f) { if(!(f)) { fprintf(stderr,"Assertion failed: "); fprintf(stderr,#f); fprintf(stderr,"\n"); exit(1); } }
typedef long long LL;
typedef unsigned long long ULL;
typedef vector<int> VI;
typedef vector<VI> VVI;
typedef pair<int,int> PII;
typedef vector<PII> VPII;
typedef vector<double> VD;
typedef pair<double,double> PDD;
const int inf=1000000000;
const LL INF=LL(inf)*inf;
const double eps=1e-9;
const double PI=2*acos(0.0);
#define bit(n) (1<<(n))
#define bit64(n) ((LL(1))<<(n))
#define pb push_back
#define sz size()
#define mp make_pair
#define cl clear()
#define all(a) (a).begin(),(a).end()
#define fill(ar,val) memset((ar),(val),sizeof (ar))
#define MIN(a,b) {if((a)>(b)) (a)=(b);}
#define MAX(a,b) {if((a)<(b)) (a)=(b);}
#define sqr(x) ((x)*(x))
#define X first
#define Y second
clock_t start=clock();
#define N 330000
#define G 5010
int x[N],y[N],tree[N];
VI aliens[G];
int previous[G],result[G],m;
void update (int i, int delta) {
for (; i < m; i |= i + 1)
tree[i] += delta;
}
int read(int r)
{
int res = 0;
for (; r >= 0; r = (r & (r + 1)) - 1)
res += tree[r];
return res;
}
int main()
{
#ifdef OFFLINE_JUDGE
freopen("1.in","r",stdin);
#endif
#ifdef WRITE_JUDGE
freopen("1.out","w",stdout);
#endif
int n,q;
scanf("%d",&n);
for(int i=0;i<n;++i) scanf("%d %d",&x[i],&y[i]);
scanf("%d",&q);
for(int i=0;i<q;++i)
{
int k;
scanf("%d",&k);
aliens[i].resize(k);
for(int j=0;j<k;++j) scanf("%d",&aliens[i][j]);
}
//Mix everything and compress (mix masala)
VI mixture;
vector<pair<int,pair<int,int> > > queue;
mixture.clear();
queue.clear();
for(int i=0;i<n;++i)
{
mixture.pb(x[i]);
mixture.pb(y[i]);
}
for(int i=0;i<q;++i)
for(int j=0;j<aliens[i].sz;++j)
mixture.pb(aliens[i][j]);
sort(all(mixture));
mixture.erase(unique(all(mixture)),mixture.end());
m=mixture.sz;
for(int i=0;i<n;++i)
{
x[i]=lower_bound(all(mixture),x[i])-mixture.begin();
y[i]=lower_bound(all(mixture),y[i])-mixture.begin();
queue.pb(mp(x[i],mp(-1,i)));
queue.pb(mp(y[i],mp(+1,i)));
}
for(int i=0;i<q;++i)
for(int j=0;j<aliens[i].sz;++j)
{
aliens[i][j]=lower_bound(all(mixture),aliens[i][j])-mixture.begin();
queue.pb(mp(aliens[i][j],mp(0,i)));
}
sort(all(queue));
memset(result,0,sizeof(result));
memset(previous,-1,sizeof(previous));
memset(tree,0,sizeof(tree));
for(int i=0;i<queue.sz;++i)
{
int point=queue[i].X;
int type=queue[i].Y.X;
int id=queue[i].Y.Y;
if(type)
{
update(x[id],-type);
}
else
{
result[id]+=read(point);
if(previous[id]!=-1) result[id]-=read(previous[id]);
previous[id]=point;
}
}
for(int i=0;i<q;++i) printf("%d\n",result[i]);
#ifdef OFFLINE_JUDGE
fprintf(stderr,"time=%.3lfsec\n",0.001*(clock()-start));
#endif
return 0;
}
<file_sep>/LAPIN.cpp
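// A string is accepted when the letter counts of its first half equal those
// of its second half (the middle character is ignored for odd lengths).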
#include <cstdio>
#include <cstring>
#include <vector>
const int MAX=1005;
using namespace std;
vector<int> count;
int main()
{
int T;
scanf("%d",&T);
while(T>0)
{
char str[MAX];
int len,i;
bool flag=true;
scanf("%s",str);
len=strlen(str);
count.clear();
count.resize(26,0);
for(i=0;i<len/2;++i)
++count[str[i]-'a'];
for(i=(len+1)/2;i<len;++i)
--count[str[i]-'a'];
for(i=0;i<26;++i)
if(count[i])
{
flag=false;
break;
}
if(!flag)
printf("NO\n");
else
printf("YES\n");
--T;
}
return 0;
}
<file_sep>/WALK.cpp
#include <bits/stdc++.h>
#define NMAX 111111
using namespace std;
int input[NMAX];
int main(){
int T;
scanf("%d",&T);
while(T--){
int N;
int diff=0,res=-1;
scanf("%d",&N);
for(int i=0;i<N;++i){ scanf("%d",&input[i]); res=max(res,input[i]+i); }
printf("%d\n",res);
}
return 0;
}
<file_sep>/MAXDIFF.cpp
#include <stdio.h>
#include <vector>
#include <algorithm>
using namespace std;
vector<int> weights;
int solve(int n,int k)
{
int chefSum,kidSum,mid;
sort (weights.begin(), weights.end());
chefSum=kidSum=0;
mid=k<n-k?k:n-k;
for(int i=0;i<n;++i)
if(i<mid)
kidSum+=weights[i];
else
chefSum+=weights[i];
return chefSum-kidSum;
}
int main()
{
int T,N,K,R[102];
scanf("%d",&T);
for(int i=0;i<T;++i)
{
scanf("%d %d",&N,&K);
weights.resize(N,0);
for(int j=0;j<N;++j)
{
int temp;
scanf("%d",&temp);
weights[j]=temp;
}
R[i]=solve(N,K);
}
for(int i=0;i<T;++i)
{
printf("%d\n",R[i]);
}
return 0;
}
<file_sep>/DIGJUMP.cpp
#include <bits/stdc++.h>
#define SMAX 11
#define NMAX 111111
#define inf 99999
#define x first
#define y second
using namespace std;
typedef pair<int,int> pii;
pii dist[SMAX];
char input[NMAX];
void solve1();
int main(){
solve1();
return 0;
}
void solve1(){
int len;
for(int i=0;i<SMAX;++i) dist[i]=pii(inf,inf);
scanf("%s",input);
len=strlen(input);
dist[input[0]-'0'].x=dist[input[0]-'0'].y=0;
for(int T=0;T<50;++T){
for(int i=1;i<len-1;++i){
int prevNum=input[i-1]-'0';
int num=input[i]-'0';
int nextNum=input[i+1]-'0';
int currDist=(dist[num].y==i?0:1);
currDist+=dist[num].x;
int prevdist=(dist[prevNum].y==i-1?0:1);
prevdist+=dist[prevNum].x+1;
if(prevdist<currDist) dist[num].x=prevdist,dist[num].y=i;
currDist=(dist[num].y==i?0:1);
currDist+=dist[num].x;
int nextDist=(dist[nextNum].y==i+1?0:1);
nextDist+=dist[nextNum].x+1;
if(nextDist<currDist) dist[num].x=nextDist,dist[num].y=i;
currDist=(dist[num].y==i?0:1);
currDist+=dist[num].x;
int prevDist=(dist[prevNum].y==i-1?0:1);
prevDist+=dist[prevNum].x;
nextDist=(dist[nextNum].y==i+1?0:1);
nextDist+=dist[nextNum].x;
if(nextDist>currDist+1) dist[nextNum].x=currDist+1,dist[nextNum].y=i+1;
if(prevDist>currDist+1) dist[prevNum].x=currDist+1,dist[prevNum].y=i-1;
}
if(len>1) {
int prevDist=(dist[input[len-2]-'0'].y==len-2?0:1);
prevDist+=dist[input[len-2]-'0'].x+1;
int currDist=(dist[input[len-1]-'0'].y==len-1?0:1);
currDist+=dist[input[len-1]-'0'].x;
if(currDist>prevDist) dist[input[len-1]-'0'].x=prevDist,dist[input[len-1]-'0'].y=len-1;
}
}
//printf("%d\n",dist[1].x);
printf("%d\n",(dist[input[len-1]-'0'].y==len-1?dist[input[len-1]-'0'].x:dist[input[len-1]-'0'].x+1));
//return 0;
}
<file_sep>/TOTR.cpp
#include <map>
#include <iostream>
#include <stdio.h>
std::map< char,char > lookupTable;
void createMap(char s[])
{
char c='a';
for(int i=0;s[i]!='\0';++i,++c)
{
lookupTable[c]=s[i];
lookupTable[(char)(c-32)]=(char)(s[i]-32);
}
lookupTable['_']=' ';
}
int main()
{
int T;
char str[30],N[101][101];
scanf("%d %s",&T,str);
createMap(str);
for(int i=0;i<T;++i)
{
scanf("%s",N[i]);
}
for(int i=0;i<T;++i)
{
for(int j=0;N[i][j]!='\0';++j)
{
if(lookupTable.find(N[i][j])!=lookupTable.end())
N[i][j]=lookupTable[N[i][j]];
}
printf("%s\n",N[i]);
}
}
<file_sep>/QSET.cpp
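// Segment tree over prefix digit-sums mod 3 with a lazy "rotation" on update.
// Query type 1 changes a single digit (shifting all later prefix sums by the
// difference); type 2 counts substrings of [u,v] whose digit sum is
// divisible by 3.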
// author noob333
// H + Shift C + B + E + B + E
#include <bits/stdc++.h>
#define NMAX 111111
//#define DEBUG 1
using namespace std;
// Input macros
#define s(n) scanf("%d",&n)
#define sc(n) scanf("%c",&n)
#define sl(n) scanf("%lld",&n)
#define sf(n) scanf("%lf",&n)
#define ss(n) scanf("%s",n)
// Useful constants
#define INF (int)1e9
#define EPS 1e-9
// Useful hardware instructions
#define bitcount __builtin_popcount
#define gcd __gcd
// Useful container manipulation / traversal macros
#define REP(i,a,b) for(int i=a;i<b;i++)
#define RREP(i,a,b) for(int i=a;i>b;i--)
#define foreach(v, c) for( typeof( (c).begin()) v = (c).begin(); v != (c).end(); ++v)
#define all(a) a.begin(), a.end()
#define in(a,b) ( (b).find(a) != (b).end())
#define pb push_back
#define fill(a,v) memset(a, v, sizeof a)
#define sz(a) ((int)(a.size()))
#define mp make_pair
#define fi first
#define se second
#define L(x) ((x<<1))
#define R(x) ((x<<1)+1)
// Some common useful functions
#define maX(a,b) ( (a) > (b) ? (a) : (b))
#define miN(a,b) ( (a) < (b) ? (a) : (b))
#define checkbit(n,b) ( (n >> b) & 1)
#define DREP(a) sort(all(a)); a.erase(unique(all(a)),a.end())
#define INDEX(arr,ind) (lower_bound(all(arr),ind)-arr.begin())
typedef long long ll;
typedef vector<int> vi;
typedef vector<vector<int> > vvi;
typedef vector<ll> vl;
typedef pair<int, int> ii;
typedef vector<ii> vii;
typedef set<int> si;
typedef map<string, int> msi;
#ifdef DEBUG
#define debug(args...) {dbg,args; cerr<<endl;}
#else
#define debug(args...) // Just strip off all debug tokens
#endif
struct debugger
{
template<typename T> debugger& operator , (const T& v)
{
cerr<<v<<" ";
return *this;
}
} dbg;
template <typename T1, typename T2>
inline std::ostream& operator << (std::ostream& os, const std::pair<T1, T2>& p)
{
return os << "(" << p.first << ", " << p.second << ")";
}
template<typename T>
inline std::ostream &operator << (std::ostream & os,const std::vector<T>& v)
{
bool first = true;
os << "[";
for(unsigned int i = 0; i < v.size(); i++)
{
if(!first)
os << ", ";
os << v[i];
first = false;
}
return os << "]";
}
template<typename T>
inline std::ostream &operator << (std::ostream & os,const std::set<T>& v)
{
bool first = true;
os << "[";
for (typename std::set<T>::const_iterator ii = v.begin(); ii != v.end(); ++ii)
{
if(!first)
os << ", ";
os << *ii;
first = false;
}
return os << "]";
}
template<typename T1, typename T2>
inline std::ostream &operator << (std::ostream & os,const std::map<T1, T2>& v)
{
bool first = true;
os << "[";
for (typename std::map<T1, T2>::const_iterator ii = v.begin(); ii != v.end(); ++ii)
{
if(!first)
os << ", ";
os << *ii ;
first = false;
}
return os << "]";
}
struct node{
int val[3];
int lazy;
node(){
val[0] = val[1] = val[2] = 0;
lazy = 0;
}
}tree[NMAX<<2];
inline void oneswap(node &x){
swap(x.val[0],x.val[1]);
swap(x.val[0],x.val[2]);
}
int a[NMAX];
int mod3[NMAX];
char input[NMAX];
void merge(node &a,node b,node c){
REP(i,0,3)
a.val[i] = b.val[i] + c.val[i];
}
void build(int idx,int i,int j){
if(i==j){
REP(k,0,3) tree[idx].val[k]=0; tree[idx].lazy=0;
tree[idx].val[mod3[i-1]]=1;
return;
}
int mid=(i+j)>>1;
build(L(idx),i,mid);
build(R(idx),mid+1,j);
merge(tree[idx],tree[L(idx)],tree[R(idx)]);
}
node get(int idx,int i,int j,int st,int en){
//debug("GET",i,j);
if(tree[idx].lazy){
if(tree[idx].lazy%3){
REP(k,0,tree[idx].lazy%3) oneswap(tree[idx]);
if(i!=j){
tree[L(idx)].lazy += tree[idx].lazy;
tree[R(idx)].lazy += tree[idx].lazy;
}
}
tree[idx].lazy=0;
}
if(i>en || j <st) return node();
if(st<=i && j<=en) return tree[idx];
//if(i==st && j==en) return tree[idx];
int mid=(i+j)>>1;
node res,r1,r2;
r1=get(L(idx),i,mid,st,en);
r2=get(R(idx),mid+1,j,st,en);
merge(res,r1,r2);
//debug("***",vi(res.val,res.val+3),"***");
return res;
}
void add(int idx,int i,int j,int st,int en,int newMod){
if(tree[idx].lazy){
if(tree[idx].lazy%3){
REP(k,0,tree[idx].lazy%3) oneswap(tree[idx]);
if(i!=j){
tree[L(idx)].lazy += tree[idx].lazy;
tree[R(idx)].lazy += tree[idx].lazy;
}
}
tree[idx].lazy=0;
}
if(i>en || j<st) return;
if(st<=i && j<=en){
REP(k,0,newMod) oneswap(tree[idx]);
if(i!=j){
tree[L(idx)].lazy += newMod;
tree[R(idx)].lazy += newMod;
}
return;
}
int mid = (i+j)>>1;
add(L(idx),i,mid,st,en,newMod);
add(R(idx),mid+1,j,st,en,newMod);
merge(tree[idx],tree[L(idx)],tree[R(idx)]);
}
void printTree(int idx,int i,int j){
if(i==j){
//debug(i,j,vi(tree[idx].val,tree[idx].val+3),tree[idx].lazy);
return;
}
int mid = (i+j)>>1;
printTree(L(idx),i,mid);
printTree(R(idx),mid+1,j);
//debug(i,j,vi(tree[idx].val,tree[idx].val+3),tree[idx].lazy);
}
int main(){
ios::sync_with_stdio(false);
int n,m;
cin >> n >> m;
cin >> input;
int tot = 0;
REP(i,0,n){
a[i] = (input[i]-'0')%3;
tot += input[i]-'0';
tot %= 3;
mod3[i] = tot;
//add(tot,i+1,1);
}
build(1,1,n);
REP(i,0,m){
int ty,u,v;
cin >> ty >> u >> v;
if(ty==1){
v %= 3;
//debug("-->",a[u-1],v,"<--");
if(a[u-1]==v) continue;
else{
int delta = (v-a[u-1]+3)%3;
add(1,1,n,u,n,delta);
// REP(j,0,3){
// cur[j] = get(j,n)-get(j,u-1);
// add(j,u,-cur[j]);
// }
// REP(j,0,3) add(j,u,cur[(j+delta)%3]);
// //add(a[u-1],u,-1);
// //add(v,u,1);
a[u-1] = v;
}
}
else{
ll ans = 0;
node res= get(1,1,n,u,v);
REP(j,0,3){
ans += ((ll)res.val[j]*((ll)res.val[j]-1))/2;
}
//debug(vi(res.val,res.val+3));
int prevMod = 0;
if(u>1){
node tmp = get(1,1,n,u-1,u-1);
REP(j,0,3){
if(tmp.val[j]>0){
prevMod = j;
break;
}
}
}
//debug(prevMod);
ans += res.val[prevMod];
cout << ans << endl;
}
//debug('\n');
//printTree(1,1,n);
}
return 0;
}
<file_sep>/LEMUSIC.cpp
#include <stdio.h>
#include <vector>
#include <algorithm>
#include <map>
using namespace std;
typedef unsigned long long x64;
int main()
{
int T,N;
x64 B,L;
scanf("%d",&T);
while(T>0)
{
scanf("%d",&N);
vector<x64> bands,songs;
map<x64,x64> flags;
x64 sweetness=0;
for(int i=0;i<N;++i)
{
scanf("%llu %llu",&B,&L);
if(flags.find(B)==flags.end())
flags[B]=L;
else
{
x64 min=flags[B];
if(L<min)
{
flags[B]=L;
songs.push_back(min);
}
else
{
songs.push_back(L);
}
}
}
map<x64,x64>::iterator itr;
for(itr=flags.begin();itr!=flags.end();++itr)
{
bands.push_back(itr->second);
}
sort(bands.begin(),bands.end());
int count=1;
sweetness=0;
for(vector<x64>::iterator itr2=bands.begin();itr2!=bands.end();++itr2)
{
sweetness+=count*(*itr2);
++count;
}
--count;
for(vector<x64>::iterator itr3=songs.begin();itr3!=songs.end();++itr3)
{
sweetness+=count*(*itr3);
}
printf("%llu\n",sweetness);
--T;
}
}
<file_sep>/REN2013A.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
int T;
scanf("%d",&T);
while(T--){
int A,B,res;
scanf("%d %d",&A,&B);
res=0;
int temp=A;
int base=-1;
while(temp){
base=max(base,temp%10);
temp=temp/10;
}
++base;
temp=A;
int x=1;
while(temp){
res+=((temp%10)*x);
x*=base;
temp/=10;
}
temp=B;
base=-1;
while(temp){
base=max(base,temp%10);
temp=temp/10;
}
++base;
temp=B;
x=1;
while(temp){
res+=((temp%10)*x);
x*=base;
temp/=10;
}
printf("%d\n",res);
}
return 0;
}
<file_sep>/OJUMPS.cpp
#include <bits/stdc++.h>
using namespace std;
typedef long long int LL;
int main(){
LL x;
scanf("%lld",&x);
if(x%6==0 || x%6==1 || x%6==3) printf("yes\n");
else printf("no\n");
return 0;
}
<file_sep>/GIFTCHEF.cpp
#include <bits/stdc++.h>
#define MOD 1000000007
using namespace std;
//typedef unsigned long long ull;
typedef long long int ll;
ll hash(const string & s)
{
ll ret = 0;
for(int i = 0; i < s.size(); ++i){
ret *= 257;
ret += s[i];
ret %= MOD;
}
return ret;
}
ll pow2(ll base, ll exp){
ll res = 1;
while( exp > 0 ){
if( exp & 1 ) res *= base, res %= MOD;
base *= base, base %= MOD;
exp >>= 1;
}
return res;
}
vector<int> rollingHashMatch(const string& needle, const string& haystack)
{
vector<int> isMatch(haystack.size(),0);
ll hash1 = hash(needle);
ll hash2 = 0;
ll maxCoff = pow2( 257, needle.size() );
for (int i = 0; i < haystack.size(); i++){
hash2 = hash2 * 257 + haystack[i];
hash2 %= MOD;
if (i >= needle.size()){
hash2 -= (maxCoff * haystack[i-needle.size()])%MOD;
hash2 += MOD, hash2 %= MOD;
}
if (i >= needle.size()-1 && hash1 == hash2)
isMatch[i] = 1;
}
return isMatch;
}
ll solve(const string& needle, const string& haystack){
int n = needle.size();
vector<int> matches = rollingHashMatch( needle, haystack );
vector<ll> dp(haystack.size(),0), cumulativeDp(haystack.size(),0);
for(int i=0;i<haystack.size();++i){
dp[i] = 0;
if( matches[i] ){
//cout << "HERE" << i << endl;
ll res = 1;
if( i - n >= 0 ) res += cumulativeDp[i-n], res %= MOD;
dp[i] = res;
}
cumulativeDp[i] = dp[i];
if( i > 0 ) cumulativeDp[i] += cumulativeDp[i-1], cumulativeDp[i] %= MOD;
}
return cumulativeDp[haystack.size()-1];
}
int main(){
ios::sync_with_stdio( false );
int T;
cin >> T;
while( T-- ){
string haystack,needle;
cin >> haystack >> needle;
cout << solve( needle, haystack ) << endl;
}
return 0;
}
<file_sep>/CHEFZOT.cpp
#include <bits/stdc++.h>
#define NMAX 111111
using namespace std;
int N;
int main(){
int res,count;
scanf("%d",&N);
res=count=0;
for(int i=0;i<N;++i){
int x;
scanf("%d",&x);
if(x==0) res=max(res,count),count=0;
else ++count;
}
res=max(res,count);
printf("%d\n",res);
return 0;
}
<file_sep>/GUESS.cpp
#include <bits/stdc++.h>
using namespace std;
typedef long long int S64;
int main(){
int T;
scanf("%d",&T);
while(T--){
S64 N,M;
S64 num,denom;
scanf("%lld %lld",&N,&M);
if(N%2==0 || M%2==0) num=1,denom=2;
else{
denom=M*N;
num=denom>>1;
}
printf("%lld/%lld\n",num,denom);
}
}
<file_sep>/CPERM.cpp
#include <bits/stdc++.h>
#define MOD 1000000007
using namespace std;
typedef long long int ll;
ll powmod( ll base, ll exp ){
ll res = 1;
while( exp ){
if( exp & 1 ) res *= base, res %= MOD;
base *= base, base %= MOD;
exp >>= 1;
}
return res;
}
int main(){
ios::sync_with_stdio( false );
int T;
cin >> T;
while( T-- ){
int N;
cin >> N;
if( N < 2 ){
cout << 0 << endl;
continue;
}
cout << (powmod( 2, N-1 )+MOD-2)%MOD << endl;
}
return 0;
}
<file_sep>/CHEFLR.cpp
#include <bits/stdc++.h>
#define NMAX 111111
#define MOD 1000000007
using namespace std;
typedef long long int ll;
char buffer[NMAX];
ll modpow( int base, int exp ){
ll res=1;
ll b = (ll) base;
while( exp > 0 ){
if( exp&1 ) res = ( res * b ) % MOD;
b = ( b * b ) % MOD;
exp >>= 1;
}
return res;
}
int main(){
//cout << modpow(2,4) << " " << modpow(2,5) << " " << modpow(2,6)<< endl;
int T;
scanf("%d\n",&T);
while(T--){
scanf("%s",buffer);
int n = strlen( buffer )+1;
ll ignore = 0;
for(int i=0;i<n;++i){
if( buffer[i] == 'r' ) {
ignore += modpow( 2, n-i-2 );
ignore %= MOD;
}
}
ll upto = 0;
ll start;
if( n%2 ) start = 1;
else start = 2;
while( start < n ){
upto += modpow( 2, start-1 );
upto %= MOD;
start +=2 ;
}
//cout << ignore << " " << upto << endl;
ll res = ( ( ( ignore + upto ) %MOD ) * 2 ) %MOD;
res = ( res + (n%2?1:2) ) %MOD;
printf("%lld\n", res );
}
return 0;
}
<file_sep>/CHEFGR.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
int t;
scanf("%d",&t);
while(t--){
int m,n,bst=-1;
scanf("%d %d",&n,&m);
vector<int> a(n);
for(int i=0;i<n;++i){
scanf("%d",&a[i]);
bst=max(a[i],bst);
}
int res=0;
for(int i=0;i<n;++i) res += (bst-a[i]);
if(m==res) printf("Yes\n");
else if(m>res && (res-m)%n==0) printf("Yes\n");
else printf("No\n");
}
return 0;
}
<file_sep>/TUX03.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
int T;
scanf("%d",&T);
while(T--){
int bestTeam,diff;
diff=1111111;
for(int i=0;i<10;++i){
int small,large;
large=-1;
small=1111111;
for(int j=0;j<15;++j){
int x;
scanf("%d",&x);
small=min(small,x);
large=max(large,x);
}
if(large-small<=diff) diff=large-small,bestTeam=i;
}
printf("%d %d\n",bestTeam+1,diff);
}
return 0;
}
<file_sep>/PHYSICS.cpp
#include <bits/stdc++.h>
#define NMAX 11111
using namespace std;
typedef long long int ll;
typedef pair<int,int> pi;
ll f,s,l;
int n;
vector<ll> a;
int main(){
int T;
scanf("%d",&T);
for(int t=0;t<T;++t){
ll res=0;
l=0;
scanf("%d %lld",&n,&f);
a=vector<ll>(n);
for(int i=0;i<n;++i){
scanf("%lld",&a[i]);
l=max(l,a[i]);
}
sort(a.begin(),a.end());
for(int i=0;i<n;++i){
ll val=a[i]; ll cnt=0;
while(val<=l){
cnt = upper_bound(a.begin(),a.end(),val)-a.begin();
cnt -= lower_bound(a.begin(),a.end(),val)-a.begin();
res += cnt;
val *= f;
}
ll _f=f;
val=a[i];
while(val/_f>=1){
if(val%_f==0){
cnt = upper_bound(a.begin(),a.end(),val/_f)-a.begin();
cnt -= lower_bound(a.begin(),a.end(),val/_f)-a.begin();
res += cnt;
}
_f=_f*f;
}
}
printf("%lld\n",(res-n)/2);
}
return 0;
}
<file_sep>/PRLADDU.cpp
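// Two-pointer greedy: repeatedly matches the leftmost surplus (+) with the
// leftmost deficit (-), paying |distance| * amount moved, and prints the
// total cost.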
#include <bits/stdc++.h>
#define NMAX 111111
using namespace std;
inline int _abs(int x){
return (x>=0?x:(-x));
}
int a[NMAX];
int main(){
int t;
scanf("%d",&t);
while(t--){
int n;
scanf("%d",&n);
for(int i=0;i<n;++i) scanf("%d",&a[i]);
int x,y;
x=y=0;
long long res=0;
while(1){
while(x<n && a[x]<=0) ++x;
while(y<n && a[y]>=0) ++y;
if(x>=n || y>=n ) break;
int tmp=min(a[x],_abs(a[y]));
res += (_abs(x-y)*tmp);
a[x]-=tmp;
a[y]+=tmp;
//cout << x << " " << y <<endl;
}
printf("%lld\n",res);
}
return 0;
}
<file_sep>/ALEXTASK.cpp
#include <bits/stdc++.h>
using namespace std;
int computeGcd( int x, int y ){
int tmp;
while( y > 0 ){
tmp = x%y;
x = y;
y = tmp;
}
return x;
}
int main(){
ios::sync_with_stdio(false);
int T;
cin >> T;
while( T-- ){
int N;
cin >> N;
vector<int> a(N);
for(int i=0;i<N;++i) cin >> a[i];
unsigned long long int res = ULLONG_MAX;
for(int i=0;i<N;++i){
for(int j=i+1;j<N;++j){
int gcd = computeGcd( a[i], a[j] );
unsigned long long int mul, lcm;
if( a[i] % gcd ){
mul = a[i]/gcd;
lcm = a[j]*mul;
}
else{
mul = a[j]/gcd;
lcm = a[i]*mul;
}
res = min( res, lcm );
}
}
cout << res << endl;
}
return 0;
}
<file_sep>/EQUAKE.cpp
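// Segment tree with lazy propagation where every leaf pre-stores the 12
// cyclic digit rotations of its value (values have at most 4 digits, and 12
// is a common multiple of the possible rotation periods). Type 2 rotates the
// digits of all elements in [u,v] f times; type 1 prints the maximum in
// [u,v] under the current rotations.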
#include <bits/stdc++.h>
#define NMAX 800011
#define DMAX 12
#define L(x) (x<<1)
#define R(x) ((x<<1)+1)
#define gc getchar_unlocked
void _scan(int &x)
{
register int c = gc();
x = 0;
for(;(c<48 || c>57);c = gc());
for(;c>47 && c<58;c = gc()) {x = (x<<1) + (x<<3) + c - 48;}
}
using namespace std;
time_t start=clock();
struct node{
int lazy,currPtr;
int best[DMAX];
node(){
lazy=0;
currPtr=0;
}
}tree[NMAX<<2];
int N,M;
int A[NMAX],B[NMAX];
node null;
vector<int> pre[10000];
int cnt[10000];
int st,en,f;
void preCompute(){
for(int i=0;i<10;++i){
pre[i].push_back(i);
for(int j=0;j<11;++j) pre[i].push_back(i);
cnt[i]=1;
}
for(int i=10;i<100;++i){
pre[i].push_back(i);
pre[i].push_back((i%10)*10 + i/10);
for(int j=0;j<5;++j){
pre[i].push_back(pre[i][0]);
pre[i].push_back(pre[i][1]);
}
cnt[i]=2;
}
for(int i=100;i<1000;++i){
pre[i].push_back(i);
pre[i].push_back((i%10)*10 + ((i/10)%10)*100 + i/100);
pre[i].push_back((i%10)*100 + (i/100)*10 + (i/10)%10);
for(int j=0;j<3;++j){
pre[i].push_back(pre[i][0]);
pre[i].push_back(pre[i][1]);
pre[i].push_back(pre[i][2]);
}
cnt[i]=3;
}
for(int i=1000;i<10000;++i){
pre[i].push_back(i);
pre[i].push_back((i%10)*10 + ((i/10)%10)*100 + ((i/100)%10)*1000 + i/1000);
pre[i].push_back((i%10)*100 + ((i/10)%10)*1000 + ((i/100)%10) + (i/1000)*10);
pre[i].push_back((i%10)*1000 + ((i/10)%10) + ((i/100)%10)*10 + (i/1000)*100);
for(int j=0;j<2;++j){
pre[i].push_back(pre[i][0]);
pre[i].push_back(pre[i][1]);
pre[i].push_back(pre[i][2]);
pre[i].push_back(pre[i][3]);
}
cnt[i]=4;
}
}
inline node merge( node x, node y ){
node z;
for(int i=0;i<DMAX;++i)
z.best[i]=max(x.best[(x.currPtr+i)%DMAX],y.best[(y.currPtr+i)%DMAX]);
return z;
}
inline void setVal(int index,int i,int j,int val){
int cnt=val%DMAX;
tree[index].currPtr += cnt;
tree[index].currPtr %= DMAX;
if(i!=j){
tree[L(index)].lazy += cnt;
tree[L(index)].lazy %= DMAX;
tree[R(index)].lazy += cnt;
tree[R(index)].lazy %= DMAX;
}
}
inline void propagate(int index,int i,int j){
if(tree[index].lazy){
setVal(index,i,j,tree[index].lazy);
tree[index].lazy=0;
}
}
inline void build(int index,int i,int j){
tree[index].currPtr=tree[index].lazy=0;
if(i==j){
tree[index].currPtr=0;
for(int k=0;k<DMAX;++k)
tree[index].best[k]=pre[A[i]][k];
return;
}
int mid=(i+j)>>1;
build(L(index),i,mid);
build(R(index),mid+1,j);
tree[index]=merge(tree[L(index)],tree[R(index)]);
}
inline void update(int index,int i,int j){
propagate(index,i,j);
if(i>=st && j<=en){
setVal(index,i,j,f);
return;
}
if(i>en || j<st) return;
int mid=(i+j)>>1;
update(L(index),i,mid);
update(R(index),mid+1,j);
tree[index]=merge(tree[L(index)],tree[R(index)]);
}
inline int query(int index,int i,int j){
propagate(index,i,j);
if(i>=st && j<=en){
return tree[index].best[tree[index].currPtr];
}
if(i>en || j<st) return 0;
int mid=(i+j)>>1;
int x=query(L(index),i,mid);
int y=query(R(index),mid+1,j);
tree[index]=merge(tree[L(index)],tree[R(index)]);
return max(x,y);
}
void printTree(int index,int i,int j){
if(i==j){
cout<<i<<","<<j<<" ";
for(int k=tree[index].currPtr;(k+1)%DMAX!=tree[index].currPtr;k=(k+1)%DMAX) cout<<tree[index].best[k]<<" ";
cout<<"--"<<tree[index].lazy<<" "<<tree[index].currPtr<<endl;
return;
}
int mid=(i+j)>>1;
printTree(L(index),i,mid);
printTree(R(index),mid+1,j);
cout<<i<<","<<j<<" ";
for(int k=tree[index].currPtr;(k+1)%DMAX!=tree[index].currPtr;k=(k+1)%DMAX) cout<<tree[index].best[k]<<" ";
cout<<"--"<<tree[index].lazy<<" "<<tree[index].currPtr<<endl;
}
void debug(){
#ifdef deb
cout<<endl<< "---- DEBUGGIING ----"<<endl;
printTree(1,1,N);
cout<<endl<< "--- END ------" <<endl;
#endif
}
int main(){
// freopen("1.in","r",stdin);
//freopen("5.out","w",stdout);
preCompute();
//scanf("%d",&N);
_scan(N);
//N=fi.ReadNext();
for(int i=1;i<=N;++i){
//scanf("%d",&A[i]);
_scan(A[i]);
//A[i]=fi.ReadNext();
//B[i]=cnt[A[i]];
}
build(1,1,N);
//scanf("%d",&M);
_scan(M);
//M=fi.ReadNext();
for(int i=0;i<M;++i){
int type,u,v;
//scanf("%d %d %d",&type,&u,&v);
//type=fi.ReadNext();u=fi.ReadNext();v=fi.ReadNext();
_scan(type);_scan(u);_scan(v);
++u,++v;
if( type == 1 ){
//fo.PrintUint(query(u,v),'\n');
st=u;en=v;
printf("%d\n",query(1,1,N));
}
else{
//scanf("%d",&f);
//f=fi.ReadNext();
_scan(f);
st=u;en=v;
update(1,1,N);
//update(u,v,f);
}
}
//fo.Flush();
//fprintf(stderr,"%.3lf\n",(((double)clock())-start)/CLOCKS_PER_SEC);
return 0;
}
<file_sep>/FUNC.cpp
#include <bits/stdc++.h>
#define NMAX 11111
#define MOD 1000000007
#define pb push_back
using namespace std;
typedef long long int ll;
const ll MAXVAL = 1000000000000000000LL;
vector<ll> powers[65];
void pre_compute( ) {
for( int i = 3; i<65; ++i ) powers[i].pb( (ll)1 );
for( ll i = 2; i<=1000000; ++i ) {
ll cnt,prod;
cnt = 2;
prod = i*i;
while( i <= MAXVAL / prod ) {
++cnt;
prod = prod * i;
powers[cnt].pb( prod );
}
}
}
ll power( ll n, ll exp ) {
ll res = 1;
while( exp > 0 ){
if( exp%2 ) res = res * n, exp = exp-1;
else n = n * n, exp = exp>>1;
}
return res;
}
int main() {
pre_compute();
int T;
scanf( "%d", &T );
while( T-- ) {
int N, Q;
scanf( "%d %d", &N, &Q );
vector<ll> A(N+5), B(N+5);
B[0]=0;
for(int i=1;i<=N;++i) {
scanf("%lld",&A[i]);
B[i]=B[i-1]+A[i];
A[i]=A[i]%MOD;
if( A[i] < 0 ) A[i] += MOD;
}
for(int i=0;i<Q;++i){
ll x,res;
int j;
scanf("%lld",&x);
res=0;
for(j=1;j<=N;++j){
ll root;
if( j==1 ) root = x;
else if( j==2 ) root = (ll) sqrt(x);
else{
ll index = (ll) ( upper_bound( powers[j].begin(), powers[j].end(), x ) - powers[j].begin() + 3 );
while( pow(index, j) > x ) --index;
root = index;
}
if( root == 1 ) break;
root = root % MOD;
res = ( res + ( root * A[j] ) %MOD ) % MOD;
}
ll temp = ( B[N] - B[j-1] ) % MOD;
if( temp < 0 ) temp += MOD;
res = ( res + temp ) % MOD;
if( res < 0 ) res += MOD;
printf("%lld",res);
if(i < Q-1 ) printf(" ");
}
printf("\n");
}
return 0;
}
<file_sep>/FATCHEF.cpp
#include <bits/stdc++.h>
#define NMAX 111111
#define MOD 1000000009
using namespace std;
typedef pair<int,char> pic;
pic a[NMAX];
int main(){
int t;
char buf[3];
scanf("%d",&t);
while(t--){
int n,m;
scanf("%d %d",&n,&m);
for(int i=0;i<m;++i){
scanf("%s %d",buf,&a[i].first);
a[i].second=buf[0];
}
sort(a,a+m);
long long res=1;
for(int i=0;i<m-1;++i){
pic next=a[i+1];
if(next.second!=a[i].second){
res *= (next.first-a[i].first);
res %= MOD;
}
}
printf("%lld\n",res);
}
return 0;
}
<file_sep>/JOHNY.cpp
#include <cstdio>
#define NMAX 111
using namespace std;
int input[NMAX];
int main(){
int T;
scanf("%d",&T);
while(T>0){
int N,K,count=0;
scanf("%d",&N);
for(int i=0;i<N;++i) scanf("%d",&input[i]);
scanf("%d",&K);
for(int i=0;i<N;++i) if(input[i]<input[K-1]) ++count;
printf("%d\n",count+1);
--T;
}
return 0;
}
<file_sep>/CHODE.cpp
#include <cstring>
#include <cstdio>
#include <vector>
#include <algorithm>
#include <iostream>
#define NMAX 155555
using namespace std;
typedef pair<int,char> pic;
vector<pic> calc;
char input[NMAX],freq[32],output[NMAX]; // freq needs room for the 26-letter key plus the null byte written by gets()
int pos[26];
inline bool alpha(char x){
if(x>=65 && x<=90) return true;
if(x>=97 && x<=122) return true;
return false;
}
void solve(){
int len=strlen(input);
calc=vector<pic>(26,pic(0,' '));
for(int i=0;i<26;++i) calc[i].second='A'+i;
//puts(input);
for(int i=0;i<len;++i){
if(alpha(input[i]))
{
char t=input[i];
if(input[i]>=97) t=t-'a'+'A';
calc[t-'A'].first+=1;
//printf("\nHERE");
}
}
sort(calc.begin(),calc.end());
for(int i=0;i<26;++i){
pos[calc[i].second-'A']=i;
}
for(int i=0;i<len;++i){
if(alpha(input[i])){
char t=input[i];
int temp=(t>=97)?t-'a':t-'A';
output[i]=(t>=97?freq[pos[temp]]:freq[pos[temp]]-'a'+'A');
}
else output[i]=input[i];
}
}
int main(){
int T;
scanf("%d\n",&T);
while(T>0){
memset(input,'\0',sizeof(input));
memset(freq,'\0',sizeof(freq));
memset(output,'\0',sizeof(output));
gets(freq);
gets(input);
solve();
puts(output);
--T;
}
return 0;
}
<file_sep>/MCHAIRS.cpp
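// Answer is 2^N - 1 modulo 1e9+7, computed with fast modular exponentiation.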
#include <cstdio>
#define NMAX 100000005
#define MOD 1000000007
using namespace std;
typedef long long int x64;
x64 compute(int exp){
x64 res=1;
x64 base=2;
while(exp>0){
if(exp%2)
res=(res*base)%MOD;
exp=exp>>1;
base=(base*base)%MOD;
}
return res;
}
int main(){
int T;
scanf("%d",&T);
while(T>0){
int N;
scanf("%d",&N);
printf("%lld\n",compute(N)-1);
--T;
}
return 0;
}
<file_sep>/CHEFKEY.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
ios::sync_with_stdio(false);
int T;
cin >> T;
while( T-- ){
int n,m,c, res = 0;
cin >> n >> m >> c;
for(int i=1; i*i <= c; ++i ){
if( c%i == 0 ){
int h = i, w = c/i;
if( h <= n && w <= m ) ++res;
if( h != w ){
swap( h, w );
if( h <= n && w <= m ) ++res;
}
}
}
cout << res << endl;
}
}
<file_sep>/CHEFPATH.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
ios::sync_with_stdio(false);
int T;
cin >> T;
while( T-- ){
long long int N,M;
cin >> N >> M;
if( (N==1 && M==2) || (N==2 && M==1))
cout << "Yes" << endl;
else if( N==1 || M==1 )
cout << "No" << endl;
else if( N%2==0 || M%2==0 )
cout << "Yes" << endl;
else
cout << "No" << endl;
}
return 0;
}
<file_sep>/MAXPR.cpp
#include <bits/stdc++.h>
#define NMAX 222222
#define MOD 1000000007
#define gc getchar_unlocked
using namespace std;
typedef long int ll;
int input[NMAX];
int N;
ll sum[111],pow2[NMAX];
inline void scan(int &x)
{
register int c = gc();
x = 0;
int neg = 0;
for(;((c<48 || c>57) && c != '-');c = gc());
if(c=='-') {neg=1;c=gc();}
for(;c>47 && c<58;c = gc()) {x = (x<<1) + (x<<3) + c - 48;}
if(neg) x=-x;
}
void solve() {
ll total = 0;
ll subs = pow2[N];
for(int diff=-100; diff<=100; ++diff){
ll curr=0;
memset(sum,0,sizeof(sum));
for(int i=0;i<N;++i ){
ll res = 0;
if(input[i]-diff >= 0 && input[i]-diff <= 100 )
res = ( sum[input[i]-diff] );
if( res>=MOD ) res=res%MOD;
sum[input[i]] = ( sum[input[i]] + res + 1 );
if( sum[input[i]] >= MOD ) sum[input[i]] = sum[input[i]] %MOD;
curr = ( curr + res + 1);
if( curr >= MOD ) curr = curr % MOD;
}
curr = ( curr - N ) % MOD;
if( curr < 0 ) curr += MOD;
total = ( curr + total );
if( total >= MOD ) total = total % MOD;
}
total = ( total + N );
if( total >= MOD ) total = total % MOD;
subs = (subs - 1 - total)%MOD;
if( subs<0 ) subs += MOD;
printf("%ld\n",subs);
}
int main(){
int T;
//scanf("%d",&T);
scan(T);
pow2[0]=1;
for(int i=1;i<=200000; ++i){
pow2[i]=pow2[i-1]<<1;
if(pow2[i]>=MOD) pow2[i]=pow2[i]%MOD;
}
while( T-- ) {
//scanf("%d",&N);
scan(N);
for(int i=0;i<N;++i ){
//scanf("%d",&input[i]) ;
scan(input[i]);
}
solve();
}
return 0;
}
<file_sep>/CHDOGS.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
ios::sync_with_stdio( false );
int T;
cin >> T;
while( T-- ){
int s,v;
cin >> s >> v;
double tmp = (2.0/3.0) * s;
cout << setprecision (6) << fixed << tmp/v << endl;
}
}
<file_sep>/SGARDEN.cpp
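// Decomposes the permutation into cycles; the answer is the LCM of all
// cycle lengths modulo 1e9+7, assembled from the largest power of each
// prime that appears in any cycle length.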
#include <bits/stdc++.h>
#define NMAX 111111
#define MOD 1000000007
using namespace std;
vector<int> primes;
vector<int> factors[NMAX];
int flag[NMAX];
int next[NMAX];
int highest[NMAX];
void sieve(){
memset(flag,0,sizeof(flag));
flag[0]=flag[1]=1;
for(int i=2;i<NMAX;++i){
if(flag[i]==0){
primes.push_back(i);
highest[i]=1;
for(int j=i;j<NMAX;j+=i){
flag[j]=1;
factors[j].push_back(i);
}
}
}
}
int main(){
sieve();
int T;
scanf("%d",&T);
while(T--){
int N;
scanf("%d",&N);
for(int i=0;i<N;++i){
scanf("%d",&next[i+1]);
flag[i+1]=0;
}
int cnt=0;
vector<int> val;
for(int i=0;i<primes.size();++i){
if(primes[i]>N) break;
highest[primes[i]]=1;
}
for(int i=1;i<=N;++i){
if(flag[i]==0){
int pos=i;
int cnt=0;
do{
flag[pos]=1;
pos=next[pos];
++cnt;
}while(pos!=i);
val.push_back(cnt);
}
}
for(int i=0;i<val.size();++i){
int x=val[i];
for(int j=0;j<factors[x].size();++j){
int temp=x;
int u=factors[x][j];
int cnt=1;
while(temp%u==0) temp=temp/u,cnt=cnt*u;
highest[u]=max(highest[u],cnt);
}
}
long long int res=1;
for(int i=0;i<primes.size();++i){
if(primes[i]>N) break;
res = ( res * (long long int)highest[primes[i]] ) % MOD;
}
printf("%lld\n",res);
}
return 0;
}
<file_sep>/CHSQR.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
ios::sync_with_stdio( false );
int T;
cin >> T;
while( T-- ){
int K;
cin >> K;
if( K == 1 ){
cout << 1 << endl;
continue;
}
vector<vector<int> > a(K, vector<int>(K));
for(int i=0;i<K;++i)
for(int j=0;j<K;++j)
a[i][j] = (j-i+K)%K;
for(int i=0,j=K/2-1;i<j;++i,--j)
swap( a[i], a[j] );
for(int i=K/2,j=K-1;i<j;++i,--j)
swap( a[i], a[j] );
//swap( a[K/2], a[K/2+1] );
for(int i=0;i<K;++i){
for(int j=0;j<K-1;++j)
cout << a[i][j] + 1 << " ";
cout << a[i][K-1] + 1 << endl;
}
}
return 0;
}
<file_sep>/SSTORY.cpp
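// Builds a suffix automaton of the first string and walks the second string
// through it to find the longest common substring; prints the substring and
// its length, or 0 if the strings share no common substring.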
#include <bits/stdc++.h>
#define MAXLEN 555555
using namespace std;
struct state{
int len,link;
map<char,int> next;
};
state st[MAXLEN];
int sz,last;
int v,l,bestpos,best;
void sa_init(){
sz=last=0;
st[0].link=-1;
st[0].len=0;
++sz;
}
void sa_extend(char c){
int cur=sz++;
st[cur].len=st[last].len+1;
int p;
for(p=last;p!=-1 && !st[p].next.count(c); p=st[p].link)
st[p].next[c]=cur;
if(p==-1)
st[cur].link=0;
else{
int q=st[p].next[c];
if(st[q].len== st[p].len+1)
st[cur].link=q;
else{
int clone=sz++;
st[clone].len=st[p].len+1;
st[clone].next=st[q].next;
st[clone].link=st[q].link;
for(;p!=-1 && st[p].next[c]==q;p=st[p].link)
st[p].next[c]=clone;
st[q].link=st[cur].link=clone;
}
}
last=cur;
}
string lcs(string s,string t){
sa_init();
for(int i=0;i<s.length();++i)
sa_extend(s[i]);
v=l=bestpos=best=0;
for(int i=0;i<t.length();++i){
while(v && !st[v].next.count(t[i])){
v=st[v].link;
l=st[v].len;
}
if(st[v].next.count(t[i])){
v=st[v].next[t[i]];
++l;
}
if(l>best)
best=l,bestpos=i;
}
return t.substr(bestpos-best+1,best);
}
int main(){
string s,t,res;
cin>>s>>t;
res=lcs(s,t);
if(best>0)
cout<<res<<endl<<best<<endl;
else
cout<<0;
return 0;
}
<file_sep>/REVERSE.cpp
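// Minimum number of edges to reverse so that node 1 can reach node N:
// every edge costs 0 in its original direction and 1 when traversed
// backwards, then a set-based Dijkstra (0-1 shortest path) runs from node 1.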
#include <bits/stdc++.h>
#define NMAX 111111
#define gc getchar
//#define debug 1
#define inf NMAX<<2
using namespace std;
typedef pair<int,int> pii;
time_t start=clock();
inline void _scan(int &x)
{
register int c = gc();
x = 0;
for(;((c<48 || c>57) && c != '-');c = gc());
for(;c>47 && c<58;c = gc()) {x = (x<<1) + (x<<3) + c - 48;}
}
vector<pii> adjList[NMAX];
int dist[NMAX],visited[NMAX];
void genTest(){
FILE *fp=freopen("1.in","w",stdout);
printf("%d %d\n",100000,100000);
for(int i=0;i<100000;++i){
int u=rand()%100000+1;
int v=rand()%100000+1;
printf("%d %d\n",u,v);
}
fclose(fp);
}
int N,M;
int main(){
//scanf("%d %d",&N,&M);
#ifdef debug
genTest();
start=clock();
freopen("1.in","r",stdin);
#endif
_scan(N);
_scan(M);
for(int i=0;i<M;++i){
int u,v;
//scanf("%d %d",&u,&v);
_scan(u);
_scan(v);
if(u==v) continue;
adjList[u].push_back(pii(v,0));
adjList[v].push_back(pii(u,1));
}
for(int i=1;i<=N;++i) dist[i]=inf;
set<pii> q;
q.insert(pii(0,1));
while(!q.empty()){
pii best=*q.begin();
q.erase(q.begin());
int u=best.second;
int cst=best.first;
if(u==N){
printf("%d\n",cst);
//#ifdef debug
// fprintf(stderr,"%.5lf",((double)clock()-start)/CLOCKS_PER_SEC);
// assert(((double)clock()-start)/CLOCKS_PER_SEC<1.0);
//#endif
return 0;
}
visited[u]=1;
for(int i=0;i<adjList[u].size();++i){
int v=adjList[u][i].first;
int w=adjList[u][i].second;
if(visited[v]) continue;
if(dist[v]>w+cst){
q.erase(pii(dist[v],v));
dist[v]=w+cst;
q.insert(pii(dist[v],v));
}
}
}
printf("-1\n");
//#ifdef debug
// fprintf(stderr,"%.5lf",((double)clock()-start)/CLOCKS_PER_SEC);
// assert(((double)clock()-start)/CLOCKS_PER_SEC<1.0);
//#endif
return 0;
}
<file_sep>/TRIQUERY.cpp
#include <iostream>
#include <cstdio>
#include <cstring>
#include <algorithm>
#define NMAX 999999
#define TX 0
#define TY 1
using namespace std;
struct query{
int x,y,type;
int d,index;
}q[NMAX];
int res[NMAX/3],tree[NMAX][2];
int N,Q;
void update(int idx,int val,int t){
for(int i=idx;i<NMAX;i+=(i&-i))
tree[i][t]+=val;
}
bool comp(const query &l,const query &r){
if(l.x+l.y<r.x+r.y) return true;
if((l.x+l.y==r.x+r.y) && (l.type<r.type)) return true;
return false;
}
int read(int idx,int t){
int res=0;
for(int i=idx;i>0;i-=(i&-i))
res+=tree[i][t];
return res;
}
int main(){
int qptr=0;
scanf("%d %d",&N,&Q);
for(int i=0;i<N;++i){
scanf("%d %d",&q[qptr].x,&q[qptr].y);
q[qptr].type=0;
++qptr;
}
for(int i=0;i<Q;++i){
scanf("%d %d %d",&q[qptr].x,&q[qptr].y,&q[qptr].d);
q[qptr].index=i;
q[qptr].type=-1;
++qptr;
q[qptr].x=q[qptr-1].x,q[qptr].y=q[qptr-1].y+q[qptr-1].d,q[qptr].d=q[qptr-1].d,q[qptr].index=q[qptr-1].index;
q[qptr].type=1;
++qptr;
}
sort(q,q+qptr,comp);
memset(tree,0,sizeof(tree));
int pts=0;
for(int i=0;i<qptr;++i){
query qry=q[i];
//printf("%d %d %d\n",qry.x,qry.y,qry.type);
if(qry.type==0){
update(qry.x,+1,TX);
update(qry.y,+1,TY);
++pts;
}
else if(qry.type==-1){
res[qry.index]=read(qry.x-1,TX)+read(qry.y-1,TY)-pts;
//printf("%d\n",res[qry.index]);
}
else{
res[qry.index]+=pts-read(qry.x-1,TX)-read(qry.y-qry.d-1,TY);
//printf("%d\n",res[qry.index]);
}
}
for(int i=0;i<Q;++i) printf("%d\n",res[i]);
return 0;
}
<file_sep>/CLMBSTRS.c
#include <stdio.h>
#define MAX 1000001
unsigned long array[MAX];
unsigned long mod=1000000007;
int countOnes(unsigned long int n);
int main()
{
unsigned int T,G[100000],i,j;
unsigned long n[100000];
#ifdef _MSC_VER
freopen("input.txt","r",stdin);
#endif
array[0]=1;
array[1]=1;
scanf("%u",&T);
for(i=2;i<MAX;++i)
{
array[i]=array[i-1]+array[i-2];
if(array[i]>mod)
array[i]=array[i]%mod;
}
for(i=0;i<T;++i)
{
scanf("%lu %u",&n[i],&G[i]);
}
for(j=0;j<T;++j)
{
if(countOnes(array[n[j]])==G[j])
printf("\nCORRECT");
else
printf("\nINCORRECT");
}
return 0;
}
int countOnes(unsigned long int n)
{
unsigned long temp=n;
int count=0;
while(temp>0)
{
if(temp%2)
++count;
temp=temp/2;
}
return count;
}
<file_sep>/ERROR.cpp
#include <bits/stdc++.h>
#define NMAX 111111
using namespace std;
char input[NMAX];
int main(){
int T;
scanf("%d",&T);
while(T>0){
scanf("%s",input);
int len=strlen(input);
int flag=0;
for(int i=0;i<len-2;++i){
if((input[i]=='0'&&input[i+1]=='1'&&input[i+2]=='0')||(input[i]=='1'&&input[i+1]=='0'&&input[i+2]=='1'))
{
flag=1;
break;
}
}
if(flag) printf("Good\n");
else printf("Bad\n");
--T;
}
return 0;
}
<file_sep>/URBANDEV.cpp
#include <bits/stdc++.h>
#define NMAX 100002
using namespace std;
typedef pair<int,int> pii;
typedef pair<pair<int,int>,int> ppi;
vector<ppi> xlines[NMAX];
vector<ppi> ylines[NMAX];
bool eventCompare( ppi i, ppi j ){
return (i.first.first == j.first.first? i.first.second < j.first.second: i.first.first < j.first.first);
}
int bitTree[NMAX<<3];
inline void add( int index, int val ){
for(int i=index; i<(NMAX<<3); i+= (i&-i))
bitTree[i] += val;
}
inline int get( int index ){
int result = 0;
for(int i=index; i > 0; i-= (i&-i))
result += bitTree[i];
return result;
}
int signals[NMAX];
long long int counter;
void solve(){
memset(bitTree, 0, sizeof(bitTree));
vector<ppi> events;
for(int i=1;i<NMAX;++i){
for(int j=0;j<xlines[i].size();++j){
events.push_back(ppi(pii(xlines[i][j].first.first,1),i));
events.push_back(ppi(pii(xlines[i][j].first.first,4),i));
events.push_back(ppi(pii(xlines[i][j].first.first,6),i));
events.push_back(ppi(pii(xlines[i][j].first.second,3),i));
}
}
for(int i=1;i<NMAX;++i){
for(int j=0;j<ylines[i].size();++j){
events.push_back(ppi(pii(i,2),j));
events.push_back(ppi(pii(i,5),j));
}
}
sort(events.begin(), events.end(), eventCompare);
for(int i=0;i<events.size();++i){
int xval = events[i].first.first;
int type = events[i].first.second;
if( type == 3 || type == 4 ){
int yval = events[i].second;
add( yval, -1 );
}
else if( type == 1 || type == 6 ){
add( events[i].second, +1 );
}
else if( type == 2 ){
int j = events[i].second;
int l = ylines[xval][j].first.first + 1;
int r = ylines[xval][j].first.second - 1;
int ptr = ylines[xval][j].second;
if(l<=r){
int res = get(r)-get(l-1);
counter += res;
signals[ptr] += res;
}
}
else{
int j = events[i].second;
int l = ylines[xval][j].first.first;
int r = ylines[xval][j].first.second;
int ptr = ylines[xval][j].second;
int res = get(r)-get(r-1);
counter += res;
signals[ptr] += res;
res = get(l)-get(l-1);
counter += res;
signals[ptr] += res;
}
}
}
int main(){
ios::sync_with_stdio(false);
int N;
cin >> N;
for(int i=0;i<N;++i){
int x1,y1,x2,y2;
cin >> x1 >> y1 >> x2 >> y2;
if( x1 == x2 )
ylines[x1].push_back(ppi(pii(min(y1,y2),max(y1,y2)),i));
else
xlines[y1].push_back(ppi(pii(min(x1,x2),max(x1,x2)),i));
}
counter = 0;
solve();
long long int signalCount = counter;
for(int i=1;i<NMAX;++i){
swap( xlines[i], ylines[i] );
}
solve();
cout << signalCount << endl;
for(int i=0;i<N-1;++i) cout << signals[i] << " ";
cout << signals[N-1] << endl;
return 0;
}
<file_sep>/DISHOWN.cpp
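// Disjoint-set union where each set's representative keeps the best score.
// Query "0 u v" merges the lower-scoring set into the higher-scoring one
// (prints "Invalid query!" if u and v are already in the same set);
// query "1 u" prints the representative of u's set.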
#include <bits/stdc++.h>
#define NMAX 11111
using namespace std;
int par[NMAX],best[NMAX],sz[NMAX];
int N;
int find(int x){
if( par[x] == -1) return x;
	else return par[x]=find(par[x]);
}
void join(int x,int y){
par[x]=y;
best[y]=max(best[y],best[x]);
}
int main(){
int T;
scanf("%d",&T);
while(T--){
scanf("%d",&N);
for(int i=1;i<=N;++i){
par[i]=-1;
sz[i]=1;
scanf("%d",&best[i]);
}
int Q;
scanf("%d",&Q);
for(int i=0;i<Q;++i){
int t;
scanf("%d",&t);
if(t==0){
int u,v;
scanf("%d %d",&u,&v);
int x,y;
x=find(u);
y=find(v);
if(x==y) printf("Invalid query!\n");
else if( best[x]<best[y] ) join(x,y);
else if( best[y]<best[x] ) join(y,x);
}
else{
int u;
scanf("%d",&u);
printf("%d\n",find(u));
}
}
}
return 0;
}
<file_sep>/CHEFSQUA.cpp
#include <bits/stdc++.h>
#define NMAX 2222
#define x first
#define y second
using namespace std;
typedef pair<int,int> pi;
typedef long long int ll;
typedef pair<pi,pi> ppii;
pi a[NMAX];
set<pi> check;
inline ll dist(int x1,int y1,int x2,int y2){
ll res = (x1-x2);
res *= (x1-x2); res += (y1-y2)*(y1-y2);
return res;
}
int main(){
int n;
scanf("%d",&n);
for(int i=0;i<n;++i){
scanf("%d %d",&a[i].x,&a[i].y);
check.insert(a[i]);
}
assert(n==check.size());
if(n==0){
printf("%d\n",4);
return 0;
}
else if(n==1){
printf("%d\n",3);
return 0;
}
int x1,x2,y1,y2;
int best=0;
ll a2;
ppii p1,p2;
pi t1,t2,t3,t4;
int above,below;
for(int i=0;i<n;++i){
for(int j=i+1;j<n;++j){
x1=a[i].x;x2=a[j].x;y1=a[i].y;y2=a[j].y;
a2 = dist(x1,y1,x2,y2);
t1=pi(x1+(y2-y1),y1+(x1-x2));
t2=pi(x1-(y2-y1),y1-(x1-x2));
t3=pi(x2+(y2-y1),y2+(x1-x2));
t4=pi(x2-(y2-y1),y2-(x1-x2));
if(dist(t1.x,t1.y,t3.x,t3.y)==a2){
p1.x=t1; p1.y=t3; p2.x=t2; p2.y=t4;
}
else{
assert(dist(t1.x,t1.y,t4.x,t4.y)==a2);
p1.x=t1; p1.y=t4; p2.x=t2; p2.y=t3;
}
above=below=0;
if(check.find(p1.x)!=check.end()) ++above;
if(check.find(p1.y)!=check.end()) ++above;
if(check.find(p2.x)!=check.end()) ++below;
if(check.find(p2.y)!=check.end()) ++below;
best=max(best,max(above,below)+2);
}
}
printf("%d\n",4-best);
return 0;
}
<file_sep>/TUX01.cpp
#include <bits/stdc++.h>
#define NMAX 1111111
#define L(x) (x<<1)
#define R(x) ((x<<1)+1)
using namespace std;
typedef long long int LL;
struct node{
LL val,freq;
node(LL v,LL f){
val=v,freq=f;
}
node(){
val=-1;
freq=0;
}
};
node tree[NMAX<<3];
LL input[NMAX];
node merge(const node &x,const node &y){
if(x.val==-1 && y.val==-1)
return node(-1,0);
else if(x.val==-1)
return node(y.val,y.freq);
else if(y.val==-1)
return node(x.val,x.freq);
else{
if(x.val==y.val) return node(x.val,x.freq+y.freq);
else if(x.val<y.val) return node(x.val,x.freq);
else return node(y.val,y.freq);
}
}
void init(LL index,LL i,LL j){
if(i==j){
if(input[i-1]%2) tree[index]=node(-1,0);
else tree[index]=node(input[i-1],1);
return;
}
LL mid=(i+j)>>1;
init(L(index),i,mid);
init(R(index),mid+1,j);
tree[index]=merge(tree[L(index)],tree[R(index)]);
}
node query(LL index,LL i,LL j,LL left,LL right){
if(i==left && j==right)
return tree[index];
else if(i>right || j<left)
return node(-1,0);
LL mid=(i+j)>>1;
if(right<=mid) return query(L(index),i,mid,left,right);
else if(left>mid) return query(R(index),mid+1,j,left,right);
else return merge(query(L(index),i,mid,left,mid),query(R(index),mid+1,j,mid+1,right));
}
void update(LL index,LL i,LL j,LL pos,LL val){
if(i==j && i==pos){
if(val%2) tree[index]=node(-1,0);
else tree[index]=node(val,1);
return;
}
LL mid=(i+j)>>1;
if(pos<=mid) update(L(index),i,mid,pos,val);
else update(R(index),mid+1,j,pos,val);
tree[index]=merge(tree[L(index)],tree[R(index)]);
}
/*void printTree(LL index,LL i,LL j){
	if(i==j){
		printf("%lld,%lld - %lld %lld\n",i,j,tree[index].val,tree[index].freq);
		return;
	}
	LL mid=(i+j)>>1;
	printTree(L(index),i,mid);
	printTree(R(index),mid+1,j);
	printf("%lld,%lld - %lld %lld\n",i,j,tree[index].val,tree[index].freq);
}*/
void swap(LL &x,LL &y){
LL temp=x;
x=y;
y=temp;
}
int main(){
LL N,M;
scanf("%lld %lld",&N,&M);
for(LL i=0;i<N;++i) scanf("%lld",&input[i]);
init(1,1,N);
for(LL i=0;i<M;++i) {
LL type,u,v;
scanf("%lld %lld %lld",&type,&u,&v);
node res;
switch(type){
case 1 : res=query(1,1,N,u,v);
if(res.val==-1) res.freq=0;
printf("%lld %lld\n",res.val,res.freq);
break;
case 2 : update(1,1,N,u,v);
break;
}
}
return 0;
}
<file_sep>/LCH15JAB.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
ios::sync_with_stdio(false);
int T;
cin >> T;
while(T--){
string s;
cin >> s;
int f=0;
if(s.size()%2) {
cout << "NO" << endl;
continue;
}
for(int i=0;i<26;++i){
int cnt = 0;
for(int j=0; j<s.size(); ++j)
if(s[j]==i+'a') ++cnt;
if(cnt == s.size()/2){
cout << "YES" << endl;
f=1;
break;
}
}
if(!f)
cout << "NO" << endl;
}
}
<file_sep>/ROTATION.cpp
#include <bits/stdc++.h>
#define NMAX 111111
using namespace std;
int A[NMAX];
int start,end;
int main(){
int N,M;
scanf("%d %d",&N, &M);
start = 0;
for(int i=0;i<N;++i)
scanf("%d",&A[i]);
for(int i=0;i<M;++i){
char buf[5];
int x;
scanf("%s %d",buf,&x);
switch(buf[0]){
case 'R' : printf( "%d\n", A[ ( start + x - 1 + N ) % N] );
break;
case 'C' : start = ( start + x ) % N;
break;
case 'A' : start = ( start - x + N ) % N;
break;
}
//cout << start << "-- " << endl;
}
return 0;
}
<file_sep>/CAOS1.cpp
#include <cstdio>
#include <iostream>
using namespace std;
const int MAX=51;
char array[MAX][MAX];
int R,C;
void print()
{
for(int i=0;i<R;++i)
for(int j=0;j<C;++j)
printf("\n%d,%d : %c",i,j,array[i][j]);
}
int find(int x,int y)
{
int i,j;
int up,down,left,right;
up=down=left=right=0;
i=x-1;
while(i>=0)
{
if(array[i][y]=='#') break;
++up;
--i;
}
i=x+1;
while(i<R)
{
if(array[i][y]=='#') break;
++down;
++i;
}
j=y-1;
while(j>=0)
{
if(array[x][j]=='#') break;
++left;
--j;
}
j=y+1;
while(j<C)
{
if(array[x][j]=='#') break;
++right;
++j;
}
return min(left,min(right,min(up,down)));
}
int solve()
{
int count=0;
for(int i=0;i<R;++i)
for(int j=0;j<C;++j)
if(array[i][j]=='^' && find(i,j)>=2) ++count;
return count;
}
int main()
{
int T;
scanf("%d",&T);
while(T>0)
{
scanf("%d %d",&R,&C);
for(int i=0;i<R;++i)
scanf("%s",array[i]);
printf("%d\n",solve());
--T;
}
return 0;
}
<file_sep>/CHEFRRUN.cpp
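// CHEFRRUN: build the functional graph i -> (i + A[i] + 1) mod N and count the vertices lying
// on a cycle: self-loops are counted directly, longer cycles via Tarjan's strongly connected
// components (every member of an SCC of size > 1 is on a cycle).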
#include <bits/stdc++.h>
#define NMAX 111111
using namespace std;
vector<int> adj[NMAX];
stack<int> connectedVertices;
int disc[NMAX],low[NMAX],onstack[NMAX];
int result = 0;
int cdisc = 1;
void dfs( int u ){
disc[u] = cdisc;
low[u] = cdisc++;
onstack[u] = 1;
connectedVertices.push( u );
for(int i=0;i<adj[u].size();++i){
int v = adj[u][i];
if( !disc[v] ){
dfs( v );
low[u] = min( low[u], low[v] );
}
else if( onstack[v] ){
low[u] = min( low[u], disc[v] );
}
}
if( disc[u] == low[u] ){
if( connectedVertices.top() != u ) result++;
while( connectedVertices.top() != u ){
result++;
onstack[ connectedVertices.top() ] = 0;
connectedVertices.pop();
}
onstack[ connectedVertices.top() ]=0;
connectedVertices.pop();
}
}
int main(){
ios::sync_with_stdio( false );
int T;
cin >> T;
while( T-- ){
int N;
cin >> N;
vector<int> A(N);
for(int i=0;i<N;++i) cin >> A[i];
connectedVertices = stack<int>();
result = 0;
		cdisc = 1;   // discovery times must start at 1 so that disc[]==0 still means "unvisited"
for(int i=1;i<=N;++i) disc[i]=low[i]=onstack[i]=0, adj[i].clear();
for(int i=0;i<N;++i){
int nxt = ((i+A[i]+1)%N);
if( nxt == i ) ++result;
else adj[i+1].push_back( nxt + 1 );
}
for(int i=1;i<=N;++i){
if( !disc[i] )
dfs( i );
}
cout << result << endl;
}
return 0;
}
<file_sep>/IOPC1207.cpp
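// IOPC1207: three independent 1D segment trees with lazy flip counters, one per axis.
// A range query multiplies the flipped/unflipped counts along each axis and sums the
// combinations in which an odd number of coordinates is flipped.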
// author noob333
// H + Shift C + B + E + B + E
#include <bits/stdc++.h>
#define NMAX 100011
//#define DEBUG 1
using namespace std;
// Input macros
#define s(n) scanf("%d",&n)
#define sc(n) scanf("%c",&n)
#define sl(n) scanf("%lld",&n)
#define sf(n) scanf("%lf",&n)
#define ss(n) scanf("%s",n)
// Useful constants
#define INF (int)1e9
#define EPS 1e-9
#define DINF 2.0e7
// Useful hardware instructions
#define bitcount __builtin_popcount
#define gcd __gcd
// Useful container manipulation / traversal macros
#define REP(i,a,b) for(int i=a;i<b;i++)
#define RREP(i,a,b) for(int i=a;i>b;i--)
#define foreach(v, c) for( typeof( (c).begin()) v = (c).begin(); v != (c).end(); ++v)
#define all(a) a.begin(), a.end()
#define in(a,b) ( (b).find(a) != (b).end())
#define pb push_back
#define fill(a,v) memset(a, v, sizeof a)
#define sz(a) ((int)(a.size()))
#define mp make_pair
#define fi first
#define se second
#define L(x) (x<<1)
#define R(x) ((x<<1)+1)
// Some common useful functions
#define sq(a) ( (a)*(a) )
#define maX(a,b) ( (a) > (b) ? (a) : (b))
#define miN(a,b) ( (a) < (b) ? (a) : (b))
#define checkbit(n,b) ( (n >> b) & 1)
#define setbit(n,b) ( n |= (1<<b) )
#define unsetbit(n,b) ( n &= ~(1<<b) )
#define DREP(a) sort(all(a)); a.erase(unique(all(a)),a.end())
#define INDEX(arr,ind) (lower_bound(all(arr),ind)-arr.begin())
typedef long long ll;
typedef vector<int> vi;
typedef pair<int, int> ii;
typedef vector<ii> vii;
typedef set<int> si;
typedef map<string, int> msi;
#ifdef DEBUG
#define debug(args...) {dbg,args; cerr<<endl;}
#else
#define debug(args...) // Just strip off all debug tokens
#endif
struct debugger
{
template<typename T> debugger& operator , (const T& v)
{
cerr<<v<<" ";
return *this;
}
} dbg;
template <typename T1, typename T2>
inline std::ostream& operator << (std::ostream& os, const std::pair<T1, T2>& p)
{
return os << "(" << p.first << ", " << p.second << ")";
}
template<typename T>
inline std::ostream &operator << (std::ostream & os,const std::vector<T>& v)
{
bool first = true;
os << "[";
for(unsigned int i = 0; i < v.size(); i++)
{
if(!first)
os << ", ";
os << v[i];
first = false;
}
return os << "]";
}
template<typename T>
inline std::ostream &operator << (std::ostream & os,const std::set<T>& v)
{
bool first = true;
os << "[";
for (typename std::set<T>::const_iterator ii = v.begin(); ii != v.end(); ++ii)
{
if(!first)
os << ", ";
os << *ii;
first = false;
}
return os << "]";
}
template<typename T1, typename T2>
inline std::ostream &operator << (std::ostream & os,const std::map<T1, T2>& v)
{
bool first = true;
os << "[";
for (typename std::map<T1, T2>::const_iterator ii = v.begin(); ii != v.end(); ++ii)
{
if(!first)
os << ", ";
os << *ii ;
first = false;
}
return os << "]";
}
inline void inp(int &n) {
n = 0;
int ch = getchar();
int sign = 1;
while (ch < '0' || ch > '9') {
if (ch == '-') sign = -1;
ch = getchar();
}
while (ch >= '0' && ch <= '9')
n = (n << 3) + (n << 1) + ch - '0', ch = getchar();
n = n*sign;
}
struct segNode{
int lazy;
int sum,len;
segNode(int a=0,int b=0,int c=0){
lazy=a;
sum=b; len=c;
}
};
struct segTree{
segNode tree[NMAX<<2];
void modify(int index,int val){
if(val%2) tree[index].sum = tree[index].len-tree[index].sum;
tree[index].lazy += val;
}
void push(int index,int i,int j){
if(i!=j){
if(tree[index].lazy){
modify(L(index),tree[index].lazy);
modify(R(index),tree[index].lazy);
tree[index].lazy=0;
}
}
}
void build(int index,int i,int j){
tree[index]=segNode(0,0,j-i+1);
if(i==j) return;
int mid=(i+j)>>1;
build(L(index),i,mid);
build(R(index),mid+1,j);
}
void update(int index,int i,int j,int st,int en){
if(i==st && j==en){
modify(index,1);
return;
}
push(index,i,j);
int mid = (i+j)>>1;
if(en<=mid) update(L(index),i,mid,st,en);
else if(st>mid) update(R(index),mid+1,j,st,en);
else{
update(L(index),i,mid,st,mid);
update(R(index),mid+1,j,mid+1,en);
}
tree[index].sum = tree[L(index)].sum + tree[R(index)].sum;
}
int query(int index,int i,int j,int st,int en){
if(i==st && j==en) return tree[index].sum;
push(index,i,j);
int mid = (i+j)>>1;
int x1,x2;
x1=x2=0;
if(en<=mid) x1=query(L(index),i,mid,st,en);
else if(st>mid) x2=query(R(index),mid+1,j,st,en);
else{
x1=query(L(index),i,mid,st,mid);
x2=query(R(index),mid+1,j,mid+1,en);
}
//tree[index].sum = tree[L(index)].sum + tree[R(index)].sum;
return x1+x2;
}
void printTree(int index,int i,int j){
if(i==j){
printf("%d,%d - %d %d %d\n",i,j,tree[index].lazy,tree[index].sum,tree[index].len);
return;
}
int mid=(i+j)>>1;
printTree(L(index),i,mid);
printTree(R(index),mid+1,j);
printf("%d,%d - %d %d %d\n",i,j,tree[index].lazy,tree[index].sum,tree[index].len);
}
}segTrees[3];
int N[3];
void init(){
REP(i,0,3){
//fill(segTrees[i].tree,0);
segTrees[i].build(1,0,N[i]-1);
}
}
void change(int ind,int u,int v){
segTrees[ind].update(1,0,N[ind]-1,u,v);
//segTrees[ind].printTree(1,0,N[ind]-1);
//cout << endl;
}
ll ask(int x1,int y1,int z1,int x2,int y2,int z2){
ll r1=segTrees[0].query(1,0,N[0]-1,x1,x2);
ll r2=segTrees[1].query(1,0,N[1]-1,y1,y2);
ll r3=segTrees[2].query(1,0,N[2]-1,z1,z2);
ll g1=(x2-x1+1)-r1;
ll g2=(y2-y1+1)-r2;
ll g3=(z2-z1+1)-r3;
//debug(r1,g1,r2,g2,r3,g3);
return (r1*r2*r3+r1*g2*g3+g1*r2*g3+g1*g2*r3);
}
void solve(){
int x1,x2,y1,y2,z1,z2;
int Q;
REP(i,0,3)
inp(N[i]);
init();
inp(Q);
REP(i,0,Q){
int ty;
inp(ty);
if(ty==3){
inp(x1); inp(y1); inp(z1);
inp(x2); inp(y2); inp(z2);
printf("%lld\n",ask(x1,y1,z1,x2,y2,z2));
}
else{
inp(x1); inp(x2);
change(ty,x1,x2);
}
}
}
int main(){
//freopen("1.in","r",stdin);
int T;
inp(T);
while(T--){
solve();
}
//fclose(stdin);
return 0;
}
<file_sep>/FORGETPW.cpp
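// FORGETPW: apply single-character substitution rules to the string, then strip leading zeros
// before the decimal point and trailing zeros (and a bare '.') after it, printing "0" if
// nothing remains.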
#include <bits/stdc++.h>
#define NMAX 1111111
#define RMAX 256
using namespace std;
char rule[RMAX],input[NMAX],output[NMAX];
inline void init(){
for(int i=0;i<RMAX;++i) rule[i]=i;
}
int main(){
int T;
scanf("%d",&T);
while(T--){
init();
int rules;
scanf("%d\n",&rules);
        for(int i=0;i<rules;++i) {
            // each rule line has the form "<from> <to>"; scanf replaces the unsafe, removed gets()
            // (assumes rule characters are never whitespace, as the original indexing implied)
            char from,to;
            scanf(" %c %c",&from,&to);
            rule[(unsigned char)from]=to;
        }
scanf("%s",input);
//printf("%d %s\n",T,input);
int flag=0;
int ptr=0;
int len=strlen(input);
int i;
assert(len>0);
for(i=0;i<len;++i) input[i]=rule[input[i]];
for(i=0;i<len;++i){
if(input[i]=='.') break;
if(flag || input[i]!='0' ) flag=1,output[ptr++]=input[i];
}
flag=ptr;
for(;i<len;++i){
if(input[i]!='0') flag=ptr+1;
output[ptr++]=input[i];
}
for(i=ptr-1;i>=flag;--i) output[i]='\0',--ptr;
if(output[ptr-1]=='.') output[i]='\0',--ptr;
if(ptr==0) output[0]='0',++ptr;
output[ptr]='\0';
printf("%s\n",output);
}
return 0;
}
<file_sep>/PRPOTION.cpp
#include <bits/stdc++.h>
#define inf 1000000011
using namespace std;
int main(){
int t;
scanf("%d",&t);
while(t--){
int maxR,maxG,maxB,res;
int R,G,B,M,tmp;
maxR=maxG=maxB=0;
scanf("%d %d %d %d",&R,&G,&B,&M);
for(int i=0;i<R;++i){
scanf("%d",&tmp);
maxR=max(maxR,tmp);
}
for(int i=0;i<G;++i){
scanf("%d",&tmp);
maxG=max(maxG,tmp);
}
for(int i=0;i<B;++i){
scanf("%d",&tmp);
maxB=max(maxB,tmp);
}
res=max(maxR,max(maxB,maxG));
for(int i=0;i<M;++i){
if(maxR>=maxG && maxR>=maxB){
maxR/=2;
}
else if(maxG>=maxR && maxG>=maxB){
maxG/=2;
}
else{
maxB/=2;
}
res = min(res,max(maxR,max(maxG,maxB)));
}
printf("%d\n",res);
}
return 0;
}
<file_sep>/CHEFCBA.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
ios::sync_with_stdio( false );
int a[4], flag = 0;
for(int i=0;i<4;++i) cin >> a[i];
    sort(a, a+4);                         // permute all four numbers, not just the first three
    do {
        // compare a[0]/a[1] with a[2]/a[3] exactly via cross-multiplication
        if( (long long)a[0]*a[3] == (long long)a[1]*a[2] ){
            flag = 1;
            break;
        }
    }while ( next_permutation(a,a+4) );
if(flag) cout << "Possible";
else cout << "Impossible";
return 0;
}
<file_sep>/COLOR.cpp
#include <bits/stdc++.h>
#define NMAX 111111
using namespace std;
int main(){
ios::sync_with_stdio(false);
int T;
cin >> T;
string rgb( "RGB" );
while( T-- ){
int N;
cin >> N;
string s;
cin >> s;
int minCost = NMAX;
for(int i=0;i<3;++i){
int cost = 0;
for(int j=0;j<s.size();++j){
if( s[j] != rgb[i] ) ++cost;
}
minCost = min( minCost, cost );
}
cout << minCost << endl;
}
return 0;
}
<file_sep>/QTREE6.cpp
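// QTREE6: heavy-light decomposition; each chain carries two segment trees, one that locates the
// highest vertex of the same colour above a position and one that holds lazily-updated
// component-size contributions per colour.  Query 0 reports the size of the same-colour
// component containing u, query 1 toggles u's colour.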
#include <bits/stdc++.h>
#define NMAX 111111
#define L(x) ( x<<1 )
#define R(x) ( (x<<1 ) + 1)
#define pb push_back
#define sz size( )
#define CL(S,x) memset(S,x,sizeof(S))
#define FOR(i,x,y) for(i=x;i<=y;i++)
#define _FOR(i,x,y) for(i=x;i>=y;i--)
#define inf 999999999
using namespace std;
typedef long long int ll;
inline int read_int() {
register char c = getchar_unlocked();
while(c<'0' || c>'9') c = getchar_unlocked();
int ret = 0;
while(c>='0' && c<='9') {
ret = (ret<<3)+(ret<<1) + c - 48;
c = getchar_unlocked();
}
return ret;
}
struct node{
int col;
int pos[2];
};
struct node2{
ll lazy[2];
ll sum[2];
};
vector<int> adjList[NMAX];
int subtree[NMAX], parent[NMAX], color[NMAX];
int n1, Q;
struct hlChain{
vector<node> tree;
vector<node2> tree2;
vector<int> subt,inp;
int N, col;
ll val;
hlChain( vector<int> input ){
N = input.sz;
inp = input;
subt = vector<int>(N+1);
for(int i=0;i<N;++i) subt[i] = subtree[input[i]];
tree = vector<node>(N<<2);
tree2 = vector<node2>(N<<2);
init(1,0,N-1);
init2(1,0,N-1);
}
node merge(node x,node y,int mid){
node z;
z.col=0;
z.pos[0] = (y.pos[0]==(mid+1))?min(mid+1,x.pos[0]):y.pos[0];
z.pos[1] = (y.pos[1]==(mid+1))?min(mid+1,x.pos[1]):y.pos[1];
return z;
}
void init(int idx,int i,int j){
if(i==j) {
tree[idx].col=0;
tree[idx].pos[0]=i;
tree[idx].pos[1]=inf;
return;
}
int mid=(i+j)>>1;
init(L(idx),i,mid);
init(R(idx),mid+1,j);
tree[idx] = merge( tree[L(idx)], tree[R(idx)], mid );
}
void update(int idx,int i,int j,int pt,int val){
if(i==j && i==pt){
tree[idx].col = val;
tree[idx].pos[val]=i;
tree[idx].pos[1^val]=inf;
return;
}
int mid=(i+j)>>1;
if(pt<=mid) update(L(idx),i,mid,pt,val);
else update(R(idx),mid+1,j,pt,val);
tree[idx] = merge( tree[L(idx)], tree[R(idx)], mid );
}
node query(int idx,int i,int j,int left,int right){
if(i==left && j==right) return tree[idx];
int mid=(i+j)>>1;
if(right<=mid) return query(L(idx),i,mid,left,right);
else if(left>mid) return query(R(idx),mid+1,j,left,right);
else{
node res1 = query(L(idx),i,mid,left,mid);
node res2 = query(R(idx),mid+1,j,mid+1,right);
return merge( res1, res2, mid);
}
}
void propagate( int idx,int i,int j ){
tree2[idx].sum[0] += (tree2[idx].lazy[0]*(j-i+1));
tree2[idx].sum[1] += (tree2[idx].lazy[1]*(j-i+1));
if(i!=j){
tree2[L(idx)].lazy[0] += tree2[idx].lazy[0];
tree2[L(idx)].lazy[1] += tree2[idx].lazy[1];
tree2[R(idx)].lazy[0] += tree2[idx].lazy[0];
tree2[R(idx)].lazy[1] += tree2[idx].lazy[1];
}
tree2[idx].lazy[0]=tree2[idx].lazy[1]=0;
}
void init2( int idx,int i,int j ){
if(i==j) {
tree2[idx].sum[0]=(ll)subt[i] - 1;
tree2[idx].lazy[0]=tree2[idx].lazy[1]=tree2[idx].sum[1]=0;
return;
}
int mid=(i+j)>>1;
init2(L(idx),i,mid);
init2(R(idx),mid+1,j);
tree2[idx].sum[0]=tree2[L(idx)].sum[0] + tree2[R(idx)].sum[0];
tree2[idx].sum[1]=tree2[L(idx)].sum[1] + tree2[R(idx)].sum[1];
tree2[idx].lazy[0]=tree2[idx].lazy[1]=0;
}
void update2(int idx,int i,int j,int left,int right){
propagate(idx,i,j);
if( left<=i && j<=right ){
tree2[idx].sum[col] += ( val * (j-i+1));
if(i!=j){
tree2[L(idx)].lazy[col] += val;
tree2[R(idx)].lazy[col] += val;
}
return;
}
if(left>j || right<i) return;
int mid= (i+j)>>1;
update2(L(idx),i,mid,left,right);
update2(R(idx),mid+1,j,left,right);
tree2[idx].sum[0]=tree2[L(idx)].sum[0] + tree2[R(idx)].sum[0];
tree2[idx].sum[1]=tree2[L(idx)].sum[1] + tree2[R(idx)].sum[1];
}
ll query2(int idx,int i,int j,int left,int right){
propagate(idx,i,j);
if( left<=i && j<=right ) return tree2[idx].sum[col];
if(left>j || right<i) return 0;
int mid= (i+j)>>1;
ll res1 = query2(L(idx),i,mid,left,right);
ll res2 = query2(R(idx),mid+1,j,left,right);
tree2[idx].sum[0]=tree2[L(idx)].sum[0] + tree2[R(idx)].sum[0];
tree2[idx].sum[1]=tree2[L(idx)].sum[1] + tree2[R(idx)].sum[1];
return res1 + res2;
}
void printTree( int idx,int i,int j){
if(i==j){
cout<<i<<","<<j<<" "<<tree[idx].pos[0]<<" "<<tree[idx].pos[1]<<endl;
return;
}
int mid=(i+j)>>1;
printTree(L(idx),i,mid);
printTree(R(idx),mid+1,j);
cout<<i<<","<<j<<" "<<tree[idx].pos[0]<<" "<<tree[idx].pos[1]<<endl;
}
void printTree2( int idx,int i,int j){
if(i==j){
cout<<i<<","<<j<<" "<<tree2[idx].sum[0]<<" "<<tree2[idx].sum[1]<<"-->"<<tree2[idx].lazy[0]<<" "<<tree2[idx].lazy[1]<<endl;
return;
}
int mid=(i+j)>>1;
printTree2(L(idx),i,mid);
printTree2(R(idx),mid+1,j);
cout<<i<<","<<j<<" "<<tree2[idx].sum[0]<<" "<<tree2[idx].sum[1]<<"-->"<<tree2[idx].lazy[0]<<" "<<tree2[idx].lazy[1]<<endl;
}
node query(int left,int right){
return query(1,0,N-1,left,right);
}
ll query2(int left,int right,int col_){
col= col_;
return query2(1,0,N-1,left,right);
}
void update(int point,int value){
update(1,0,N-1,point,value);
}
void update2(int left,int right,int col_,ll val_){
col=col_;
val=val_;
update2(1,0,N-1,left,right);
}
};
vector<hlChain> chains;
vector< vector<int> > chainVal;
int chainPos[NMAX], chainId[NMAX], chainHead[NMAX];
int cnum;
int dfs( int u,int p ){
subtree[u] = 1;
parent[u] = p;
color[u]=0;
for(int i=0;i<adjList[u].sz;++i){
int v=adjList[u][i];
if( v!=p ) subtree[u] += dfs(v,u);
}
return subtree[u];
}
void hld(int u,int p){
chainVal[cnum].pb(u);
chainPos[u] = chainVal[cnum].sz-1;
chainId[u] = cnum;
if( chainPos[u] == 0 ) chainHead[cnum] = u;
int most,idx;
most = idx =-1;
for(int i=0;i<adjList[u].sz;++i){
int v=adjList[u][i];
if( v!=p && subtree[v] > most ) most=subtree[v], idx=v;
}
if( idx != -1 ) hld(idx,u);
for(int i=0;i<adjList[u].sz;++i){
int v=adjList[u][i];
if( v!=p && v!=idx){
++cnum;
chainVal.pb(vector<int>(0));
hld(v,u);
}
}
}
int lowestWithSameCol( int u ){
int x = u;
int col = color[u];
int pos = chainPos[u];
int idx = chainId[u];
node low;
while(1){
low = chains[idx].query(0,pos);
if(low.pos[col]!=0) return chains[idx].inp[low.pos[col]];
x = parent[chainHead[idx]];
if(x==-1 || color[x]!=col) return chainHead[idx];
pos = chainPos[x];
idx = chainId[x];
}
}
ll ask( int u ){
int top = lowestWithSameCol(u);
int idx = chainId[top];
int pos = chainPos[top];
return chains[idx].query2(pos,pos,color[u]) + 1;
}
void change1( int u ){   // nothing is returned, so declare it void
int x = u;
int oldCol = color[u];
int newCol = 1^oldCol;
int pos = chainPos[u];
int idx = chainId[u];
ll sub,add;
node low;
sub = -( chains[idx].query2(pos,pos,oldCol) + 1);
add = chains[idx].query2(pos,pos,newCol)+1;
x = parent[u];
if( x!=-1 && color[x] == oldCol ){
pos = chainPos[x];
idx = chainId[x];
while( 1 ){
low = chains[idx].query(0,pos);
chains[idx].update2(low.pos[oldCol],pos,oldCol,sub);
if( low.pos[oldCol] !=0 ){
x = chains[idx].inp[low.pos[oldCol]-1];
idx = chainId[x], pos = chainPos[x];
break;
}
x = parent[chainHead[idx]];
if(x==-1) break;
idx = chainId[x], pos = chainPos[x];
if(color[x]!=oldCol) break;
}
}
if( x!=-1 ){
pos = chainPos[x],idx=chainId[x];
chains[idx].update2(pos,pos,oldCol,sub);
}
pos=chainPos[u], idx=chainId[u];
//cout<< newCol << " ---< " <<endl;
color[u] = 1^color[u];
chains[idx].update( pos,newCol );
//x = u;
x = parent[u];
if( x!=-1 && color[x] == newCol ){
pos = chainPos[x];
idx = chainId[x];
while( 1 ){
low = chains[idx].query(0,pos);
chains[idx].update2(low.pos[newCol],pos,newCol,add);
if( low.pos[newCol] !=0 ){
x = chains[idx].inp[low.pos[newCol]-1];
idx = chainId[x], pos = chainPos[x];
break;
}
x = parent[chainHead[idx]];
if(x==-1) break;
idx = chainId[x], pos = chainPos[x];
if(color[x]!=newCol) break;
}
}
if( x!=-1 ){
pos = chainPos[x], idx = chainId[x];
chains[idx].update2(pos,pos,newCol,add);
}
}
void solve(){
cnum = 0;
chainVal.pb(vector<int>(0));
dfs(1,-1);
hld(1,-1);
for(int i=0;i<=cnum;++i){
//for(int j=0;j<chainVal[i].sz;++j)
// cout<<chainVal[i][j]<<" ";
//cout<< endl;
hlChain x(chainVal[i]);
chains.pb(x);
}
Q = read_int();
//scanf("%d",&Q);
for(int i=0;i<Q;++i){
int type,u;
type =read_int();
u = read_int();
//scanf("%d %d",&type,&u);
if(type == 0) {
printf("%lld\n",ask(u));
}
else{
change1(u);
// cout<<"---><>"<<endl;
// chains[chainId[u]].printTree2(1,0,chains[chainId[u]].N-1);
}
}
}
int main(){
n1 = read_int();
for(int i=0;i<n1-1;++i){
int u,v;
u = read_int();
v = read_int();
adjList[u].pb(v);
adjList[v].pb(u);
}
solve();
return 0;
}
<file_sep>/FCBARCA.cpp
#include <stdio.h>
#include <vector>
#include <algorithm>
#define MOD 1000000007
using namespace std;
typedef long long int x64;
x64 array[1001][11];
void initialize()
{
x64 runSq;
for(int k=1;k<=10;++k)
array[1][k]=k;
for(int i=1;i<=10;++i)
{
runSq=i*i;
for(int n=2;n<=1000;++n)
{
array[n][i]=(runSq-array[n-1][i])%MOD;
if(array[n][i]<0)
array[n][i]+=MOD;
runSq=runSq*i%MOD;
}
}
}
int main()
{
initialize();
int T,N,K;
scanf("%d",&T);
while(T>0)
{
scanf("%d %d",&N,&K);
printf("%lld\n",array[N-1][K]);
--T;
}
return 0;
}
<file_sep>/ADIGIT.cpp
#include <bits/stdc++.h>
#define DIGITS 10
#define NMAX 111111
using namespace std;
int counter[DIGITS];
int dp[NMAX];
int main(){
memset(counter,0,sizeof(counter));
int N,M;
scanf("%d %d\n",&N,&M);
for(int i=0;i<N;++i){
int a,res=0;
char x;
scanf("%c",&x);
a=x-'0';
for(int j=0;j<DIGITS;++j)
res=res+(counter[j]*abs(a-j));
dp[i]=res;
counter[a]++;
}
for(int i=0;i<M;++i){
int x;
scanf("%d",&x);
printf("%d\n",dp[x-1]);
}
}
<file_sep>/PRIME1.cpp
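// PRIME1: print every prime in [m, n] using a Miller-Rabin primality test with 10 random
// witnesses per candidate.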
#include <bits/stdc++.h>
using namespace std;
typedef long long int ll;
ll mulmod(ll a, ll b, ll mod)
{
ll x = 0,y = a % mod;
while (b > 0)
{
if (b % 2 == 1)
{
x = (x + y) % mod;
}
y = (y * 2) % mod;
b /= 2;
}
return x % mod;
}
ll modulo(ll base, ll exponent, ll mod)
{
ll x = 1;
ll y = base;
while (exponent > 0)
{
if (exponent % 2 == 1)
x = (x * y) % mod;
y = (y * y) % mod;
exponent = exponent / 2;
}
return x % mod;
}
/*
* Miller-Rabin primality test, iteration signifies the accuracy
*/
bool Miller(ll p,int iteration)
{
if (p < 2)
{
return false;
}
if (p != 2 && p % 2==0)
{
return false;
}
ll s = p - 1;
while (s % 2 == 0)
{
s /= 2;
}
for (int i = 0; i < iteration; i++)
{
ll a = rand() % (p - 1) + 1, temp = s;
ll mod = modulo(a, temp, p);
while (temp != p - 1 && mod != 1 && mod != p - 1)
{
mod = mulmod(mod, mod, p);
temp *= 2;
}
if (mod != p - 1 && temp % 2 == 0)
{
return false;
}
}
return true;
}
inline ll ReadNext(){
register ll c = getchar();
ll x = 0;
for(;(c<48 || c>57);c = getchar());
for(;c>47 && c<58;c = getchar()) {x = (x<<1) + (x<<3) + c - 48;}
return x;
}
int main(){
int T;
//scanf("%d",&T);
T=ReadNext();
while(T--){
ll m,n;
//scanf("%lld %lld",&m,&n);
m=ReadNext();
n=ReadNext();
for(ll k=m;k<=n;++k)
if(Miller(k,10))
printf("%lld\n",k);
if(T) printf("\n");
}
return 0;
}
<file_sep>/WSTRING.cpp
#include <cstdio>
#include <cstring>
#include <vector>
#define REP26(i) for(i=0;i<26;++i)
using namespace std;
const int MAX=10005;
struct seg
{
vector<int> count;
vector<int> cumFreq;
int max;
int cumMax;
};
vector<seg> segments;
void solve()
{
vector<seg>::iterator itr;
int prefix,suffix,best,k,total;
if(segments.begin()!=segments.end())
{
REP26(k)
segments[0].cumFreq[k]=segments[0].count[k];
segments[0].cumMax=segments[0].max;
}
for(int k=0;k<segments.size();++k)
{
int i;
if(k+1==segments.size())
break;
REP26(i)
{
segments[k+1].cumFreq[i]=segments[k].cumFreq[i]+segments[k+1].count[i];
segments[k+1].cumMax=max(segments[k+1].cumMax,segments[k+1].cumFreq[i]);
}
}
total=segments.size()-1;
best=0;
for(int i=0;i<segments.size();++i)
{
suffix=0;
if(i+1>=segments.size()||i+2>=segments.size()||i+3>=segments.size())
break;
prefix=segments[i].cumMax;
int j;
REP26(j)
suffix=max(suffix,segments[total].cumFreq[j]-segments[i+2].cumFreq[j]);
if(!(prefix&&suffix&&segments[i+1].max&&segments[i+2].max))
continue;
best=max(best,prefix+segments[i+1].max+segments[i+2].max+suffix);
}
if(best)
printf("%d\n",best+3);
else
printf("%d\n",0);
}
void printSeg()
{
for(int i=0;i<segments.size();++i)
{
int j;
printf("\n\n");
REP26(j)
printf(" %d",segments[i].count[j]);
printf("\n");
REP26(j)
printf(" %d",segments[i].cumFreq[j]);
printf("\n %d %d",segments[i].max,segments[i].cumMax);
}
}
int main()
{
seg dummy;
dummy.cumFreq=vector<int>(26,0);
dummy.count=vector<int>(26,0);
dummy.cumMax=dummy.max=0;
int T;
scanf("%d",&T);
while(T>0)
{
char str[MAX];
int len,k;
scanf("%s",str);
len=strlen(str);
segments.clear();
segments.push_back(dummy);
k=0;
for(int i=0;i<len;++i)
{
if(str[i]=='#')
            {
                ++k;
                segments.push_back(dummy);
            }
else
{
++segments[k].count[str[i]-'a'];
segments[k].max=max(segments[k].max,segments[k].count[str[i]-'a']);
}
}
solve();
--T;
}
return 0;
}
<file_sep>/CHEFBM.cpp
#include <bits/stdc++.h>
#define NMAX 111111
using namespace std;
typedef long long int LL;
typedef pair<LL,LL> pii;
vector<LL> modify[NMAX];
void solve1();
int main(){
/* LL N,M,P;
scanf("%lld %lld %lld",&N,&M,&P);
for(LL i=0;i<P;++i){
LL u,v;
scanf("%lld %lld",&u,&v);
modify[u].push_back(v);
}
for(LL i=1;i<=N;++i){
if(M==1){
printf("%lld\n",M+LL(modify[i].size()));
continue;
}
else if(modify[i].size()==0){
printf("%lld\n",M-1);
continue;
}
else{
LL flag=0;
vector<pii> cnt;
LL res=M-1;
cnt.push_back(pii(-1,0));
sort(modify[i].begin(),modify[i].end());
LL sz=modify[i].size();
LL ctr=0;
modify[i].push_back(-1);
for(LL j=0;j<sz;++j){
++ctr;
if(modify[i][j]!=modify[i][j+1]){
cnt.push_back(pii(modify[i][j],ctr));
ctr=0;
}
}
cnt.push_back(pii(M+1,0));
//for(int j=0;j<cnt.size();++j) printf("%lld,%lld ",cnt[j].first,cnt[j].second);
//printf("\n");
for(LL j=1;j<cnt.size()-1;++j){
if(cnt[j-1].first+1!=cnt[j].first){
if(cnt[j].first!=1) res+=cnt[j].second;
}
if(cnt[j].first==M){
continue;
}
LL next=(cnt[j].first+1==cnt[j+1].first)?cnt[j+1].second:0;
if(cnt[j].second>next+1){
flag=1;
break;
}
else res=res+(next-cnt[j].second);
}
if(flag) res=-1;
printf("%lld\n",res);
}
}
*/
solve1();
return 0;
}
void solve1(){
LL N,M,P;
scanf("%lld %lld %lld",&N,&M,&P);
for(LL i=0;i<P;++i){
LL u,v;
scanf("%lld %lld",&u,&v);
modify[u].push_back(v);
}
for(LL i=1;i<=N;++i){
if(M==1){
printf("%d\n",0);
continue;
}
else if(modify[i].size()==0){
printf("%lld\n",M-1);
continue;
}
else{
LL flag=0;
vector<pii> cnt;
LL res=M-1;
cnt.push_back(pii(-1,0));
sort(modify[i].begin(),modify[i].end());
LL sz=modify[i].size();
LL ctr=0;
modify[i].push_back(-1);
for(LL j=0;j<sz;++j){
++ctr;
if(modify[i][j]!=modify[i][j+1]){
cnt.push_back(pii(modify[i][j],ctr));
ctr=0;
}
}
cnt.push_back(pii(M+1,0));
//for(int j=0;j<cnt.size();++j) printf("%lld,%lld ",cnt[j].first,cnt[j].second);
//printf("\n");
for(LL j=1;j<cnt.size()-1;++j){
if(cnt[j-1].first+1!=cnt[j].first){
if(cnt[j].first!=1) res+=cnt[j].second;
}
if(cnt[j].first==M){
continue;
}
LL next=(cnt[j].first+1==cnt[j+1].first)?cnt[j+1].second:0;
if(cnt[j].second>next+1){
flag=1;
break;
}
else res=res+(next-cnt[j].second);
}
if(flag) res=-1;
printf("%lld\n",res);
}
}
}
<file_sep>/DIVQUERY.cpp
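// DIVQUERY: offline solution.  Each query [l, r], k is split into two events at positions l-1
// and r; sweeping the array while accumulating a frequency table of divisors lets every answer
// be taken as the difference of two prefix counts.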
#include <cstring>
#include <cstdio>
#include <algorithm>
#include <vector>
#include <iostream>
#define NMAX 111111
using namespace std;
int tree[NMAX],input[NMAX],res[NMAX];
int N,Q;
struct Query{
int pos,k;
int index,count;
}qry[NMAX<<1];
void update(int idx,int val){
for(int i=idx;i<NMAX;i+=(i&-i))
tree[i]+=val;
}
int read(int idx){
int sum=0;
for(int i=idx;i>0;i-=(i&-i))
sum+=tree[i];
return sum;
}
bool comp(const Query &x,const Query &y){
if(x.pos<y.pos) return true;
return false;
}
int main(){
scanf("%d %d",&N,&Q);
input[0]=0;
for(int i=1;i<=N;++i) scanf("%d",&input[i]);
for(int i=0;i<(Q<<1);++i) {
scanf("%d %d %d",&qry[i].pos,&qry[i+1].pos,&qry[i].k);
qry[i].index=i/2;
qry[i+1].index=i/2;
qry[i+1].k=qry[i].k;
--qry[i].pos;
++i;
}
sort(qry,qry+(Q<<1),comp);
//memset(tree,0,sizeof(tree));
//memset(res,0,sizeof(res));
int qptr=0;
for(int i=0;i<=N;++i){
for(int j=1;j*j<=input[i];++j)
if(input[i]%j==0){
++tree[j];
if((input[i]!=j*j)) ++tree[input[i]/j] ;
}
while( qptr<(Q<<1) && qry[qptr].pos==i ){
res[qry[qptr].index]=tree[qry[qptr].k]-res[qry[qptr].index];
++qptr;
}
if(qptr>=(Q<<1)) break;
}
for(int i=0;i<Q;++i) printf("%d\n",res[i]);
return 0;
}
<file_sep>/APPROX.cpp
#include <stdio.h>
#define SIZE 1000002
char array[SIZE];
void compute(int N)
{
int num=103993,rem=4687,div=33102;
for(int i=0;i<N;++i)
{
rem=(rem*10);
num=rem/div;
array[i]=48+(num);
rem=rem-(num*div);
}
}
int main()
{
int T,N[2001],max=-1;
scanf("%d",&T);
for(int i=0;i<T;++i)
{
scanf("%d",&N[i]);
if(N[i]>max)
max=N[i];
}
compute(max);
for(int i=0;i<T;++i)
{
if(N[i]==0)
printf("3\n");
else
{
char c=array[N[i]];
array[N[i]]='\0';
printf("3.%s \n",array);
array[N[i]]=c;
}
}
return 0;
}
<file_sep>/ALEXNUMB.cpp
#include <cstdio>
int main(){
int T;
scanf("%d",&T);
while(T>0){
long long int N,x;
scanf("%lld",&N);
for(int i=0;i<N;++i) scanf("%lld",&x);
printf("%lld\n",(N*(N-1))>>1);
--T;
}
return 0;
}
<file_sep>/SSS.cpp
#include <bits/stdc++.h>
using namespace std;
typedef long long int LL;
LL pow(LL a, LL b, LL MOD) {
LL x = 1, y = a;
while(b > 0) {
if(b%2 == 1) {
x=(x*y);
if(x>MOD) x%=MOD;
}
y = (y*y);
if(y>MOD) y%=MOD;
b /= 2;
}
return x;
}
LL modInverse(LL a, LL m) {
return pow(a,m-2,m);
}
int main(){
LL T;
scanf("%lld",&T);
while(T--) {
LL n,p,res;
scanf("%lld %lld",&n,&p);
if(n>=p) res=0;
else if(n==p-1) res=-1;
else{
res=1;
for(int i=p-2;i>n;--i) res=(res*modInverse(i,p)%p);
}
printf("%lld\n",res);
}
return 0;
}
<file_sep>/DIGITSEP.cpp
#include <bits/stdc++.h>
#define NMAX 300
#define MMAX 10
using namespace std;
typedef long long int ll;
int N,M,X,Y;
string s;
ll maxGcd;
ll _gcd(ll a, ll b) {
return b == 0 ? a : _gcd(b, a % b);
}
void dfs(int idx, int m, int counter, ll currGcd){
//cout << idx << " " << m << " " << endl;
if(currGcd == 1) return;
if(idx>=N){
--counter;
if(counter >= X && counter <= Y)
maxGcd = max(maxGcd, currGcd);
return;
}
if(counter>Y) return;
ll currVal = 0;
for(int i=0;i<m;++i){
if(idx+i>=N) break;
currVal *= 10;
currVal += s[idx+i]-'0';
}
if(currGcd < 0) currGcd = currVal;
else currGcd = _gcd(currGcd, currVal);
if(currGcd == 1) return;
for(int i=1;i<=M;++i){
dfs(idx+m, i, counter+1, currGcd);
}
}
void solve(){
maxGcd = 1;
for(int i=0;i<=M;++i)
dfs(0,i,0,-1);
cout << maxGcd << endl;
}
int main(){
ios::sync_with_stdio(false);
int T;
cin >> T;
while(T--){
cin >> N;
cin >> s;
cin >> M >> X >> Y;
solve();
}
return 0;
}
<file_sep>/PPTREE.cpp
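// PPTREE: root the tree and compute the XOR of edge weights from the root to every vertex; the
// maximum XOR over any path equals the maximum pairwise XOR of these values, found with a
// binary trie.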
// By noob.
#include <cstdio>
#include <cmath>
#include <cstring>
#include <cstdlib>
#include <ctime>
#include <climits>
#include <iostream>
#include <fstream>
#include <sstream>
#include <algorithm>
#include <string>
#include <vector>
#include <set>
#include <map>
#include <list>
#include <complex>
#pragma comment(linker, "/STACK:266777216")
using namespace std;
#define assert(f) { if(!(f)) { fprintf(stderr,"Assertion failed: "); fprintf(stderr,#f); fprintf(stderr,"\n"); exit(1); } }
typedef long long LL;
typedef unsigned long long ULL;
typedef vector<int> VI;
typedef vector<VI> VVI;
typedef pair<int,int> PII;
typedef vector<PII> VPII;
typedef vector<double> VD;
typedef pair<double,double> PDD;
const int inf=1000000000;
const LL INF=LL(inf)*inf;
const double eps=1e-9;
const double PI=2*acos(0.0);
#define bit(n) (1<<(n))
#define bit64(n) ((LL(1))<<(n))
#define pb push_back
#define sz size()
#define mp make_pair
#define cl clear()
#define all(a) (a).begin(),(a).end()
#define fill(ar,val) memset((ar),(val),sizeof (ar))
#define MIN(a,b) {if((a)>(b)) (a)=(b);}
#define MAX(a,b) {if((a)<(b)) (a)=(b);}
#define sqr(x) ((x)*(x))
#define X first
#define Y second
clock_t start=clock();
#define NMAX 111111
int N;
vector<VPII> Tree;
int cost[NMAX];
struct node{
vector<node *>links;
node(){
links.resize(2,NULL);
}
};
struct Trie{
node *root;
Trie(){
root=new node();
}
void insert(int);
int find(int);
};
void Trie::insert(int x){
node *curr=root;
//int lim=1;
//while((lim<<1)<=x) lim=lim<<1;
int lim=1<<30;
for(;lim>0;lim=lim>>1){
int d=(x&lim)==0?0:1;
if(curr->links[d]==NULL)
curr->links[d]=new node();
curr=curr->links[d];
}
}
int Trie::find(int x){
node *curr=root;
int lim=1<<30;
int res=0;
for(;lim>0;lim=lim>>1){
int d=(x&lim)==0?0:1;
if(curr->links[!d]!=NULL) res+=lim,curr=curr->links[!d];
else curr=curr->links[d];
}
return res;
}
void dfs(int u,int p,int weight){
cost[u]=cost[p]^weight;
for(int i=0;i<Tree[u].size();++i){
int v=Tree[u][i].X;
if(v!=p){
dfs(v,u,Tree[u][i].Y);
}
}
}
int main()
{
int T;
scanf("%d",&T);
while(T>0){
scanf("%d",&N);
Tree.clear();
Tree.resize(N+1,VPII(0));
for(int i=0;i<N-1;++i){
int u,v,w;
scanf("%d %d %d",&u,&v,&w);
Tree[u].pb(PII(v,w));
Tree[v].pb(PII(u,w));
}
cost[1]=0;
dfs(1,1,0);
Trie *t=new Trie();
for(int i=1;i<=N;++i){
t->insert(cost[i]);
}
int best=INT_MIN;
for(int i=1;i<=N;++i){
best=max(best,t->find(cost[i]));
}
printf("%d\n",best);
--T;
}
return 0;
}
<file_sep>/MAANDI.cpp
#include <cstdio>
#define gc getchar_unlocked
using namespace std;
void scanint(int &x)
{
register int c = gc();
x = 0;
int neg = 0;
for(;((c<48 || c>57) && c != '-');c = gc());
if(c=='-') {neg=1;c=gc();}
for(;c>47 && c<58;c = gc()) {x = (x<<1) + (x<<3) + c - 48;}
if(neg) x=-x;
}
int check(int num)
{
int flag=0;
while(num)
{
int rem=num%10;
if(rem==4 || rem==7) {
flag=1;
break;
}
num=num/10;
}
return flag;
}
int overlucky(int x)
{
int count=0;
for(int i=1; i*i<=x; ++i)
{
int num=i;
if(x%i==0){
count+=check(i);
if(i*i!=x) count+=check(x/i);
}
}
return count;
}
int main()
{
int T;
scanint(T);
while(T>0)
{
int N;
scanint(N);
printf("%d\n",overlucky(N));
--T;
}
return 0;
}
<file_sep>/PRGIFT.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
int T;
scanf("%d",&T);
for(int t=0;t<T;++t){
int n,k;
int evenCount=0;
scanf("%d %d",&n,&k);
for(int i=0;i<n;++i){
int x;
scanf("%d",&x);
if(x%2==0) ++evenCount;
}
if(evenCount==n && k==0) printf("NO");
else if(k<=evenCount) printf("YES");
else printf("NO");
if(t<T-1) printf("\n");
}
return 0;
}
<file_sep>/CSUB.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
int T;
scanf("%d",&T);
while(T--){
int N;
int cnt=0;
scanf("%d\n",&N);
for(int i=0;i<N;++i){
char c;
scanf("%c",&c);
if(c=='1') cnt++;
}
long long int res= (long long int)cnt;
res = (res * (res-1))/2;
res = res + cnt;
printf("%lld\n",res);
}
return 0;
}
<file_sep>/XRQRS.cpp
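// XRQRS: versioned array supporting appends and rollbacks.  A persistent binary trie answers
// "maximum XOR with x over positions [l, r]"; a persistent segment tree over values answers
// "count of elements <= x" and "k-th smallest" on the same position ranges.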
// author noob333
// H + Shift C + B + E + B + E
#include <bits/stdc++.h>
#include <stdint.h>
#include <fcntl.h>
#include <unistd.h>
#define MAXVAL 500001
#define NMAX 500010
#define gc getchar_unlocked
#define DEBUG 1
using namespace std;
// Input macros
#define s(n) scanf("%d",&n)
#define sc(n) scanf("%c",&n)
#define sl(n) scanf("%lld",&n)
#define sf(n) scanf("%lf",&n)
#define ss(n) scanf("%s",n)
// Useful constants
#define INF (int)1e9
#define EPS 1e-9
// Useful hardware instructions
#define bitcount __builtin_popcount
#define gcd __gcd
// Useful container manipulation / traversal macros
#define REP(i,a,b) for(int i=a;i<b;i++)
#define RREP(i,a,b) for(int i=a;i>b;i--)
#define foreach(v, c) for( typeof( (c).begin()) v = (c).begin(); v != (c).end(); ++v)
#define all(a) a.begin(), a.end()
#define in(a,b) ( (b).find(a) != (b).end())
#define pb push_back
#define fill(a,v) memset(a, v, sizeof a)
#define sz(a) ((int)(a.size()))
#define mp make_pair
#define fi first
#define se second
// Some common useful functions
#define maX(a,b) ( (a) > (b) ? (a) : (b))
#define miN(a,b) ( (a) < (b) ? (a) : (b))
#define checkbit(n,b) ( (n >> b) & 1)
#define DREP(a) sort(all(a)); a.erase(unique(all(a)),a.end())
#define INDEX(arr,ind) (lower_bound(all(arr),ind)-arr.begin())
#define L(x) ( (x<<1) )
#define R(x) ( (x<<1)+1 )
typedef long long ll;
typedef vector<int> vi;
typedef vector<vector<int> > vvi;
typedef vector<ll> vl;
typedef pair<int, int> ii;
typedef vector<ii> vii;
typedef set<int> si;
typedef map<string, int> msi;
#ifdef DEBUG
#define debug(args...) {dbg,args; cerr<<endl;}
#else
#define debug(args...) // Just strip off all debug tokens
#endif
struct debugger
{
template<typename T> debugger& operator , (const T& v)
{
cerr<<v<<" ";
return *this;
}
} dbg;
template <typename T1, typename T2>
inline std::ostream& operator << (std::ostream& os, const std::pair<T1, T2>& p)
{
return os << "(" << p.first << ", " << p.second << ")";
}
template<typename T>
inline std::ostream &operator << (std::ostream & os,const std::vector<T>& v)
{
bool first = true;
os << "[";
for(unsigned int i = 0; i < v.size(); i++)
{
if(!first)
os << ", ";
os << v[i];
first = false;
}
return os << "]";
}
template<typename T>
inline std::ostream &operator << (std::ostream & os,const std::set<T>& v)
{
bool first = true;
os << "[";
for (typename std::set<T>::const_iterator ii = v.begin(); ii != v.end(); ++ii)
{
if(!first)
os << ", ";
os << *ii;
first = false;
}
return os << "]";
}
template<typename T1, typename T2>
inline std::ostream &operator << (std::ostream & os,const std::map<T1, T2>& v)
{
bool first = true;
os << "[";
for (typename std::map<T1, T2>::const_iterator ii = v.begin(); ii != v.end(); ++ii)
{
if(!first)
os << ", ";
os << *ii ;
first = false;
}
return os << "]";
}
class FastInput {
public:
FastInput() {
m_dataOffset = 0;
m_dataSize = 0;
m_v = 0x80000000;
}
uint32_t ReadNext() {
if (m_dataOffset == m_dataSize) {
int r = read(0, m_buffer, sizeof(m_buffer));
if (r <= 0) return m_v;
m_dataOffset = 0;
m_dataSize = 0;
int i = 0;
if (m_buffer[0] < '0') {
if (m_v != 0x80000000) {
m_data[m_dataSize++] = m_v;
m_v = 0x80000000;
}
for (; (i < r) && (m_buffer[i] < '0'); ++i);
}
for (; i < r;) {
if (m_buffer[i] >= '0') {
m_v = m_v * 10 + m_buffer[i] - 48;
++i;
} else {
m_data[m_dataSize++] = m_v;
m_v = 0x80000000;
for (i = i + 1; (i < r) && (m_buffer[i] < '0'); ++i);
}
}
}
return m_data[m_dataOffset++];
}
public:
uint8_t m_buffer[32768];
uint32_t m_data[16384];
size_t m_dataOffset, m_dataSize;
uint32_t m_v;
};
class FastOutput {
public:
FastOutput() {
m_dataOffset = 0;
}
~FastOutput() {
}
void Flush() {
if (m_dataOffset) {
if (write(1, m_data, m_dataOffset));
m_dataOffset = 0;
}
}
void PrintUint(uint32_t v, char d) {
if (m_dataOffset + 11 > sizeof(m_data)) Flush();
if (v < 100000) {
if (v < 1000) {
if (v < 10) {
m_data[m_dataOffset + 0] = v + 48;
m_dataOffset += 1;
} else if (v < 100) {
m_data[m_dataOffset + 1] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 0] = v + 48;
m_dataOffset += 2;
} else {
m_data[m_dataOffset + 2] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 1] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 0] = v + 48;
m_dataOffset += 3;
}
} else {
if (v < 10000) {
m_data[m_dataOffset + 3] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 2] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 1] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 0] = v + 48;
m_dataOffset += 4;
} else {
m_data[m_dataOffset + 4] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 3] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 2] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 1] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 0] = v + 48;
m_dataOffset += 5;
}
}
} else {
if (v < 100000000) {
if (v < 1000000) {
m_data[m_dataOffset + 5] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 4] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 3] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 2] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 1] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 0] = v + 48;
m_dataOffset += 6;
} else if (v < 10000000) {
m_data[m_dataOffset + 6] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 5] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 4] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 3] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 2] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 1] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 0] = v + 48;
m_dataOffset += 7;
} else {
m_data[m_dataOffset + 7] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 6] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 5] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 4] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 3] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 2] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 1] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 0] = v + 48;
m_dataOffset += 8;
}
} else {
if (v < 1000000000) {
m_data[m_dataOffset + 8] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 7] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 6] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 5] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 4] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 3] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 2] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 1] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 0] = v + 48;
m_dataOffset += 9;
} else {
m_data[m_dataOffset + 9] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 8] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 7] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 6] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 5] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 4] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 3] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 2] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 1] = v - v / 10 * 10 + 48; v /= 10;
m_data[m_dataOffset + 0] = v + 48;
m_dataOffset += 10;
}
}
}
m_data[m_dataOffset++] = d;
}
void PrintChar(char d) {
if (m_dataOffset + 1 > sizeof(m_data)) Flush();
m_data[m_dataOffset++] = d;
}
void ReplaceChar(int offset, char d) {
m_data[m_dataOffset + offset] = d;
}
public:
uint8_t m_data[32768];
size_t m_dataOffset;
};
inline void scanint(int &x)
{
register int c = gc();
x = 0;
int neg = 0;
for(;((c<48 || c>57) && c != '-');c = gc());
if(c=='-') {neg=1;c=gc();}
for(;c>47 && c<58;c = gc()) {x = (x<<1) + (x<<3) + c - 48;}
if(neg) x=-x;
}
int input[NMAX];
int N,M;
struct trieNode{
trieNode* next[2];
int cnt;
trieNode(){
next[0] = next[1] = NULL;
cnt = 0;
}
};
void printTrie(trieNode *root,int val){
if(root->next[0]==NULL && root->next[1]==NULL)
cout << val << ",";
if(root->next[0]) printTrie(root->next[0],val<<1);
if(root->next[1]) printTrie(root->next[1],(val<<1)+1);
}
trieNode *trieVersions[NMAX];
trieNode* insert(trieNode* root,int x,int mask){
int tmp = (x&mask)>0?1:0;
trieNode* ptr = new trieNode();
ptr->next[0] = root->next[0]; ptr->next[1] = root->next[1];
ptr->cnt = root->cnt + 1;
if(root->next[tmp]==NULL){
trieNode* head = ptr;
while(mask){
ptr->next[tmp] = new trieNode();
ptr = ptr->next[tmp];
ptr->cnt += 1;
mask >>= 1;
tmp = (x&mask)>0?1:0;
}
return head;
}
ptr->next[tmp] = insert(root->next[tmp],x,mask>>1);
return ptr;
}
// trieNode* insert(trieNode* root,int x){
// // trieNode *ptr=(trieNode*)malloc(sizeof(trieNode));
// trieNode *ptr = new trieNode();
// trieNode *head = ptr;
// ptr->next[0] = root->next[0]; ptr->next[1] = root->next[1];
// ptr->cnt = root->cnt + 1;
// int mask = 1<<19; int tmp = (mask&x)>0?1:0;
// while(mask && root->next[tmp]!=NULL){
// }
// REP(i,0,20){
// int tmp = (mask&x)>0?1:0;
// if(ptr->next[tmp]==NULL) ptr->next[tmp] = new trieNode();
// ptr=ptr->next[tmp];
// ptr->cnt += 1;
// mask >>= 1;
// }
// return head;
// }
ii getTrieMax(trieNode* st,trieNode* en,int x){
//debug(x);
int mask = 1<<19;
int best = 0, val = 0;
REP(i,0,20){
int tmp = (mask&x)>0?1:0;
//debug(st,en,tmp,mask);
if(en->next[1^tmp] && !(st!=NULL && st->next[1^tmp]!=NULL && en->next[1^tmp]->cnt - st->next[1^tmp]->cnt==0)){
best += mask;
val += ((1^tmp)*mask);
en=en->next[1^tmp]; if(st!=NULL) st=st->next[1^tmp];
}
else{
val += (tmp*mask);
en = en->next[tmp]; if(st!=NULL) st=st->next[tmp];
}
mask >>= 1;
}
return ii(best,val);
}
// inline void insert(trieNode* root,int x){
// trieNode *ptr=root;
// int mask = 1<<19;
// ptr->cnt += 1;
// REP(i,0,20){
// int tmp = (mask&x)>0?1:0;
// if(!ptr->next[tmp]) ptr->next[tmp]=(trieNode*)malloc(sizeof(trieNode));
// ptr=ptr->next[tmp];
// ptr->cnt += 1;
// mask >>= 1;
// }
// }
// inline void remove(trieNode* root,int x){
// trieNode * ptr=root;
// int mask = 1<<19;
// REP(i,0,20){
// int tmp = (mask&x)>0?1:0;
// //debug(tmp,mask);
// ptr->cnt -= 1;
// if(ptr->next[tmp]->cnt==1){
// ptr->next[tmp]=NULL;
// return;
// }
// ptr = ptr->next[tmp];
// // ptr=ptr->next[tmp];
// // ptr->cnt -= 1;
// // if(ptr->cnt == 0 ){
// // delete ptr;
// // //ptr = NULL;
// // return;
// // }
// mask >>= 1;
// }
// }
// inline ii getTrieMax(trieNode *root,int x){
// trieNode *ptr = root;
// assert(ptr!=NULL);
// int mask = 1<<19;
// int best=0,val=0;
// for(int i=0;i<20;++i){
// int tmp = (mask&x)>0?1:0;
// //debug(tmp,mask);
// if(ptr->next[1^tmp]){
// best += mask;
// val += ((1^tmp)*mask);
// ptr = ptr->next[1^tmp];
// }
// else{
// val += (tmp*mask);
// ptr = ptr->next[tmp];
// }
// mask >>= 1;
// }
// return ii(best,val);
// }
// struct segNode{
// trieNode* trie;
// segNode(){
// trie = (trieNode*)malloc(sizeof(trieNode));
// }
// }xorTree[NMAX<<2];
// inline void build(int index,int i,int j){
// xorTree[index]=segNode();
// if(i==j) return;
// int mid=(i+j)>>1;
// build(L(index),i,mid);
// build(R(index),mid+1,j);
// }
// inline void update(int index,int i,int j,int pos,int val){
// if(input[pos]) remove(xorTree[index].trie,input[pos]);
// insert(xorTree[index].trie,val);
// if(i==j) return;
// int mid=(i+j)>>1;
// if(pos<=mid) update(L(index),i,mid,pos,val);
// else update(R(index),mid+1,j,pos,val);
// }
// inline ii query(int index,int i,int j,int st,int en,int val){
// if(i==st && j==en){
// return getTrieMax(xorTree[index].trie,val);
// }
// int mid=(i+j)>>1;
// if(en<=mid) return query(L(index),i,mid,st,en,val);
// else if(st>mid) return query(R(index),mid+1,j,st,en,val);
// else{
// ii r1,r2;
// r1 = query(L(index),i,mid,st,mid,val);
// r2 = query(R(index),mid+1,j,mid+1,en,val);
// if(r1.fi>=r2.fi) return r1;
// else return r2;
// }
// }
// inline void printTree(int index,int i,int j){
// if(i==j){
// cout << i << "," << j << "---";
// printTrie(xorTree[index].trie,0);
// cout << endl;
// return;
// }
// int mid=(i+j)>>1;
// printTree(L(index),i,mid);
// printTree(R(index),mid+1,j);
// cout << i << "," << j << "---";
// printTrie(xorTree[index].trie,0);
// cout << endl;
// }
struct perNode{
perNode *l,*r;
int cnt;
perNode(){
l = r = NULL;
cnt = 0;
}
};
perNode* versions[NMAX];
inline perNode* buildPerNode(int i,int j){
perNode *root = (perNode*) malloc(sizeof(perNode));
if(i==j) return root;
int mid=(i+j)>>1;
root->l = buildPerNode(i,mid);
root->r = buildPerNode(mid+1,j);
return root;
}
inline perNode* updPerNode(perNode *root,int i,int j,int pos){
perNode *tmp = (perNode*) malloc(sizeof(perNode));
//perNode *tmp = root;
tmp->l = root->l; tmp->r = root->r; tmp->cnt = root->cnt;
tmp->cnt += 1;
if(i==j) return tmp;
int mid=(i+j)>>1;
if(pos<=mid)
tmp->l = updPerNode(root->l,i,mid,pos);
else
tmp->r = updPerNode(root->r,mid+1,j,pos);
return tmp;
}
inline int queryPerNode(perNode *root,int i,int j,int st,int en){
if(i==st && j==en) return root->cnt;
int mid=(i+j)>>1;
if(en<=mid) return queryPerNode(root->l,i,mid,st,en);
else if(st>mid) return queryPerNode(root->r,mid+1,j,st,en);
else return queryPerNode(root->l,i,mid,st,mid) + queryPerNode(root->r,mid+1,j,mid+1,en);
}
inline int askLessThan(int st,int en,int x){
return queryPerNode(versions[en],1,MAXVAL,1,x)-queryPerNode(versions[st-1],1,MAXVAL,1,x);
}
inline int findKth(perNode *st,perNode *en,int i,int j,int k){
if(i==j) return i;
int small = en->l->cnt-st->l->cnt;
int mid = (i+j)>>1;
if(k<=small) return findKth(st->l,en->l,i,mid,k);
else return findKth(st->r,en->r,mid+1,j,k-small);
}
inline void addOperation(int x){
N++;
//update(1,1,M,N,x);
trieVersions[N] = insert(trieVersions[N-1],x,1<<19);
versions[N] = updPerNode(versions[N-1],1,MAXVAL,x);
input[N] = x;
// REP(i,1,N+1){
// printTrie(trieVersions[i],0);
// cout << endl;
// }
}
inline void removeOperation(int k){
N -= k;
}
inline int maxXorOperation(int l,int r,int x){
ii res = getTrieMax(trieVersions[l-1],trieVersions[r],x);
//if(x^input[l]>res.fi) return input[l];
return res.se;
}
inline int kThOperation(int l,int r,int x){
return findKth(versions[l-1],versions[r],1,MAXVAL,x);
}
FastInput fi;
FastOutput fo;
int main(){
//ios::sync_with_stdio(false);
int type,u,v,k;
//cin >> M;
//scanint(M);
M=fi.ReadNext();
//build(1,1,M);
versions[0] = buildPerNode(1,MAXVAL);
trieVersions[0] = new trieNode();
N = 0;
REP(i,0,M){
//cin >> type;
//scanint(type);
type=fi.ReadNext();
if(type == 0){
//cin >> k;
//scanint(k);
k=fi.ReadNext();
addOperation(k);
}
else if(type == 1){
//cin >> u >> v >> k;
//scanint(u); scanint(v); scanint(k);
u=fi.ReadNext(); v=fi.ReadNext(); k=fi.ReadNext();
// cout << maxXorOperation(u,v,k) << endl;
//printf("%d\n",maxXorOperation(u,v,k));
fo.PrintUint(maxXorOperation(u,v,k),'\n');
}
else if(type == 2){
//cin >> k;
//scanint(k);
k=fi.ReadNext();
removeOperation(k);
}
else if(type == 3){
//cin >> u >> v >> k;
//scanint(u); scanint(v); scanint(k);
u=fi.ReadNext(); v=fi.ReadNext(); k=fi.ReadNext();
//cout << askLessThan(u,v,k) << endl;
//printf("%d\n",askLessThan(u,v,k));
fo.PrintUint(askLessThan(u,v,k),'\n');
}
else{
//cin >> u >> v >> k;
//scanint(u); scanint(v); scanint(k);
u=fi.ReadNext(); v=fi.ReadNext(); k=fi.ReadNext();
//cout << kThOperation(u,v,k) << endl;
//printf("%d\n",kThOperation(u,v,k));
fo.PrintUint(kThOperation(u,v,k),'\n');
}
}
fo.Flush();
return 0;
}
<file_sep>/FLIPCOIN.cpp
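// FLIPCOIN: segment tree with a lazy flip counter per node.  Operation 0 flips every coin in
// [u, v]; operation 1 returns how many coins in the range currently show heads.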
#include <bits/stdc++.h>
#define NMAX 111111
#define L(x) (x<<1)
#define R(x) ((x<<1)+1)
using namespace std;
int tree[NMAX<<2][2];
void update(int index,int i,int j,int left,int right){
//printf("%d %d\n",i,j);
if(tree[index][1]){
tree[index][0]=(tree[index][1]%2==0?tree[index][0]:(j-i+1)-tree[index][0]);
if(i!=j) tree[L(index)][1]+=tree[index][1],tree[R(index)][1]+=tree[index][1];
tree[index][1]=0;
}
if(i>right || j<left) return;
if(i>=left && j<=right) {
tree[index][0]=(j-i+1)-tree[index][0];
if(i!=j) ++tree[L(index)][1],++tree[R(index)][1];
return;
}
if(i!=j){
int mid=(i+j)>>1;
update(L(index),i,mid,left,right);
update(R(index),mid+1,j,left,right);
tree[index][0]=tree[L(index)][0]+tree[R(index)][0];
}
}
int query(int index,int i,int j,int left,int right){
if(tree[index][1]){
tree[index][0]=(tree[index][1]%2==0?tree[index][0]:(j-i+1)-tree[index][0]);
if(i!=j) tree[L(index)][1]+=tree[index][1],tree[R(index)][1]+=tree[index][1];
tree[index][1]=0;
}
if(i>right || j<left) return 0;
if(i>=left && j<=right) return tree[index][0];
if(i!=j){
int mid=(i+j)>>1;
int t1=query(L(index),i,mid,left,right);
int t2=query(R(index),mid+1,j,left,right);
tree[index][0]=tree[L(index)][0]+tree[R(index)][0];
return t1+t2;
}
}
void printTree(int index,int i,int j){
if(i==j) {
printf("%d %d - %d,%d \n",i,j,tree[index][0],tree[index][1]);
return;
}
int mid=(i+j)>>1;
printTree(L(index),i,mid);
printTree(R(index),mid+1,j);
printf("%d %d - %d,%d \n",i,j,tree[index][0],tree[index][1]);
}
int main(){
int N,Q;
memset(tree,0,sizeof(tree));
scanf("%d %d",&N,&Q);
for(int i=0;i<Q;++i){
int type,u,v;
scanf("%d %d %d",&type,&u,&v);
++u,++v;
//printf("u=%d,v=%d\n",u,v);
if(type==0) update(1,1,N,u,v);
else printf("%d\n",query(1,1,N,u,v));
//printf("\nPRINTING TREE ----> \n");
//printTree(1,1,N);
}
return 0;
}
<file_sep>/STRPALIN.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
ios::sync_with_stdio( false );
int T;
cin >> T;
while( T-- ){
string a,b;
cin >> a >> b;
vector<int> flag(26);
int f=0;
for(int i=0;i<a.size();++i) flag[a[i]-'a']=1;
for(int i=0;i<b.size();++i)
if(flag[b[i]-'a']==1){
f=1;
break;
}
if(f) cout << "Yes" << endl;
else cout << "No" << endl;
}
return 0;
}
<file_sep>/CHEFFED.cpp
#include <bits/stdc++.h>
using namespace std;
int maxSx = 81;
int maxSSx = 16;
int digitSum( int x ){
int sum = 0;
while( x > 0 ) sum += x%10, x/=10;
return sum;
}
int main(){
ios::sync_with_stdio(false);
int N, cnt = 0;
cin >> N;
for(int i=1;i<=maxSx;++i){
for(int j=1;j<=maxSSx;++j){
int x = N - i - j;
if( x < 0 ) continue;
int sx = digitSum( x );
int ssx = digitSum( sx );
if( sx == i && ssx == j ) ++cnt;
}
}
cout << cnt << endl;
return 0;
}
<file_sep>/PREDICT.cpp
#include <cstdio>
const int MAX=10000;
typedef unsigned long long x64;
int main()
{
x64 T;
scanf("%llu",&T);
while(T>0)
{
double p,res;
scanf("%lf",&p);
p=(p>0.5?p:1-p);
res=(MAX+(MAX*2*(1-p)))*p;
printf("%f\n",res);
--T;
}
return 0;
}
<file_sep>/MIXTURES.cpp
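// MIXTURES: classic interval DP.  color[i][j] is the colour obtained by mixing the whole
// segment; mix[i][j] is the minimum smoke, minimised over every split point k as
// mix[i][k] + mix[k+1][j] + color[i][k] * color[k+1][j].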
#include <bits/stdc++.h>
#define NMAX 111
#define INF 9999999
using namespace std;
int mix[NMAX][NMAX];
int color[NMAX][NMAX];
int input[NMAX];
int N;
void init(){
for(int i=0;i<N;++i){
color[i][i]=input[i];
for(int j=i+1;j<N;++j)
color[i][j]=(color[i][j-1]+input[j])%100;
}
}
void solve(){
for(int i=0;i<N;++i) mix[i][i]=0;
for(int sz=2;sz<=N;++sz){
for(int i=0;i<N;++i){
int j=i+sz-1;
if(j>=N) break;
mix[i][j]=INF;
for(int k=i;k<j;++k)
mix[i][j]=min(mix[i][j],mix[i][k]+mix[k+1][j]+color[i][k]*color[k+1][j]);
}
}
printf("%d\n",mix[0][N-1]);
}
int main(){
//int T;
//scanf("%d",&T);
while(scanf("%d",&N)>0){
//scanf("%d",&N);
for(int i=0;i<N;++i) scanf("%d",&input[i]);
init();
solve();
}
return 0;
}
<file_sep>/COINS.cpp
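// COINS: memoised recursion; the value of a Bytelandic coin n is
// max(n, f(n/2) + f(n/3) + f(n/4)), with the small cases 0..11 stored directly.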
#include <map>
#include <algorithm>
#include <iostream>
#include <stdio.h>
typedef unsigned long long ull;
using namespace std;
map<ull,ull> bytelandCoins;
void preCompute()
{
for(int i=0;i<12;++i)
bytelandCoins.insert(pair<ull,ull>(i,i));
}
ull solve(ull x)
{
ull ans;
if(bytelandCoins.find(x)!=bytelandCoins.end())
{
ans=bytelandCoins[x];
return ans;
}
ans= solve(x>>1)+solve(x>>2)+solve(x/3);
ans= ans>x? ans:x;
bytelandCoins.insert(pair<ull,ull>(x,ans));
return ans;
}
int main()
{
ull num,N[10];
int i=0;
preCompute();
while(scanf("%llu",&num)!=EOF)
{
N[i]=solve(num);
++i;
}
for(int j=0;j<i;++j)
{
printf("%llu\n",N[j]);
}
return 0;
}
<file_sep>/FRIEMEET.cpp
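// FRIEMEET: root the tree and, for every edge, count the friendly vertices inside and outside
// its subtree; summing cost * inside * outside over all edges (doubled for ordered pairs) and
// dividing by M^2 gives the expected meeting cost, printed as a fraction reduced by its gcd.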
#include <bits/stdc++.h>
#define NMAX 555555
using namespace std;
typedef pair<int,int> pii;
typedef long long int ull;
ull computeGcd(ull a, ull b)
{
if (a == 0)
return b;
return computeGcd(b%a, a);
}
ull trigger;
int N,M;
int cost[NMAX], friendly[NMAX], subtree[NMAX];
vector<pii> adj[NMAX];
int dfs( int u, int par, int edgeCost ){
cost[u] = edgeCost;
subtree[u] = 0;
if( friendly[u] ) subtree[u] = 1;
for(int i=0;i<adj[u].size();++i){
int next = adj[u][i].first, val = adj[u][i].second;
if( next != par )
subtree[u] += dfs( next, u, val );
}
return subtree[u];
}
void solve(){   // the result is printed here rather than returned, so declare it void
ull result = 0;
dfs( 1, -1, 0 );
for(int i=2;i<=N;++i){
ull inSubtree = subtree[i];
ull outOfSubtree = M - subtree[i];
ull edgeInPaths = cost[i];
edgeInPaths *= inSubtree;
edgeInPaths *= outOfSubtree;
result += edgeInPaths;
if( result > trigger ) assert(false);
}
ull allPairs = ((ull)M)*M;
result *= 2;
ull gcd = computeGcd( result, allPairs );
result /= gcd;
allPairs /= gcd;
cout << result << " " << allPairs << endl;
//cout << result/gcd << " " << allPairs/gcd << endl;
}
void resetVectors(){
for(int i=1; i<=N; ++i)
cost[i] = 0, friendly[i] = 0, subtree[i] = 0, adj[i].clear();
}
int main(){
ios::sync_with_stdio(false);
trigger = 1;
for(int i=0;i<18;++i) trigger *= 10;
int T;
cin >> T;
while( T-- ){
cin >> N >> M;
resetVectors();
for(int i=0;i<N-1;++i){
int u,v,w;
cin >> u >> v >> w;
adj[u].push_back(pii(v,w));
adj[v].push_back(pii(u,w));
}
for(int i=0;i<M;++i){
int x;
cin >> x;
friendly[x] = 1;
}
solve();
}
return 0;
}
<file_sep>/RRSTONE.cpp
#include <bits/stdc++.h>
#define NMAX 111111
using namespace std;
typedef long long int LL;
LL input[NMAX];
int main(){
LL N,K,best;
scanf("%lld %lld",&N,&K);
for(int i=0;i<N;++i){
scanf("%lld",&input[i]);
if(i>0) best=max(best,input[i]);
else best=input[i];
}
if(K>0){
LL val;
for(int i=0;i<N;++i){
input[i]=best-input[i];
if(i>0) val=max(val,input[i]);
else val=input[i];
}
if(K%2==0){
for(int i=0;i<N;++i) input[i]=val-input[i];
}
}
for(int i=0;i<N;++i) printf("%lld ",input[i]);
return 0;
}
<file_sep>/TMSLT.cpp
#include <bits/stdc++.h>
#define NMAX 3333333
#define MOD 1000000
using namespace std;
typedef long long int LL;
int strengths[MOD];
LL readLL()
{
LL cc = getc(stdin);
for (;cc < '0' || cc > '9';) cc = getc(stdin);
LL ret = 0;
for (;cc >= '0' && cc <= '9';)
{
ret = ret * 10 + cc - '0';
cc = getc(stdin);
}
return ret;
}
int main(){
LL T;
//scanf("%lld",&T);
T=readLL();
while(T--){
LL N,a,b,c,d,prev,maxval;
//scanf("%lld %lld %lld %lld %lld",&N,&a,&b,&c,&d);
N=readLL();
a=readLL();
b=readLL();
c=readLL();
d=readLL();
memset(strengths,0,sizeof(strengths));
maxval=prev=d;
strengths[prev]=1;
for(LL i=1;i<N;++i){
LL curr;
curr=(a*prev*prev)+(b*prev)+c;
if(curr>=MOD) curr=curr%MOD;
strengths[curr]+=1;
maxval=max(maxval,curr);
prev=curr;
}
LL l,r;
l=r=0;
int flag=0;
for(LL i=0;i<=maxval;++i){
if(strengths[i]){
for(LL j=0;j<strengths[i];++j){
if(flag==0){
l+=i;
flag=1;
}
else{
r+=i;
flag=0;
}
}
}
}
printf("%lld\n",abs(l-r));
}
return 0;
}
<file_sep>/SREENI.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
//freopen("1.in","r",stdin);
int T;
scanf("%d",&T);
while(T--){
string s1,s2;
vector<int> pos;
int flag=0;
cin>>s1>>s2;
if(s1.length()!=s2.length()){
printf("NO\n");
            continue; // move on to the next test case instead of exiting the program
}
for(int i=0;i<s2.length();++i)
if(s2[i]==s1[0]) pos.push_back(i);
for(int k=0;k<pos.size();++k){
int i,j=pos[k];
for(i=0;i<s1.length();++i,j=(j+1)%s2.size())
if(s1[i]!=s2[j]) break;
if(i==s1.length()){
flag=1;
break;
}
}
if(flag) printf("YES\n");
else printf("NO\n");
}
return 0;
}
<file_sep>/LEBALONS.cpp
#include <bits/stdc++.h>
#define NMAX 44
typedef double ld;
using namespace std;
int C[NMAX];
ld P[NMAX];
int flag[NMAX];
ld dp[NMAX][NMAX];
ld F[NMAX][NMAX];
int N,M;
vector<ld> group[NMAX];
void init(){
for(int i=0;i<NMAX;++i)
for(int j=0;j<NMAX;++j)
dp[i][j]=0.0;
dp[0][0]=1.0;
for(int i=1;i<NMAX;++i){
dp[i][0]=1.0;
for(int j=1;j<=i;++j)
dp[i][j]=dp[i-1][j]+dp[i-1][j-1];
}
}
ld pow2(int x){
if(x==0) return 1.0;
else if(x&1) return 2.0*pow2(x-1);
else{
ld t=pow2(x/2);
return t*t;
}
}
vector<ld> k_products_1(vector<ld> a, int n){
vector<ld> newer,older;
older=vector<ld>(n+1);
int d, i;
older[0] = 1.0;
for(d = 1; d <= n; ++d){
newer = vector<ld>(n+1);
newer[0] = a[d-1]*older[0];
for(i = 1; i <= d; ++i){
newer[i] = older[i-1] + a[d-1]*older[i];
}
older = newer;
}
return older;
}
void solve1();
int main(){
solve1();
/*init();
int T;
scanf("%d",&T);
while(T--){
memset(flag,0,sizeof(flag));
for(int i=0;i<NMAX;++i) group[i].clear();
int groups=0;
ld res=0.0;
scanf("%d %d",&N,&M);
for(int i=0;i<N;++i){
scanf("%d %lf",&C[i],&P[i]);
if(flag[C[i]]==0){
++groups;
flag[C[i]]=groups;
}
group[flag[C[i]]].push_back(P[i]);
}
ld tot=0.0;
ld sum=0.0;
vector<ld> powers(groups);
for(int i=0;i<groups;++i){
powers[i]=pow2(group[i+1].size())-1;
//printf("%lf ",powers[i]);
}
//printf("\n");
vector<ld> res2=k_products_1(powers,groups);
//for(int i=0;i<res.size();++i) printf("%lf\n",res[i]);
for(int i=M;i<=groups;++i) tot+=res2[groups-i];
for(int i=1;i<=groups;++i){
vector<ld> other_powers;
vector<ld> res3;
for(int j=1;j<=groups;++j){
if(j!=i) other_powers.push_back(pow2(group[i].size())-1);
}
res3=k_products_1(other_powers,groups-1);
double x,y;
x=y=0.0;
for(int j=0;j<group[i].size();++j) x+=group[i][j];
for(int j=max(0,M-1);j<=groups-1;++j) y+=res3[groups-1-j];
sum+=(x*y*(pow2(group[i].size()-1))/tot);
}
// printf("%0.9Lf %0.9Lf",tot,2.0*tot);
//printf("%lf\n",tot);
ld rem=pow2(N-M);
// tot+=dp[groups][M]*rem;
if(M==0){
for(int j=0;j<N;++j) sum+=((rem-1)*(P[j]/tot));
}
else{
for(int j=0;j<N;++j) sum+=(dp[groups-1][M-1]*rem*(P[j]/tot));
}
res=sum;
printf("%0.9lf\n",res);
}*/
return 0;
}
void solve1(){
init();
int T;
scanf("%d",&T);
while(T--){
memset(flag,0,sizeof(flag));
for(int i=0;i<NMAX;++i) group[i].clear();
int groups=0;
ld res=0.0;
scanf("%d %d",&N,&M);
for(int i=0;i<N;++i){
scanf("%d %lf",&C[i],&P[i]);
if(flag[C[i]]==0){
++groups;
flag[C[i]]=groups;
}
group[flag[C[i]]].push_back(P[i]);
}
ld tot=0.0;
ld sum=0.0;
vector<ld> powers(groups);
for(int i=0;i<groups;++i){
powers[i]=pow2(group[i+1].size())-1;
//printf("%lf ",powers[i]);
}
//printf("\n");
vector<ld> res2=k_products_1(powers,groups);
//for(int i=0;i<res.size();++i) printf("%lf\n",res[i]);
for(int i=M;i<=groups;++i) tot+=res2[groups-i];
for(int i=1;i<=groups;++i){
vector<ld> other_powers;
vector<ld> res3;
for(int j=1;j<=groups;++j){
if(j!=i) other_powers.push_back(pow2(group[j].size())-1);
}
res3=k_products_1(other_powers,groups-1);
double x,y;
x=y=0.0;
for(int j=0;j<group[i].size();++j) x+=group[i][j];
for(int j=max(0,M-1);j<=groups-1;++j) y+=res3[groups-1-j];
sum+=(x*y*(pow2(group[i].size()-1))/tot);
}
/*// printf("%0.9Lf %0.9Lf",tot,2.0*tot);
//printf("%lf\n",tot);
ld rem=pow2(N-M);
// tot+=dp[groups][M]*rem;
if(M==0){
for(int j=0;j<N;++j) sum+=((rem-1)*(P[j]/tot));
}
else{
for(int j=0;j<N;++j) sum+=(dp[groups-1][M-1]*rem*(P[j]/tot));
}
*/
res=sum;
printf("%0.9lf\n",res);
}
}
<file_sep>/CLETAB.cpp
#include <bits/stdc++.h>
#define NMAX 222
#define MMAX 444
using namespace std;
int N,M;
int pages[MMAX];
set<int> buffer;
set<int>::iterator bufferItr;
void solve(){
int currPage = 0;
int pageFaults = 0;
buffer.clear();
while( currPage < M ){
if( buffer.find( pages[currPage] ) == buffer.end() ){
++pageFaults;
if( buffer.size() < N ) buffer.insert( pages[currPage] );
else{
int mxPage,mxDist;
mxPage = mxDist = -1;
for( bufferItr = buffer.begin(); bufferItr != buffer.end(); ++ bufferItr ){
int page = *bufferItr;
int dist = MMAX + NMAX;
for( int i = currPage + 1; i < M; ++i )
if( pages[i] == page )
dist = min( dist, i );
if( mxPage == -1 || dist > mxDist) mxPage = page, mxDist = dist;
}
buffer.erase( mxPage );
buffer.insert( pages[currPage] );
}
}
++currPage;
}
printf("%d\n", pageFaults );
}
int main(){
int T;
scanf("%d",&T);
while(T--){
scanf("%d %d",&N,&M);
for(int i=0;i<M;++i) scanf("%d",&pages[i]);
solve();
}
return 0;
}
<file_sep>/CSS2.cpp
#include <bits/stdc++.h>
using namespace std;
typedef pair<int,int> pi;
typedef long long int ll;
typedef pair<ll,pi> pp;
map<ll,pi> dict;
int main(){
int n,m;
scanf("%d %d",&n,&m);
int id,attr,val,pri;
for(int i=0;i<n;++i){
scanf("%d %d %d %d",&id,&attr,&val,&pri);
ll key=(ll(id)*1000000)+ll(attr);
if(dict.find(key)==dict.end())
dict.insert(pp(key,pi(val,pri)));
else
dict[key]=dict[key].second>pri?dict[key]:pi(val,pri);
}
for(int i=0;i<m;++i){
scanf("%d %d",&id,&attr);
ll key=(ll(id)*1000000)+ll(attr);
printf("%d\n",dict[key].first);
}
return 0;
}
<file_sep>/CHEFSPL.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
ios::sync_with_stdio(false);
int D;
cin >> D;
while( D-- ){
string s;
cin >> s;
int sz=s.size();
if(sz == 1) cout << "NO" << endl;
else if(sz&1){
int i=0, cnt=0, j=sz/2 + 1;
while( i <= sz/2 && cnt < 2 ){
if( s[i] != s[j] ) cnt++, ++i;
else ++i, ++j;
}
if( cnt < 2 ) cout << "YES" << endl;
else{
reverse(s.begin(),s.end());
i=0, cnt=0, j=sz/2 + 1;
while( i <= sz/2 && cnt < 2 ){
if( s[i] != s[j] ) cnt++, ++i;
else ++i, ++j;
}
if( cnt < 2 ) cout << "YES" << endl;
else cout << "NO" << endl;
}
}
else{
int flag = 0;
for(int i=0,j=sz/2;j<sz;++i,++j)
if(s[i] != s[j]){
flag=1;
break;
}
if( flag ) cout << "NO" << endl;
else cout << "YES" << endl;
}
}
return 0;
}
<file_sep>/CRAWA.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
int T;
scanf("%d",&T);
for(int t=0;t<T;++t){
int x,y,flag=0;
scanf("%d %d",&x,&y);
if(x==0){
if(y==0) printf("YES\n");
else flag=1;
}
else if(x==1){
if(y==1 || y==2) printf("YES\n");
else flag=1;
}
else if(x>0){
if((x+1)&1) flag=1;
else{
int d=(x-1)/2;
if(y>=(-d*2)&&y<=((d+1)*2)) printf("YES\n");
else flag=1;
}
}
else{
int tmp=-x;
if(tmp&1) flag=1;
else{
if(y>=(-tmp) && y<=tmp) printf("YES\n");
else flag=1;
}
}
if(flag){
if(y>0){
if(y&1) printf("NO\n");
else{
if(x>=(-y)&&x<=(y-1)) printf("YES\n");
else printf("NO\n");
}
}
else{
int tmp=(-y);
if(tmp&1) printf("NO\n");
else{
if(x>=(-tmp)&&x<=(tmp+1)) printf("YES\n");
else printf("NO\n");
}
}
}
}
return 0;
}
<file_sep>/FIBQ.cpp
#include <bits/stdc++.h>
#define NMAX 111111
#define MOD 1000000007
#define MOD2INV 500000004
#define L(x) (x<<1)
#define R(x) ((x<<1)|1)
using namespace std;
typedef long long int ll;
template <typename T>
inline T fpow( T base, int exp ){
T res = 1;
while( exp ){
if( exp & 1 ) res= (res*base)%MOD;
base=(base*base)%MOD;
exp >>= 1;
}
return res;
}
struct ComplexNumber {
ll real, img;
ComplexNumber( ll x=0, ll y=0 ){
real = x, img = y;
}
ComplexNumber operator+(const ComplexNumber& x){
return ComplexNumber( real+x.real, img+x.img );
}
ComplexNumber operator-(const ComplexNumber& x){
return ComplexNumber( real-x.real, img-x.img );
}
ComplexNumber operator%(const ll& mod){
return ComplexNumber( real%mod, img%mod );
}
ComplexNumber operator*(const ComplexNumber& x){
return ComplexNumber( real*x.real + 5*img*x.img, real*x.img + img*x.real );
}
    ComplexNumber& operator=(const ComplexNumber& x){
        real = x.real, img = x.img;
        return *this; // returning *this avoids flowing off the end of a value-returning function
    }
ll getIm(){ return img; }
friend ostream &operator<<( ostream &output, const ComplexNumber &D )
{
output << D.real << " + " << D.img << "i";
return output;
}
};
ComplexNumber phiPower( int n ){
return fpow( ComplexNumber(1,1), n );
}
int input[NMAX];
ComplexNumber tree[NMAX<<3];
void printTree( int idx, int i, int j ){
cout << i << "," << j << " " << tree[idx] << endl;
if(i==j) return;
int mid=(i+j)>>1;
printTree(L(idx),i,mid);
printTree(R(idx),mid+1,j);
}
ll revmod( int n ){
return fpow( (ll)MOD2INV, n );
}
void init( int idx, int i, int j ){
if( i==j ){
tree[idx] = phiPower( input[i] );
tree[idx] = tree[idx] * revmod(input[i]), tree[idx] = tree[idx] % MOD;
tree[idx] = tree[idx] + ComplexNumber(1,0), tree[idx] = tree[idx]%MOD;
//cout << i << "," << j << " " << tree[idx] << endl;
return;
}
int mid=(i+j)>>1;
init( L(idx), i, mid );
init( R(idx), mid + 1, j );
tree[idx] = tree[L(idx)]*tree[R(idx)];
tree[idx] = tree[idx]%MOD;
}
ComplexNumber query( int idx, int i, int j, int lt, int rt ){
if( i==lt && j==rt ) return tree[idx];
int mid=(i+j)>>1;
if( rt <= mid ) return query( L(idx), i, mid, lt, rt );
else if ( lt > mid ) return query( R(idx), mid + 1, j, lt, rt );
else{
ComplexNumber res = query( L(idx), i, mid, lt, mid )*query( R(idx), mid + 1, j, mid + 1, rt );
res =res%MOD;
return res;
}
}
void update( int idx, int i, int j, int pos, int val ){
if( i == j && i == pos ){
tree[idx] = phiPower( val );
tree[idx] = tree[idx] * revmod(val), tree[idx] = tree[idx] % MOD;
tree[idx] = tree[idx] + ComplexNumber(1,0), tree[idx] = tree[idx]%MOD;
return;
}
int mid=(i+j)>>1;
if( pos <= mid ) update(L(idx), i, mid, pos, val);
else update( R(idx), mid+1, j, pos, val );
tree[idx] = tree[L(idx)]*tree[R(idx)];
tree[idx] = tree[idx]%MOD;
}
int main(){
ios::sync_with_stdio(false);
//cout << phiPower(1) << "\t" << phiPower(2) << "\t" << phiPower(3) << endl;
//cout << revmod[0] << endl << revmod[1] << endl << (phiPower(2) * revmod[2])%MOD << endl << (phiPower(3)*revmod[3])%MOD << endl;
int N,Q;
cin >> N >> Q;
for(int i=1;i<=N;++i)
cin >> input[i];
init(1,1,N);
//printTree(1,1,N);
for(int i=0;i<Q;++i){
string type;
int x,y;
cin >> type >> x >> y;
if( type[0] == 'Q' ){
ll res = query( 1, 1, N, x, y ).getIm();
res = (res * 2) % MOD;
cout << res << endl;
}
else{
update( 1, 1, N, x, y );
}
}
return 0;
}
<file_sep>/BINTREE.cpp
#include <bits/stdc++.h>
using namespace std;
typedef long long int LL;
inline LL find_depth(LL x){
LL res=0;
while(x>1) x=x>>1,res++;
return res;
}
inline LL lca(LL x,LL y){
LL res=0;
while(x!=y){
if(find_depth(x)<find_depth(y)) y=y>>1;
else x=x>>1;
++res;
}
return res;
}
int main()
{
int N;
scanf("%d",&N);
while(N--){
int i,j;
scanf("%d %d",&i,&j);
printf("%lld\n",lca(i,j));
}
return 0;
}
<file_sep>/RESERVOI.cpp
#include <bits/stdc++.h>
#define NMAX 1111
using namespace std;
string str[NMAX];
int N,M;
int main(){
ios::sync_with_stdio(false);
int T;
cin >> T;
while(T--){
cin >> N >> M;
for(int i=0;i<N;++i) cin >> str[i];
int flag = 0;
for(int i=0;i<N;++i){
if(str[i][0] == 'W' || str[i][M-1] == 'W'){
flag = 1;
break;
}
}
for(int i=0;i<M;++i){
if(str[N-1][i] == 'W'){
flag = 1;
break;
}
}
for(int i=0;i<N;++i){
for(int j=0;j<M;++j){
if(str[i][j] == 'W'){
if(j>0 && str[i][j-1]=='A'){
flag = 1;
break;
}
else if(j<M-1 && str[i][j+1]=='A'){
flag = 1;
break;
}
else if(i<N-1 && str[i+1][j]=='A'){
flag = 1;
break;
}
}
if(str[i][j] == 'A'){
if(i>0 && str[i-1][j]=='B'){
flag = 1;
break;
}
}
}
if(flag == 1) break;
}
if(flag == 1) cout << "no" << endl;
else cout << "yes" << endl;
}
return 0;
}
<file_sep>/BINTOUR.cpp
#include <bits/stdc++.h>
#define NMAX (1<<19)
typedef long long int LL;
const LL MOD=1000000009;
using namespace std;
vector<LL> f;
LL pow(LL a, LL b)
{
LL x=1,y=a;
while(b > 0)
{
if(b%2 == 1)
{
x=(x*y);
if(x>MOD) x%=MOD;
}
y = (y*y);
if(y>MOD) y%=MOD;
b /= 2;
}
return x;
}
LL InverseEuler(LL n)
{
return pow(n,MOD-2);
}
LL C(LL n, LL r)
{
return (f[n]*((InverseEuler(f[r]) * InverseEuler(f[n-r])) % MOD)) % MOD;
}
int main(){
int K,N;
scanf("%d",&K);
N=1<<K;
f=vector<LL>(N+1,1);
for (int i=2; i<=N;i++)
f[i]= (f[i-1]*i) % MOD;
/*vector<LL> fact(N+1,0),ifact(N+1,0);
fact[0]=fact[1]=1;
ifact[0]=ifact[1]=1;
for(int i=2;i<N+1;++i){
fact[i]=(fact[i-1]*LL(i)) % MOD;
// ifact[i]=((-(MOD / i) * ifact[MOD % i]) % MOD) + MOD;
ifact[i]=(modInverse(i)*ifact[i-1])%MOD;
}
*/
int L=N>>1;
for(int i=1;i<L;++i) printf("%d\n",0);
for(int i=L;i<N;++i){
/* LL n=fact[i-1];
LL d=(ifact[L-1]*ifact[i-L])%MOD;
LL res=(n*d)%MOD;
res=(res*fact[L])%MOD;
res=(res*fact[L])%MOD;
*/
LL res=C(i-1,L-1);
res=(((res*f[L])%MOD)*f[L])%MOD;
res=(res*2)%MOD;
printf("%lld\n",res);
}
printf("%lld",f[N]);
return 0;
}
<file_sep>/FLAGS.cpp
#include <bits/stdc++.h>
#define NMAX 11111
using namespace std;
typedef long long int ll;
int main(){
int T;
scanf("%d",&T);
while(T--){
ll N;
scanf("%lld",&N);
if(N==1) printf("0\n");
else if(N==2) printf("4\n");
else if(N==3) printf("42\n");
else{
ll res = N*(N-1)*2 + (5*N*(N-1)*(N-2)) + (2*N*(N-1)*(N-2)*(N-3));
printf("%lld\n",res);
}
}
return 0;
}
<file_sep>/FIRESC.cpp
#include <stdio.h>
#include <iostream>
#define MOD 1000000007
struct node
{
int rank,elements;
node *parent;
};
node *nodes[100001];
void MakeSet(node *x)
{
x->parent=x;
x->rank=0;
x->elements=1;
}
node * Find(node *x)
{
if(x->parent!=x)
x->parent=Find(x->parent);
return x->parent;
}
void Link(node *x,node *y)
{
if(x->rank > y->rank)
{
y->parent=x;
x->elements+=y->elements;
}
else
{
x->parent=y;
y->elements+=x->elements;
if(x->rank==y->rank)
y->rank+=1;
}
}
void Union(node *x,node *y)
{
Link(Find(x),Find(y));
}
void CreateDisjointSets(int &groups,unsigned long &leaders)
{
int N,M,count=0;
unsigned long long ways=1;
scanf("%d %d",&N,&M);
for(int i=1;i<=N;++i)
{
node *newNode=new node();
nodes[i]=(newNode);
MakeSet(newNode);
}
for(int j=0;j<M;++j)
{
int u,v;
scanf("%d%d",&u,&v);
if(Find(nodes[u])!=Find(nodes[v]))
{
Union(nodes[u],nodes[v]);
}
}
for(int i=1;i<=N;++i)
{
if(nodes[i]->parent==nodes[i])
{
++count;
ways=ways*nodes[i]->elements;
if(ways>MOD)
ways=ways%MOD;
}
}
groups=count;
leaders=ways;
}
int main()
{
int T,ans[5];
unsigned long leader[5];
scanf("%d",&T);
for(int i=0;i<T;++i)
{
CreateDisjointSets(ans[i],leader[i]);
printf("%d %lu\n",ans[i],leader[i]);
}
}
<file_sep>/LEPAINT.cpp
#include <bits/stdc++.h>
#define NMAX 55
#define CMAX 111
using namespace std;
typedef double ld;
int query[NMAX];
ld prob[NMAX][CMAX];
int n,c,k;
void solve(){
ld ci = ld(1)/c;
ld res=0.0;
ld exp=0.0;
for(int i=0;i<k+1;++i)
for(int j=0;j<c;++j)
prob[i][j]=0.0;
prob[0][1]=1.0;
for(int i=0;i<k+1;++i){
for(int j=0;j<c;++j){
if(prob[i][j]>0.0){
prob[i+1][j] += (prob[i][j]/2.0);
for(int col=0;col<c;++col){
int newCol = (col*j)%c;
prob[i+1][newCol] += ((ci)*(prob[i][j]/2.0));
}
}
//cout<<prob[i][j]<<" ";
}
//cout<<endl;
}
for(int i=0;i<n;++i){
int indx=query[i];
exp=0.0;
for(int j=0;j<c;++j) exp += (ld(j)*prob[indx][j]);
res += exp;
//cout<< query[i]<<" "<<exp << endl;
}
printf("%.9lf\n",res);
}
int main(){
int T;
scanf("%d",&T);
while(T--){
memset(query,0,sizeof(query));
scanf("%d %d %d",&n,&c,&k);
for(int i=0;i<k;++i){
int l,r;
scanf("%d %d",&l,&r);
--l,--r;
for(int j=l;j<=r;++j) query[j] += 1;
}
solve();
}
return 0;
}
<file_sep>/PERMSUFF.cpp
#include <bits/stdc++.h>
using namespace std;
typedef pair<int,int> pii;
int main(){
int T;
scanf("%d",&T);
while(T--){
int N,M;
scanf("%d %d",&N,&M);
vector<int> input(N),flag(N,0);
vector<pii> intervals;
for(int i=0;i<N;++i) scanf("%d",&input[i]);
for(int i=0;i<M;++i){
int u,v;
scanf("%d %d",&u,&v);
intervals.push_back(pii(u,v));
}
sort(intervals.begin(),intervals.end());
stack<pii> s;
s.push(intervals[0]);
for(int i=1;i<M;++i){
pii t = s.top();
if( intervals[i].first > t.second )
s.push(intervals[i]);
else if( intervals[i].second > t.second ){
t.second=intervals[i].second;
s.pop();
s.push(t);
}
}
int poss=1;
while(!s.empty()){
pii curr = s.top();
s.pop();
for(int i=curr.first;i<=curr.second;++i){
if( input[i-1]>curr.second || input[i-1]<curr.first ){
poss=0;
break;
}
flag[i-1]=1;
}
}
if(!poss) printf("Impossible\n");
else{
for(int i=0;i<N;++i)
if(flag[i]==0 && input[i]!=i+1){
poss=0;
break;
}
if(poss) printf("Possible\n");
else printf("Impossible\n");
}
}
return 0;
}
<file_sep>/TOURISTS.cpp
// author noob333
// H + Shift C + B + E + B + E
#include <bits/stdc++.h>
#define NMAX 100002
#define EMAX 200002
#define gc getchar_unlocked
#define DEBUG 1
using namespace std;
// Input macros
#define s(n) scanf("%d",&n)
#define sc(n) scanf("%c",&n)
#define sl(n) scanf("%lld",&n)
#define sf(n) scanf("%lf",&n)
#define ss(n) scanf("%s",n)
// Useful constants
#define INF (int)1e9
#define EPS 1e-9
// Useful hardware instructions
#define bitcount __builtin_popcount
#define gcd __gcd
// Useful container manipulation / traversal macros
#define REP(i,a,b) for(int i=a;i<b;i++)
#define RREP(i,a,b) for(int i=a;i>b;i--)
#define foreach(v, c) for( typeof( (c).begin()) v = (c).begin(); v != (c).end(); ++v)
#define all(a) a.begin(), a.end()
#define in(a,b) ( (b).find(a) != (b).end())
#define pb push_back
#define fill(a,v) memset(a, v, sizeof a)
#define sz(a) ((int)(a.size()))
#define mp make_pair
#define fi first
#define se second
// Some common useful functions
#define maX(a,b) ( (a) > (b) ? (a) : (b))
#define miN(a,b) ( (a) < (b) ? (a) : (b))
#define checkbit(n,b) ( (n >> b) & 1)
#define DREP(a) sort(all(a)); a.erase(unique(all(a)),a.end())
#define INDEX(arr,ind) (lower_bound(all(arr),ind)-arr.begin())
typedef long long ll;
typedef vector<int> vi;
typedef vector<vector<int> > vvi;
typedef vector<ll> vl;
typedef pair<int, int> ii;
typedef vector<ii> vii;
typedef set<int> si;
typedef map<string, int> msi;
#ifdef DEBUG
#define debug(args...) {dbg,args; cerr<<endl;}
#else
#define debug(args...) // Just strip off all debug tokens
#endif
struct debugger
{
template<typename T> debugger& operator , (const T& v)
{
cerr<<v<<" ";
return *this;
}
} dbg;
template <typename T1, typename T2>
inline std::ostream& operator << (std::ostream& os, const std::pair<T1, T2>& p)
{
return os << "(" << p.first << ", " << p.second << ")";
}
template<typename T>
inline std::ostream &operator << (std::ostream & os,const std::vector<T>& v)
{
bool first = true;
os << "[";
for(unsigned int i = 0; i < v.size(); i++)
{
if(!first)
os << ", ";
os << v[i];
first = false;
}
return os << "]";
}
template<typename T>
inline std::ostream &operator << (std::ostream & os,const std::set<T>& v)
{
bool first = true;
os << "[";
for (typename std::set<T>::const_iterator ii = v.begin(); ii != v.end(); ++ii)
{
if(!first)
os << ", ";
os << *ii;
first = false;
}
return os << "]";
}
template<typename T1, typename T2>
inline std::ostream &operator << (std::ostream & os,const std::map<T1, T2>& v)
{
bool first = true;
os << "[";
for (typename std::map<T1, T2>::const_iterator ii = v.begin(); ii != v.end(); ++ii)
{
if(!first)
os << ", ";
os << *ii ;
first = false;
}
return os << "]";
}
void scanint(int &x)
{
register int c = gc();
x = 0;
for(;(c<48 || c>57);c = gc());
for(;c>47 && c<58;c = gc()) {x = (x<<1) + (x<<3) + c - 48;}
}
int nodeDegree[NMAX], visited[NMAX];
vector<int> adjacencyList[NMAX];
int edgeStart[EMAX], edgeEnd[EMAX], direction[EMAX];
int nodeLastIndex[NMAX];
void eulerTour(int u){
while(nodeLastIndex[u]<adjacencyList[u].size()){
int idx=adjacencyList[u][nodeLastIndex[u]++];
if(direction[idx] == 0){
int st = edgeStart[idx], en = edgeEnd[idx];
if(st == u){
direction[idx] = +1;
eulerTour(en);
}
else{
direction[idx] = -1;
eulerTour(st);
}
}
}
}
void dfs(int u){
visited[u] = 1;
for(int i=0;i<adjacencyList[u].size();++i){
int idx = adjacencyList[u][i];
int st = edgeStart[idx], en = edgeEnd[idx];
if(st == u){
if(!visited[en]) dfs(en);
}
else{
if(!visited[st]) dfs(st);
}
}
}
int main(){
//ios::sync_with_stdio(false);
int N, E;
//cin >> N >> E;
//scanf("%d %d",&N,&E);
scanint(N);
scanint(E);
for(int i=0;i<E;++i){
int u,v;
//cin >> u >> v;
//scanf("%d %d",&u,&v);
scanint(u);
scanint(v);
edgeStart[i] = u;
edgeEnd[i] = v;
adjacencyList[u].pb(i);
adjacencyList[v].pb(i);
nodeDegree[u]++;
nodeDegree[v]++;
}
dfs(1);
for(int i=1;i<=N;++i){
if(!visited[i] || nodeDegree[i]%2){
//cout << "NO" << endl;
printf("NO\n");
return 0;
}
}
eulerTour(1);
//cout << "YES" << endl;
printf("YES\n");
for(int i=0;i<E;++i){
if(direction[i] == -1)
printf("%d %d\n",edgeEnd[i], edgeStart[i]);
else
printf("%d %d\n",edgeStart[i], edgeEnd[i]);
}
return 0;
}
<file_sep>/REIGN.cpp
#include <iostream>
#include <cstdio>
#include <climits>
#define NMAX 111111
using namespace std;
typedef long long int LL;
int N,K;
int input[NMAX];
LL forward[NMAX],backward[NMAX];
int main(){
int T;
scanf("%d",&T);
while(T>0){
scanf("%d %d",&N,&K);
for(int i=0;i<N;++i) scanf("%d",&input[i]);
LL sumF,sumB;
LL bestF,bestB;
sumF=sumB=0;
bestF=bestB=LLONG_MIN;
for(int i=0,j=N-1;i<N && j>=0;++i,--j){
sumF+=input[i];
sumB+=input[j];
bestF=max(bestF,sumF);
bestB=max(bestB,sumB);
if(sumF<0) sumF=0;
if(sumB<0) sumB=0;
forward[i]=bestF;
backward[j]=bestB;
}
LL bestSplit=LLONG_MIN;
for(int i=0;i<N-K-1;++i) bestSplit=max(bestSplit,forward[i]+backward[i+K+1]);
printf("%lld\n",bestSplit);
--T;
}
return 0;
}
<file_sep>/DELISH.cpp
#include <cstdio>
#include <vector>
#include <climits>
using namespace std;
typedef long long int x64;
vector<x64> input;
inline x64 absolute(x64 x)
{
if(x>0) return x;
else return (-x);
}
void solve()
{
vector<x64> leftMax,leftMin,rightMax,rightMin;
int len=input.size();
leftMax=vector<x64>(len,0);
leftMin=vector<x64>(len,0);
rightMax=vector<x64>(len,0);
rightMin=vector<x64>(len,0);
/*leftMax[0]=input[0];
leftMin[0]=input[0];
rightMax[len-1]=input[len-1];
rightMin[len-1]=input[len-1];*/
x64 maxResult=LLONG_MIN;
x64 maxSum=0;
x64 minResult=LLONG_MAX;
x64 minSum=0;
for(int i=0;i<len;++i)
{
maxSum+=input[i];
if(maxSum>maxResult) maxResult=maxSum;
if(maxSum<0) maxSum=0;
leftMax[i]=maxResult;
minSum+=input[i];
if(minSum<minResult) minResult=minSum;
if(minSum>0) minSum=0;
leftMin[i]=minResult;
}
maxResult=LLONG_MIN;
maxSum=0;
minResult=LLONG_MAX;
minSum=0;
for(int i=len-1;i>=0;--i)
{
maxSum+=input[i];
if(maxSum>maxResult) maxResult=maxSum;
if(maxSum<0) maxSum=0;
rightMax[i]=maxResult;
minSum+=input[i];
if(minSum<minResult) minResult=minSum;
if(minSum>0) minSum=0;
rightMin[i]=minResult;
}
x64 result=LLONG_MIN;
for(int i=0;i<len-1;++i)
{
x64 leftHigh,leftLow,rightHigh,rightLow;
leftHigh=leftMax[i];
leftLow=leftMin[i];
rightHigh=rightMax[i+1];
rightLow=rightMin[i+1];
result=max(result,max(absolute(rightHigh-leftLow),absolute(leftHigh-rightLow)));
}
printf("%lld\n",result);
}
int main()
{
int T;
scanf("%d",&T);
while(T>0)
{
int N;
scanf("%d",&N);
input=vector<x64>(N,0);
for(int i=0;i<N;++i)
scanf("%lld",&input[i]);
solve();
--T;
}
return 0;
}
<file_sep>/FENWITER.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
ios::sync_with_stdio( false );
int T;
cin >> T;
while( T-- ){
string L1,L2,L3;
long long int N;
cin >> L1 >> L2 >> L3 >> N;
long long int result = 0;
for(int i=0;i<L1.size();++i)
result += (L1[i] == '1' ? 1:0);
for(int i=0;i<L3.size();++i)
result += (L3[i] == '1' ? 1:0);
for(int i=0;i<L2.size();++i)
result += (L2[i] == '1' ? N:0);
if( result == 0 ){
cout << result << endl;
continue;
}
//cout << "init:" << result << endl;
int check=L3.size()-1;
while( check >=0 ){
if( L3[check] == '0' ) break;
-- result, -- check;
}
if( check >= 0 ){
cout << result + 1 << endl;
continue;
}
check = L2.size()-1;
long long int tmp = 0;
while( check >=0 ){
if( L2[check] == '0' ) break;
++ tmp, -- check;
}
if( check >= 0 ){
result -= tmp;
cout << result + 1 << endl;
continue;
}
result -= (N*tmp);
check = L1.size()-1;
while( check >=0 ){
if( L1[check] == '0' ) break;
-- result, -- check;
}
cout << result + 1 << endl;
}
return 0;
}
<file_sep>/KINGCON.cpp
#include <stdio.h>
#include <vector>
using namespace std;
typedef vector<int> VI;
typedef vector<VI> VVI;
class Graph
{
VVI adjList;
int missile;
vector<int> lowValues,dfsValues,visited,parent,articulation;
int articulationPoints;
int root,rootCount;
int counter;
public:
void CreateList(int,int,int);
void CountArticulationPoints(int);
long long int GetCost();
};
long long int Graph::GetCost()
{
return ((long long int)missile)*articulationPoints;
}
void Graph::CreateList(int Vertices,int Edges,int val)
{
int N1,N2;
adjList=VVI(Vertices,VI());
missile=val;
lowValues.resize(Vertices,0);
dfsValues.resize(Vertices,0);
visited.resize(Vertices,0);
parent.resize(Vertices,0);
articulation.resize(Vertices,0);
for(int i=0;i<Edges;++i)
{
scanf("%d %d",&N1,&N2);
adjList[N1].push_back(N2);
adjList[N2].push_back(N1);
}
articulationPoints=counter=root=rootCount=0;
CountArticulationPoints(root);
for(int i=0;i<Vertices;++i)
{
articulationPoints+=articulation[i];
}
}
void Graph::CountArticulationPoints(int v)
{
visited[v]=1;
lowValues[v]=dfsValues[v]=counter++;
for(VI::iterator itr=adjList[v].begin();itr!=adjList[v].end();++itr)
{
int val=(*itr);
if(!visited[val])
{
parent[val]=v;
if(v==root)
++rootCount;
CountArticulationPoints(val);
if(lowValues[val]>=dfsValues[v])
{
if(v==root)
{
if(rootCount>1)
{
articulation[v]=1;
}
}
else
{
articulation[v]=1;
}
}
lowValues[v]=min(lowValues[v],lowValues[val]);
}
else if(parent[v]!=val)
{
lowValues[v]=min(lowValues[v],dfsValues[val]);
}
}
}
int main()
{
int N,E,m,T;
scanf("%d",&T);
while(T>0)
{
Graph g;
scanf("%d %d %d",&N,&E,&m);
g.CreateList(N,E,m);
printf("%lld\n",g.GetCost());
--T;
}
return 0;
}
<file_sep>/CHBLLNS.cpp
#include <bits/stdc++.h>
using namespace std;
int main(){
ios::sync_with_stdio( false );
int T;
cin >> T;
while( T-- ){
long long int R,G,B,K, res = 0;
cin >> R >> G >> B >> K;
res += min( K-1, R );
res += min( K-1, G );
res += min( K-1, B );
cout << res + 1 << endl;
}
return 0;
}
| 30b68b378f4d3b7229d6f998295d228f1c011021 | [
"Markdown",
"C",
"C++"
] | 118 | C++ | maraghuram/Codechef-Solutions | 3aeb8f0528210e13e17537c692731bf01e01dc87 | 9310a559253082b552cca721f6a99af27019f6a4 | |
refs/heads/master | <repo_name>universaln/MetodaTrapezow<file_sep>/MetodaTrapezów.cpp
#define _USE_MATH_DEFINES
#include <iostream>
#include <conio.h>
#include <math.h>
using namespace std;
void bladWczytywania()
{
if (cin.fail())
{
system("cls");
cout << "Zle dane";
exit(0);
}
}
float poleTrapezu(float a, float b, float h)
{
return (a + b) * h / 2;
}
float wartoscFunkcjiKwadratowej(float a, float b, float c, float x)
{
return a * x * x + b * x + c;
}
float wartoscFunkcjiSzescian(float a, float b, float c, float x)
{
return a * x * x * x + b * x * x + c * x;
}
float funkcjaKwadratowaTrapez(float a, float b, float c, int n, float A, float B)
{
float h = (B - A) / n;
float poleCalkowite = 0;
for (int i = 0; i < n; i++)
{
float a2 = wartoscFunkcjiKwadratowej(a, b, c, A + h * i);
float b2 = wartoscFunkcjiKwadratowej(a, b, c, A + h * (i + 1));
poleCalkowite += poleTrapezu(a2, b2, h);
}
return poleCalkowite;
}
float funkcjaKwadratowaCalka(float a, float b, float c, float A, float B)
{
float a2 = a / 3;
float b2 = b / 2;
float c2 = c;
return wartoscFunkcjiSzescian(a2, b2, c2, B) - wartoscFunkcjiSzescian(a2, b2, c2, A);
}
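
// Illustrative example: a quick sanity check of the trapezoid rule against the exact integral for f(x) = x^2 on <0, 1>.
// With n = 4 the composite sum is 0.25 * (0 + 0.0625 + 0.25 + 0.5625 + 0.5) = 0.34375, while the exact value is 1/3 ~ 0.33333,
// so the rule slightly overestimates a convex function and the error shrinks as n grows. The helper below is only a sketch
// (its name is invented for illustration) and is never called by the program.
void przykladowyTestTrapezu()
{
    float przyblizenie = funkcjaKwadratowaTrapez(1, 0, 0, 4, 0, 1); // ~0.34375
    float dokladnie = funkcjaKwadratowaCalka(1, 0, 0, 0, 1);        // ~0.33333
    cout << "Trapez (n=4): " << przyblizenie << ", calka: " << dokladnie << endl;
}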
void obslugaFunkcjiKwadratowej()
{
float a, b, c;
cout << "Podaj wspolczynniki rzeczywiste a (rozne od 0), b, c (ax^2+bx+c): " << endl;
cout << "a = ";
cin >> a;
bladWczytywania();
while (true)
{
if (a == 0)
{
cout << "a ma byc rozne od 0!!!" << endl;
cout << "Podaj a: ";
cin >> a;
}
else break;
}
cout << "b = ";
cin >> b;
bladWczytywania();
cout << "c = ";
cin >> c;
bladWczytywania();
float A, B;
cout << "Podaj przedzial <A, B>: " << endl;
cout << "A = ";
cin >> A;
bladWczytywania();
cout << "B = ";
cin >> B;
bladWczytywania();
if (A > B)
{
cout << "Podales zly przedzial, zamieniam liczby" << endl;
float buff = A;
A = B;
B = buff;
}
int n;
cout << "Podaj gestosc podzialu n: ";
cin >> n;
bladWczytywania();
cout << "Metoda trapezow: " << funkcjaKwadratowaTrapez(a, b, c, n, A, B) << endl;
cout << "Blad procentowy: " <<
(funkcjaKwadratowaTrapez(a, b, c, n, A, B) - funkcjaKwadratowaCalka(a, b, c, A, B)) / funkcjaKwadratowaCalka(a, b, c, A, B);
cout << "\nCalka: " << funkcjaKwadratowaCalka(a, b, c, A, B) << endl;
}
double degToRad(float degrees)
{
return degrees * M_PI / 180;
}
double sinusTrapez(float A, float B, int n)
{
double h = degToRad((B - A) / n);
double poleCalkowite = 0;
double A2 = degToRad(A);
for (int i = 0; i < n; i++)
{
poleCalkowite += (h/2)*(sin(A2+h*i)+sin(A2+h*(i+1)));
}
return poleCalkowite;
}
double sinusCalka(float A, float B)
{
return -cos(degToRad(B)) + cos(degToRad(A));
}
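
// Illustrative note: on <0, 90> degrees the exact value is -cos(90 deg) + cos(0 deg) = 1, while sinusTrapez(0, 90, 2)
// gives (pi/8) * (sin 0 + 2*sin 45 deg + sin 90 deg) ~ 0.948; the trapezoid rule underestimates a concave integrand,
// and increasing n brings the result toward 1.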
void obslugaSinusa()
{
float A, B;
cout << "Podaj przedzial <A, B> (w stopniach): " << endl;
cout << "A = ";
cin >> A;
bladWczytywania();
cout << "B = ";
cin >> B;
bladWczytywania();
if (A > B)
{
cout << "Podales niepoprawny przedzial! Zamieniam liczby" << endl;
float buff = A;
A = B;
B = buff;
}
int n;
cout << "Podaj gestosc podzialu n: ";
cin >> n;
bladWczytywania();
cout << "Metoda trapezow: " << sinusTrapez(A, B, n) << endl;
cout << "Blad procentowy: " << (sinusCalka(A, B) - sinusTrapez(A, B, n)) / sinusCalka(A, B) << endl;
cout << "Calka: " << sinusCalka(A, B) << endl;
}
float tangensTrapez(float A, float B, int n)
{
while (B - A >= 180)
{
B -= 180;
}
	// Note: tan(x) has asymptotes at odd multiples of 90 degrees; an interval <A, B> containing one is not handled here
	A = degToRad(A);
	B = degToRad(B);
	float h = (B - A) / n; // A and B are already in radians here, so they must not be converted a second time
float poleCalkowite = 0;
for (int i = 0; i < n; i++)
{
poleCalkowite += poleTrapezu(tan(A + h * i), tan(A + h * (i + 1)), h);
}
return poleCalkowite;
}
void obslugaTangensa()
{
float A, B;
cout << "Podaj przedzial <A, B> (w stopniach): " << endl;
cout << "A = ";
cin >> A;
bladWczytywania();
cout << "B = ";
cin >> B;
bladWczytywania();
if (A > B)
{
cout << "Podales niepoprawny przedzial. Zamieniam liczby!" << endl;
float buff = A;
A = B;
B = buff;
}
int n;
cout << "Podaj gestosc podzialu n: ";
cin >> n;
bladWczytywania();
cout << "Metoda trapezow: ";
}
void opcje()
{
string funkcja;
cin >> funkcja;
while (funkcja != "a" && funkcja != "b" && funkcja != "c" && funkcja != "d")
{
cout << "Musisz podac a, b, c lub d!" << endl;
cin >> funkcja;
}
switch (funkcja[0])
{
case 'a':
obslugaFunkcjiKwadratowej();
break;
case 'b':
obslugaSinusa();
break;
case 'c':
obslugaTangensa();
break;
case 'd':
cout << "Dowidzenia!";
exit(0);
break;
}
}
int main()
{
while (true)
{
cout << "Wybierz funkcje: " << endl;
cout << "a) ax^2+bx+c" << endl;
cout << "b) sin(x)" << endl;
cout << "c) tan(x)" << endl;
cout << "d) zakoncz" << endl;
opcje();
}
}
| f00f601a6d88cdee8dfcb797147f3a1400d21a16 | [
"C++"
] | 1 | C++ | universaln/MetodaTrapezow | 25023c4387d73c6dddb06afdce9792a146683c6a | b4799d1b35b884177c01af9fedc3137dfd99cfc0 | |
refs/heads/master | <repo_name>bazykinlab/gatk-maternal-cell-contamination<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/afcalc/StateTracker.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper.afcalc;
import com.google.java.contract.Ensures;
import com.google.java.contract.Requires;
import htsjdk.variant.variantcontext.Allele;
import org.broadinstitute.gatk.utils.MathUtils;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * TODO this class (+AFCalculator) is a bit messy... it seems that it combines "debugging" (unnecessarily adding CPU cost
 * TODO in the production setting without much need) but it also contains an important part of the AF calculation state... why mix both!!!?
 * TODO It seems that the second part could just be blended into AFCalculator ... on one hand you want to reduce the classes' code size ... but these
 * TODO two classes' code seems to be quite intertwined, which makes it difficult to understand what is going on.
*
* Keeps track of the state information during the exact model AF calculation.
*
* Tracks things like the MLE and MAP AC values, their corresponding likelihood and posterior
* values, the likelihood of the AF == 0 state, and the number of evaluations needed
* by the calculation to compute the P(AF == 0)
*/
final class StateTracker {
protected static final double VALUE_NOT_CALCULATED = Double.NEGATIVE_INFINITY;
protected final static double MAX_LOG10_ERROR_TO_STOP_EARLY = 6; // we want the calculation to be accurate to 1 / 10^6
/**
* These variables are intended to contain the MLE and MAP (and their corresponding allele counts)
* of the site over all alternate alleles
*/
protected double log10MLE;
protected double log10MAP;
/**
* Returns a vector with maxAltAlleles values containing AC values at the MLE
*
* The values of the ACs for this call are stored in the getAllelesUsedInGenotyping order,
* starting from index 0 (i.e., the first alt allele is at 0). The vector is always
* maxAltAlleles in length, and so only the first getAllelesUsedInGenotyping.size() - 1 values
* are meaningful.
*/
private int[] alleleCountsOfMLE;
private int[] alleleCountsOfMAP;
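    // For illustration: with allelesUsedInGenotyping = [ref, A, T] and an MLE configuration of two copies of A and one
    // copy of T, alleleCountsOfMLE would start with [2, 1]; any remaining cells up to maxAltAlleles stay 0 and are
    // ignored, since only the first allelesUsedInGenotyping.size() - 1 entries are meaningful.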
/**
* A vector of log10 likelihood values seen, for future summation. When the size of the
     * vector is exceeded -- because we've pushed more posteriors than there's space to hold
* -- we simply sum up the existing values, make that the first value, and continue.
*/
private final double[] log10LikelihoodsForAFGt0 = new double[LIKELIHOODS_CACHE_SIZE];
private static final int LIKELIHOODS_CACHE_SIZE = 5000;
private int log10LikelihoodsForAFGt0CacheIndex = 0;
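    // For illustration: when the 5000-entry cache fills up, all entries are condensed with MathUtils.log10sumLog10
    // (e.g. two entries of -3.0 combine to log10(10^-3 + 10^-3) = -3 + log10(2) ~ -2.699); the sum is written back to
    // slot 0 and the index resets to 1, so the running log-space sum is kept in constant memory.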
/**
* The actual sum of the likelihoods. Null if the sum hasn't been computed yet
*/
protected Double log10LikelihoodsForAFGt0Sum = null;
/**
* Contains the likelihood for the site's being monomorphic (i.e. AF=0 for all alternate alleles)
*/
private double log10LikelihoodOfAFzero = 0.0;
/**
* The number of evaluates we've gone through in the AFCalc
*/
private int nEvaluations = 0;
/**
* The list of alleles actually used in computing the AF
*/
private List<Allele> allelesUsedInGenotyping = null;
/**
     * Create a results object capable of storing results for calls with up to maxAltAlleles
*
* @param maxAltAlleles an integer >= 1
*/
public StateTracker(final int maxAltAlleles) {
if ( maxAltAlleles < 0 ) throw new IllegalArgumentException("maxAltAlleles must be >= 0, saw " + maxAltAlleles);
alleleCountsOfMLE = new int[maxAltAlleles];
alleleCountsOfMAP = new int[maxAltAlleles];
reset();
}
/**
     * Is the likelihood of configuration K too low to consider, relative to the
* maximum likelihood seen already?
*
* @param log10LofK the log10 likelihood of the configuration we're considering analyzing
* @return true if the configuration cannot meaningfully contribute to our likelihood sum
*/
private boolean tooLowLikelihood(final double log10LofK) {
return log10LofK < log10MLE - MAX_LOG10_ERROR_TO_STOP_EARLY;
}
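    // For illustration: with MAX_LOG10_ERROR_TO_STOP_EARLY = 6, if the best log10 likelihood seen so far is -10, a
    // configuration at -17 is considered too low, since it would contribute less than one part in 10^6 of the dominant
    // term to the likelihood sum.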
/**
* @return true iff all ACs in this object are less than or equal to their corresponding ACs in the provided set
*/
private boolean isLowerAC(final ExactACcounts otherACs, final boolean otherACsContainsReference) {
final int[] otherACcounts = otherACs.getCounts();
final int firstAltAlleleIndex = otherACsContainsReference ? 1 : 0;
for ( int i = firstAltAlleleIndex; i < otherACcounts.length; i++ ) {
if ( alleleCountsOfMLE[i - firstAltAlleleIndex] > otherACcounts[i] )
return false;
}
return true;
}
/**
     * Should we stop exploring paths from ACs, given its log10LofK
     *
     * @param log10LofK the log10LofK of these ACs
     * @param ACs the ACs of this state
     * @param enforceLowerACs if true, only abort when the MLE AC values seen so far are all less than or equal to the corresponding values in {@code ACs}
     * @param exactACcountsContainReference whether the {@code ACs} contains the reference allele count (index == 0) besides all other alternative alleles.
     * @return true if there's no reason to continue with subpaths of AC, or false otherwise
*/
protected boolean abort(final double log10LofK, final ExactACcounts ACs, final boolean enforceLowerACs, final boolean exactACcountsContainReference) {
return tooLowLikelihood(log10LofK) && (!enforceLowerACs || isLowerAC(ACs,exactACcountsContainReference));
}
@Ensures("result != null")
protected int[] getAlleleCountsOfMAP() {
return alleleCountsOfMAP;
}
@Ensures("result >= 0")
protected int getNEvaluations() {
return nEvaluations;
}
/**
* @return the likelihoods summed across all AC values for AC > 0
*/
private double getLog10LikelihoodOfAFNotZero() {
if ( log10LikelihoodsForAFGt0Sum == null ) {
if ( log10LikelihoodsForAFGt0CacheIndex == 0 ) // there's nothing to sum up, so make the sum equal to the smallest thing we have
log10LikelihoodsForAFGt0Sum = MathUtils.LOG10_P_OF_ZERO;
else
log10LikelihoodsForAFGt0Sum = MathUtils.log10sumLog10(log10LikelihoodsForAFGt0, 0, log10LikelihoodsForAFGt0CacheIndex);
}
return log10LikelihoodsForAFGt0Sum;
}
/**
* @return the log10 likelihood of AF == 0
*/
private double getLog10LikelihoodOfAFzero() {
return log10LikelihoodOfAFzero;
}
/**
     * Convert this state to a corresponding AFCalcResult.
*
* Assumes that the values in this state have been filled in with meaningful values during the calculation.
* For example, that the allelesUsedInGenotyping has been set, that the alleleCountsOfMLE contains meaningful
* values, etc.
*
* @param log10PriorsByAC the priors by AC
*
* @return an AFCalcResult summarizing the final results of this calculation
*/
@Requires("allelesUsedInGenotyping != null")
protected AFCalculationResult toAFCalculationResult(final double[] log10PriorsByAC) {
final int [] subACOfMLE = Arrays.copyOf(alleleCountsOfMLE, allelesUsedInGenotyping.size() - 1);
//TODO bad calculation of normalized log10 ACeq0 and ACgt0 likelihoods, priors and consequently posteriors calculated in AFCalculationResult constructor.
final double[] log10Likelihoods = MathUtils.normalizeFromLog10(new double[]{getLog10LikelihoodOfAFzero(), getLog10LikelihoodOfAFNotZero()}, true);
final double[] log10Priors = MathUtils.normalizeFromLog10(new double[]{log10PriorsByAC[0], MathUtils.log10sumLog10(log10PriorsByAC, 1)}, true);
final Map<Allele, Double> log10pRefByAllele = new HashMap<Allele, Double>(allelesUsedInGenotyping.size());
for ( int i = 0; i < subACOfMLE.length; i++ ) {
final Allele allele = allelesUsedInGenotyping.get(i+1);
final double log10PRef = alleleCountsOfMAP[i] > 0 ? -10000 : 0; // TODO -- a total hack but in effect what the old behavior was
log10pRefByAllele.put(allele, log10PRef);
}
return new AFCalculationResult(subACOfMLE, nEvaluations, allelesUsedInGenotyping, log10Likelihoods, log10Priors, log10pRefByAllele);
}
// --------------------------------------------------------------------------------
//
// Protected mutational methods only for use within the calculation models themselves
//
// --------------------------------------------------------------------------------
/**
* Reset the data in this results object, so that it can be used in a subsequent AF calculation
*
* Resetting of the data is done by the calculation model itself, so shouldn't be done by callers any longer
*
     * @param ensureAltAlleleCapacity the minimum number of alt-alleles that should be supported by the
* tracker.
*/
protected void reset(final int ensureAltAlleleCapacity) {
log10MLE = log10MAP = log10LikelihoodOfAFzero = VALUE_NOT_CALCULATED;
log10LikelihoodsForAFGt0CacheIndex = 0;
log10LikelihoodsForAFGt0Sum = null;
allelesUsedInGenotyping = null;
nEvaluations = 0;
if (alleleCountsOfMAP.length < ensureAltAlleleCapacity) {
final int newCapacity = Math.max(ensureAltAlleleCapacity,alleleCountsOfMAP.length << 1);
alleleCountsOfMAP = new int[newCapacity];
alleleCountsOfMLE = new int[newCapacity];
} else {
Arrays.fill(alleleCountsOfMLE, 0);
Arrays.fill(alleleCountsOfMAP, 0);
}
Arrays.fill(log10LikelihoodsForAFGt0, Double.POSITIVE_INFINITY);
}
/**
* Reset the data in this results object, so that it can be used in a subsequent AF calculation
*
* Resetting of the data is done by the calculation model itself, so shouldn't be done by callers any longer
*/
protected void reset() {
log10MLE = log10MAP = log10LikelihoodOfAFzero = VALUE_NOT_CALCULATED;
log10LikelihoodsForAFGt0CacheIndex = 0;
log10LikelihoodsForAFGt0Sum = null;
allelesUsedInGenotyping = null;
nEvaluations = 0;
Arrays.fill(alleleCountsOfMLE, 0);
Arrays.fill(alleleCountsOfMAP, 0);
Arrays.fill(log10LikelihoodsForAFGt0, Double.POSITIVE_INFINITY);
}
/**
* Tell this result we used one more evaluation cycle
*/
protected void incNEvaluations() {
nEvaluations++;
}
/**
* Update the maximum log10 likelihoods seen, if log10LofKs is higher, and the corresponding ACs of this state
*
* @param log10LofK the likelihood of our current configuration state, cannot be the 0 state
* @param alleleCountsForK the allele counts for this state
*/
@Requires({"alleleCountsForK != null", "MathUtils.sum(alleleCountsForK) >= 0"})
@Ensures("log10MLE == Math.max(log10LofK, log10MLE)")
protected void updateMLEifNeeded(final double log10LofK, final int[] alleleCountsForK) {
addToLikelihoodsCache(log10LofK);
if ( log10LofK > log10MLE ) {
log10MLE = log10LofK;
System.arraycopy(alleleCountsForK, 0, alleleCountsOfMLE, 0, alleleCountsForK.length);
}
}
/**
* Update the maximum log10 posterior seen, if log10PofKs is higher, and the corresponding ACs of this state
*
* @param log10PofK the posterior of our current configuration state
* @param alleleCountsForK the allele counts for this state
*/
@Requires({"alleleCountsForK != null", "MathUtils.sum(alleleCountsForK) >= 0"})
@Ensures("log10MAP == Math.max(log10PofK, log10MAP)")
protected void updateMAPifNeeded(final double log10PofK, final int[] alleleCountsForK) {
if ( log10PofK > log10MAP ) {
log10MAP = log10PofK;
System.arraycopy(alleleCountsForK, 0, alleleCountsOfMAP, 0, alleleCountsForK.length);
}
}
private void addToLikelihoodsCache(final double log10LofK) {
// add to the cache
log10LikelihoodsForAFGt0[log10LikelihoodsForAFGt0CacheIndex++] = log10LofK;
// if we've filled up the cache, then condense by summing up all of the values and placing the sum back into the first cell
if ( log10LikelihoodsForAFGt0CacheIndex == LIKELIHOODS_CACHE_SIZE) {
final double temporarySum = MathUtils.log10sumLog10(log10LikelihoodsForAFGt0, 0, log10LikelihoodsForAFGt0CacheIndex);
Arrays.fill(log10LikelihoodsForAFGt0, Double.POSITIVE_INFINITY);
log10LikelihoodsForAFGt0[0] = temporarySum;
log10LikelihoodsForAFGt0CacheIndex = 1;
}
}
protected void setLog10LikelihoodOfAFzero(final double log10LikelihoodOfAFzero) {
this.log10LikelihoodOfAFzero = log10LikelihoodOfAFzero;
if ( log10LikelihoodOfAFzero > log10MLE ) {
log10MLE = log10LikelihoodOfAFzero;
Arrays.fill(alleleCountsOfMLE, 0);
}
}
@Requires({"MathUtils.goodLog10Probability(log10PosteriorOfAFzero)"})
protected void setLog10PosteriorOfAFzero(final double log10PosteriorOfAFzero) {
if ( log10PosteriorOfAFzero > log10MAP ) {
log10MAP = log10PosteriorOfAFzero;
Arrays.fill(alleleCountsOfMAP, 0);
}
}
/**
* Set the list of alleles used in genotyping
*
* @param allelesUsedInGenotyping the list of alleles, where the first allele is reference
*/
@Requires({"allelesUsedInGenotyping != null", "allelesUsedInGenotyping.size() > 1"})
protected void setAllelesUsedInGenotyping(List<Allele> allelesUsedInGenotyping) {
if ( allelesUsedInGenotyping == null || allelesUsedInGenotyping.isEmpty() )
throw new IllegalArgumentException("allelesUsedInGenotyping cannot be null or empty");
if ( allelesUsedInGenotyping.get(0).isNonReference() )
throw new IllegalArgumentException("The first element of allelesUsedInGenotyping must be the reference allele");
this.allelesUsedInGenotyping = allelesUsedInGenotyping;
}
public void ensureMaximumAlleleCapacity(final int capacity) {
if (this.alleleCountsOfMAP.length < capacity)
reset(capacity);
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/variantrecalibration/MultivariateGaussian.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.variantrecalibration;
import Jama.Matrix;
import org.apache.commons.math.special.Gamma;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.collections.ExpandingArrayList;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
/**
* Created by IntelliJ IDEA.
* User: rpoplin
* Date: Mar 4, 2011
*/
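// A single component of the Gaussian mixture model used during variant recalibration. Each instance holds
// the component mean (mu), covariance (sigma) and log10 mixture weight, together with hyperparameters
// (a, b, lambda) that appear to correspond to the Wishart degrees of freedom, the mean-shrinkage count and
// the Dirichlet weight of a variational Bayes GMM (cf. the "Bishop" reference in the methods below).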
public class MultivariateGaussian {
public double pMixtureLog10;
public double sumProb;
final public double[] mu;
final public Matrix sigma;
public double hyperParameter_a;
public double hyperParameter_b;
public double hyperParameter_lambda;
private double cachedDenomLog10;
private Matrix cachedSigmaInverse;
final private ExpandingArrayList<Double> pVarInGaussian;
public MultivariateGaussian( final int numAnnotations ) {
mu = new double[numAnnotations];
sigma = new Matrix(numAnnotations, numAnnotations);
pVarInGaussian = new ExpandingArrayList<>();
}
public void zeroOutMu() {
Arrays.fill( mu, 0.0 );
}
public void zeroOutSigma() {
final double[][] zeroSigma = new double[mu.length][mu.length];
for( final double[] row : zeroSigma ) {
Arrays.fill(row, 0);
}
final Matrix tmp = new Matrix(zeroSigma);
sigma.setMatrix(0, mu.length - 1, 0, mu.length - 1, tmp);
}
public void initializeRandomMu( final Random rand ) {
for( int jjj = 0; jjj < mu.length; jjj++ ) {
mu[jjj] = -4.0 + 8.0 * rand.nextDouble();
}
}
public void initializeRandomSigma( final Random rand ) {
final double[][] randSigma = new double[mu.length][mu.length];
for( int iii = 0; iii < mu.length; iii++ ) {
for( int jjj = iii; jjj < mu.length; jjj++ ) {
randSigma[jjj][iii] = 0.55 + 1.25 * rand.nextDouble();
if( rand.nextBoolean() ) {
randSigma[jjj][iii] *= -1.0;
}
if( iii != jjj ) { randSigma[iii][jjj] = 0.0; } // Sigma is a symmetric, positive-definite matrix created by taking a lower triangular matrix and multiplying it by its transpose
}
}
Matrix tmp = new Matrix( randSigma );
tmp = tmp.times(tmp.transpose());
sigma.setMatrix(0, mu.length - 1, 0, mu.length - 1, tmp);
}
public double calculateDistanceFromMeanSquared( final VariantDatum datum ) {
return MathUtils.distanceSquared( datum.annotations, mu );
}
public void incrementMu( final VariantDatum datum ) {
incrementMu( datum, 1.0 );
}
public void incrementMu( final VariantDatum datum, final double prob ) {
for( int jjj = 0; jjj < mu.length; jjj++ ) {
mu[jjj] += prob * datum.annotations[jjj];
}
}
public void divideEqualsMu( final double x ) {
for( int jjj = 0; jjj < mu.length; jjj++ ) {
mu[jjj] /= x;
}
}
private void precomputeInverse() {
try {
cachedSigmaInverse = sigma.inverse();
} catch( Exception e ) {
throw new UserException("Error during clustering. Most likely there are too few variants used during Gaussian mixture modeling. Please consider raising the number of variants used to train the negative model (via --percentBadVariants 0.05, for example) or lowering the maximum number of Gaussians to use in the model (via --maxGaussians 4, for example).");
}
}
public void precomputeDenominatorForEvaluation() {
precomputeInverse();
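// cachedDenomLog10 is the log10 normalization constant of the multivariate normal,
// log10( (2*pi)^(-k/2) ) + log10( det(sigma)^(-1/2) ) with k = mu.length;
// evaluateDatumLog10() adds the -0.5 * Mahalanobis-distance term on top of it.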
cachedDenomLog10 = Math.log10(Math.pow(2.0 * Math.PI, -1.0 * ((double) mu.length) / 2.0)) + Math.log10(Math.pow(sigma.det(), -0.5)) ;
}
public void precomputeDenominatorForVariationalBayes( final double sumHyperParameterLambda ) {
// Variational Bayes calculations from Bishop
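// This appears to follow the variational GMM treatment in Bishop's "Pattern Recognition and Machine
// Learning", ch. 10: the cached term combines the expected log mixing weight
// digamma(lambda_k) - digamma(sum_k lambda_k), half of the expected log determinant of the precision
// (the digamma sum + k*ln(2) - ln(det(sigma))), and a -k / (2*b_k) correction, all converted to log10.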
precomputeInverse();
cachedSigmaInverse.timesEquals( hyperParameter_a );
double sum = 0.0;
for(int jjj = 1; jjj <= mu.length; jjj++) {
sum += Gamma.digamma( (hyperParameter_a + 1.0 - jjj) / 2.0 );
}
sum -= Math.log( sigma.det() );
sum += Math.log(2.0) * mu.length;
final double lambda = 0.5 * sum;
final double pi = Gamma.digamma( hyperParameter_lambda ) - Gamma.digamma( sumHyperParameterLambda );
final double beta = (-1.0 * mu.length) / (2.0 * hyperParameter_b);
cachedDenomLog10 = (pi / Math.log(10.0)) + (lambda / Math.log(10.0)) + (beta / Math.log(10.0));
}
public double evaluateDatumLog10( final VariantDatum datum ) {
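// Evaluates log10 N(x | mu, sigma) for this datum: the loops below compute the Mahalanobis quadratic form
// (x - mu)^T * sigma^-1 * (x - mu) using the cached inverse, and the result is -0.5 * quadraticForm / ln(10)
// plus the precomputed log10 normalization term cachedDenomLog10.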
double sumKernel = 0.0;
final double[] crossProdTmp = new double[mu.length];
Arrays.fill(crossProdTmp, 0.0);
for( int iii = 0; iii < mu.length; iii++ ) {
for( int jjj = 0; jjj < mu.length; jjj++ ) {
crossProdTmp[iii] += (datum.annotations[jjj] - mu[jjj]) * cachedSigmaInverse.get(jjj, iii);
}
}
for( int iii = 0; iii < mu.length; iii++ ) {
sumKernel += crossProdTmp[iii] * (datum.annotations[iii] - mu[iii]);
}
return (( -0.5 * sumKernel ) / Math.log(10.0)) + cachedDenomLog10; // This is the definition of a Gaussian PDF Log10
}
public void assignPVarInGaussian( final double pVar ) {
pVarInGaussian.add( pVar );
}
public void resetPVarInGaussian() {
pVarInGaussian.clear();
}
public void maximizeGaussian( final List<VariantDatum> data, final double[] empiricalMu, final Matrix empiricalSigma,
final double SHRINKAGE, final double DIRICHLET_PARAMETER, final double DEGREES_OF_FREEDOM ) {
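// M-step style update with prior regularization: the responsibilities accumulated in pVarInGaussian are
// used to form a weighted mean and covariance, which are then shrunk toward empiricalMu / empiricalSigma
// via the wishart term and the SHRINKAGE-weighted mean update below. The final hyperparameter updates
// (sumProb plus DEGREES_OF_FREEDOM, SHRINKAGE and DIRICHLET_PARAMETER respectively) appear to mirror the
// usual variational Bayes GMM form of "effective count N_k plus prior value".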
sumProb = 1E-10;
final Matrix wishart = new Matrix(mu.length, mu.length);
zeroOutMu();
zeroOutSigma();
int datumIndex = 0;
for( final VariantDatum datum : data ) {
final double prob = pVarInGaussian.get(datumIndex++);
sumProb += prob;
incrementMu( datum, prob );
}
divideEqualsMu( sumProb );
final double shrinkageFactor = (SHRINKAGE * sumProb) / (SHRINKAGE + sumProb);
for( int iii = 0; iii < mu.length; iii++ ) {
for( int jjj = 0; jjj < mu.length; jjj++ ) {
wishart.set(iii, jjj, shrinkageFactor * (mu[iii] - empiricalMu[iii]) * (mu[jjj] - empiricalMu[jjj]));
}
}
datumIndex = 0;
final Matrix pVarSigma = new Matrix(mu.length, mu.length);
for( final VariantDatum datum : data ) {
final double prob = pVarInGaussian.get(datumIndex++);
for( int iii = 0; iii < mu.length; iii++ ) {
for( int jjj = 0; jjj < mu.length; jjj++ ) {
pVarSigma.set(iii, jjj, prob * (datum.annotations[iii]-mu[iii]) * (datum.annotations[jjj]-mu[jjj]));
}
}
sigma.plusEquals( pVarSigma );
}
sigma.plusEquals( empiricalSigma );
sigma.plusEquals( wishart );
for( int iii = 0; iii < mu.length; iii++ ) {
mu[iii] = (sumProb * mu[iii] + SHRINKAGE * empiricalMu[iii]) / (sumProb + SHRINKAGE);
}
hyperParameter_a = sumProb + DEGREES_OF_FREEDOM;
hyperParameter_b = sumProb + SHRINKAGE;
hyperParameter_lambda = sumProb + DIRICHLET_PARAMETER;
resetPVarInGaussian(); // clean up some memory
}
public void evaluateFinalModelParameters( final List<VariantDatum> data ) {
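// Unlike maximizeGaussian(), this computes plain responsibility-weighted estimates with no shrinkage toward
// the empirical prior: mu becomes the weighted mean and sigma the weighted covariance, normalized by sumProb.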
sumProb = 0.0;
zeroOutMu();
zeroOutSigma();
int datumIndex = 0;
for( final VariantDatum datum : data ) {
final double prob = pVarInGaussian.get(datumIndex++);
sumProb += prob;
incrementMu( datum, prob );
}
divideEqualsMu( sumProb );
datumIndex = 0;
final Matrix pVarSigma = new Matrix(mu.length, mu.length);
for( final VariantDatum datum : data ) {
final double prob = pVarInGaussian.get(datumIndex++);
for( int iii = 0; iii < mu.length; iii++ ) {
for( int jjj = 0; jjj < mu.length; jjj++ ) {
pVarSigma.set(iii, jjj, prob * (datum.annotations[iii]-mu[iii]) * (datum.annotations[jjj]-mu[jjj]));
}
}
sigma.plusEquals( pVarSigma );
}
sigma.timesEquals( 1.0 / sumProb );
resetPVarInGaussian(); // clean up some memory
}
}<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/afcalc/DiploidExactAFCalculator.java
package org.broadinstitute.gatk.tools.walkers.genotyper.afcalc;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.GenotypeLikelihoods;
import htsjdk.variant.variantcontext.GenotypesContext;
import htsjdk.variant.variantcontext.VariantContext;
import java.util.*;
public abstract class DiploidExactAFCalculator extends ExactAFCalculator {
private static final double LOG10_OF_2 = MathUtils.Log10Cache.get(2);
public DiploidExactAFCalculator() {
}
@Override
protected AFCalculationResult computeLog10PNonRef(final VariantContext vc, final int defaultPloidy,
final double[] log10AlleleFrequencyPriors, final StateTracker stateTracker) {
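// Exact diploid allele-frequency calculation: allele-count (AC) conformations are explored breadth-first,
// starting from AC=0 and adding one or two alternate-allele copies at a time. Each ExactACset holds one
// column of the dynamic-programming table L(j, AC) over the first j samples; a set is dropped from
// indexesToACset as soon as it has been processed, to keep memory bounded.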
final int numAlternateAlleles = vc.getNAlleles() - 1;
final ArrayList<double[]> genotypeLikelihoods = getGLs(vc.getGenotypes(), true, vc.hasAllele(GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE));
final int numSamples = genotypeLikelihoods.size()-1;
final int numChr = 2*numSamples;
// queue of AC conformations to process
final LinkedList<ExactACset> ACqueue = new LinkedList<>();
// mapping of ExactACset indexes to the objects
final HashMap<ExactACcounts, ExactACset> indexesToACset = new HashMap<>(numChr+1);
// add AC=0 to the queue
final int[] zeroCounts = new int[numAlternateAlleles];
ExactACset zeroSet = new ExactACset(numSamples+1, new ExactACcounts(zeroCounts));
ACqueue.add(zeroSet);
indexesToACset.put(zeroSet.getACcounts(), zeroSet);
while ( !ACqueue.isEmpty() ) {
stateTracker.incNEvaluations(); // keep track of the number of evaluations
// compute log10Likelihoods
final ExactACset set = ACqueue.remove();
calculateAlleleCountConformation(set, genotypeLikelihoods, numChr, ACqueue,
indexesToACset, log10AlleleFrequencyPriors,stateTracker);
// clean up memory
indexesToACset.remove(set.getACcounts());
//if ( DEBUG )
// System.out.printf(" *** removing used set=%s%n", set.ACcounts);
}
return getResultFromFinalState(vc, log10AlleleFrequencyPriors, stateTracker);
}
@Override
protected GenotypesContext reduceScopeGenotypes(final VariantContext vc, final int defaultPloidy, final List<Allele> allelesToUse) {
return GATKVariantContextUtils.subsetAlleles(vc, allelesToUse, GATKVariantContextUtils.GenotypeAssignmentMethod.SET_TO_NO_CALL);
}
@Override
protected void reduceScopeCalculateLikelihoodSums(final VariantContext vc, final int defaultPloidy, final LikelihoodSum[] likelihoodSums) {
final ArrayList<double[]> GLs = getGLs(vc.getGenotypes(), true);
for ( final double[] likelihoods : GLs ) {
final int PLindexOfBestGL = MathUtils.maxElementIndex(likelihoods);
if ( PLindexOfBestGL != PL_INDEX_OF_HOM_REF ) {
final GenotypeLikelihoods.GenotypeLikelihoodsAllelePair alleles = GenotypeLikelihoods.getAllelePair(PLindexOfBestGL);
final int alleleLikelihoodIndex1 = alleles.alleleIndex1 - 1;
final int alleleLikelihoodIndex2 = alleles.alleleIndex2 - 1;
if ( alleles.alleleIndex1 != 0 )
likelihoodSums[alleleLikelihoodIndex1].sum += likelihoods[PLindexOfBestGL] - likelihoods[PL_INDEX_OF_HOM_REF];
// don't double-count it
if ( alleles.alleleIndex2 != 0 && alleles.alleleIndex2 != alleles.alleleIndex1 )
likelihoodSums[alleleLikelihoodIndex2].sum += likelihoods[PLindexOfBestGL] - likelihoods[PL_INDEX_OF_HOM_REF];
}
}
}
private static final class DependentSet {
public final int[] ACcounts;
public final int PLindex;
public DependentSet(final int[] ACcounts, final int PLindex) {
this.ACcounts = ACcounts;
this.PLindex = PLindex;
}
}
private double calculateAlleleCountConformation(final ExactACset set,
final ArrayList<double[]> genotypeLikelihoods,
final int numChr,
final LinkedList<ExactACset> ACqueue,
final HashMap<ExactACcounts, ExactACset> indexesToACset,
final double[] log10AlleleFrequencyPriors,
final StateTracker stateTracker) {
//if ( DEBUG )
// System.out.printf(" *** computing LofK for set=%s%n", set.ACcounts);
// compute the log10Likelihoods
computeLofK(set, genotypeLikelihoods, log10AlleleFrequencyPriors, stateTracker);
final double log10LofK = set.getLog10Likelihoods()[set.getLog10Likelihoods().length-1];
// can we abort early because the log10Likelihoods are so small?
if ( stateTracker.abort(log10LofK, set.getACcounts(), true, false) ) {
//if ( DEBUG )
// System.out.printf(" *** breaking early set=%s log10L=%.2f maxLog10L=%.2f%n", set.ACcounts, log10LofK, maxLog10L);
return log10LofK;
}
// iterate over higher frequencies if possible
final int ACwiggle = numChr - set.getACsum();
if ( ACwiggle == 0 ) // all alternate alleles already sum to 2N so we cannot possibly go to higher frequencies
return log10LofK;
final int numAltAlleles = set.getACcounts().getCounts().length;
// add conformations for the k+1 case
for ( int allele = 0; allele < numAltAlleles; allele++ ) {
final int[] ACcountsClone = set.getACcounts().getCounts().clone();
ACcountsClone[allele]++;
// to get to this conformation, a sample would need to be AB (remember that ref=0)
final int PLindex = GenotypeLikelihoods.calculatePLindex(0, allele+1);
updateACset(ACcountsClone, numChr, set, PLindex, ACqueue, indexesToACset, genotypeLikelihoods);
}
// add conformations for the k+2 case if it makes sense; note that the 2 new alleles may be the same or different
if ( ACwiggle > 1 ) {
final ArrayList<DependentSet> differentAlleles = new ArrayList<>(numAltAlleles * numAltAlleles);
final ArrayList<DependentSet> sameAlleles = new ArrayList<>(numAltAlleles);
for ( int allele_i = 0; allele_i < numAltAlleles; allele_i++ ) {
for ( int allele_j = allele_i; allele_j < numAltAlleles; allele_j++ ) {
final int[] ACcountsClone = set.getACcounts().getCounts().clone();
ACcountsClone[allele_i]++;
ACcountsClone[allele_j]++;
// to get to this conformation, a sample would need to be BB or BC (remember that ref=0, so add one to the index)
final int PLindex = GenotypeLikelihoods.calculatePLindex(allele_i+1, allele_j+1);
if ( allele_i == allele_j )
sameAlleles.add(new DependentSet(ACcountsClone, PLindex));
else
differentAlleles.add(new DependentSet(ACcountsClone, PLindex));
}
}
// IMPORTANT: we must first add the cases where the 2 new alleles are different so that the queue maintains its ordering
for ( DependentSet dependent : differentAlleles )
updateACset(dependent.ACcounts, numChr, set, dependent.PLindex, ACqueue, indexesToACset, genotypeLikelihoods);
for ( DependentSet dependent : sameAlleles )
updateACset(dependent.ACcounts, numChr, set, dependent.PLindex, ACqueue, indexesToACset, genotypeLikelihoods);
}
return log10LofK;
}
// adds the ExactACset represented by the ACcounts to the ACqueue if not already there (creating it if needed) and
// then pushes likelihoods from the given dependent set into it at the given PL index.
private void updateACset(final int[] newSetCounts,
final int numChr,
final ExactACset dependentSet,
final int PLsetIndex,
final Queue<ExactACset> ACqueue,
final HashMap<ExactACcounts, ExactACset> indexesToACset,
final ArrayList<double[]> genotypeLikelihoods) {
final ExactACcounts index = new ExactACcounts(newSetCounts);
if ( !indexesToACset.containsKey(index) ) {
ExactACset set = new ExactACset(numChr/2 +1, index);
indexesToACset.put(index, set);
ACqueue.add(set);
}
// push data from the dependency to the new set
//if ( DEBUG )
// System.out.println(" *** pushing data from " + index + " to " + dependencySet.ACcounts);
pushData(indexesToACset.get(index), dependentSet, PLsetIndex, genotypeLikelihoods);
}
private void computeLofK(final ExactACset set,
final ArrayList<double[]> genotypeLikelihoods,
final double[] log10AlleleFrequencyPriors, final StateTracker stateTracker) {
set.getLog10Likelihoods()[0] = 0.0; // the zero case
final int totalK = set.getACsum();
// special case for k = 0 over all k
if ( totalK == 0 ) {
for ( int j = 1; j < set.getLog10Likelihoods().length; j++ )
set.getLog10Likelihoods()[j] = set.getLog10Likelihoods()[j-1] + genotypeLikelihoods.get(j)[HOM_REF_INDEX];
final double log10Lof0 = set.getLog10Likelihoods()[set.getLog10Likelihoods().length-1];
stateTracker.setLog10LikelihoodOfAFzero(log10Lof0);
stateTracker.setLog10PosteriorOfAFzero(log10Lof0 + log10AlleleFrequencyPriors[0]);
return;
}
// if we got here, then k > 0 for at least one k.
// the non-AA possible conformations were already dealt with by pushes from dependent sets;
// now deal with the AA case (which depends on previous cells in this column) and then update the L(j,k) value
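// Concretely, with k = totalK alternate alleles among the first j diploid samples, the hom-ref contribution
// adds (2j - k) * (2j - k - 1) * L(j-1, k) * P(reads_j | hom-ref), and every term is then divided by the
// denominator 2j * (2j - 1); in log10 space these products and quotients become the Log10Cache sums below.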
for ( int j = 1; j < set.getLog10Likelihoods().length; j++ ) {
if ( totalK < 2*j-1 ) {
final double[] gl = genotypeLikelihoods.get(j);
final double conformationValue = MathUtils.Log10Cache.get(2*j-totalK) + MathUtils.Log10Cache.get(2*j-totalK-1) + set.getLog10Likelihoods()[j-1] + gl[HOM_REF_INDEX];
set.getLog10Likelihoods()[j] = MathUtils.approximateLog10SumLog10(set.getLog10Likelihoods()[j], conformationValue);
}
final double logDenominator = MathUtils.Log10Cache.get(2*j) + MathUtils.Log10Cache.get(2*j-1);
set.getLog10Likelihoods()[j] = set.getLog10Likelihoods()[j] - logDenominator;
}
double log10LofK = set.getLog10Likelihoods()[set.getLog10Likelihoods().length-1];
// update the MLE if necessary
stateTracker.updateMLEifNeeded(log10LofK, set.getACcounts().getCounts());
// apply the priors over each alternate allele
for ( final int ACcount : set.getACcounts().getCounts() ) {
if ( ACcount > 0 )
log10LofK += log10AlleleFrequencyPriors[ACcount];
}
stateTracker.updateMAPifNeeded(log10LofK, set.getACcounts().getCounts());
}
private void pushData(final ExactACset targetSet,
final ExactACset dependentSet,
final int PLsetIndex,
final ArrayList<double[]> genotypeLikelihoods) {
final int totalK = targetSet.getACsum();
final double[] targetLog10Likelihoods = targetSet.getLog10Likelihoods();
final double[] dependentLog10Likelihoods = dependentSet.getLog10Likelihoods();
final int[] targetACcounts = targetSet.getACcounts().getCounts();
// skip impossible conformations, namely those for which the first j samples don't provide enough chromosomes (2*j in the loop below)
// to fill up the totalK alternate alleles needed; we do want to ensure that there's at least one sample included (hence the Math.max)
final int firstIndex = Math.max(1, (totalK + 1) / 2);
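// (each of the first j diploid samples contributes at most 2 chromosomes, so a conformation with totalK
// alternate alleles is only reachable once 2*j >= totalK, i.e. j >= (totalK + 1) / 2 in integer division)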
// find the 2 alleles that are represented by this PL index
final GenotypeLikelihoods.GenotypeLikelihoodsAllelePair alleles = GenotypeLikelihoods.getAllelePair(PLsetIndex);
// if neither allele is reference then the coefficient is constant throughout this invocation
final Double constantCoefficient = (alleles.alleleIndex1 == 0) ? null : determineCoefficient(alleles, 0, targetACcounts, totalK);
for ( int j = firstIndex; j < targetLog10Likelihoods.length; j++ ) {
final double[] gl = genotypeLikelihoods.get(j);
final double coefficient = (constantCoefficient != null) ? constantCoefficient : determineCoefficient(alleles, j, targetACcounts, totalK);
final double conformationValue = coefficient + dependentLog10Likelihoods[j-1] + gl[PLsetIndex];
targetLog10Likelihoods[j] = MathUtils.approximateLog10SumLog10(targetLog10Likelihoods[j], conformationValue);
}
}
private double determineCoefficient(final GenotypeLikelihoods.GenotypeLikelihoodsAllelePair alleles, final int j, final int[] ACcounts, final int totalK) {
// the closed form representation generalized for multiple alleles is as follows:
// AA: (2j - totalK) * (2j - totalK - 1)
// AB: 2k_b * (2j - totalK)
// AC: 2k_c * (2j - totalK)
// BB: k_b * (k_b - 1)
// BC: 2 * k_b * k_c
// CC: k_c * (k_c - 1)
// *** note that throughout this method we subtract one from the alleleIndex because ACcounts ***
// *** doesn't consider the reference allele whereas the GenotypeLikelihoods PL cache does. ***
// the AX het case
if ( alleles.alleleIndex1 == 0 )
return MathUtils.Log10Cache.get(2*ACcounts[alleles.alleleIndex2-1]) + MathUtils.Log10Cache.get(2*j-totalK);
final int k_i = ACcounts[alleles.alleleIndex1-1];
// the hom var case (e.g. BB, CC, DD)
final double coeff;
if ( alleles.alleleIndex1 == alleles.alleleIndex2 ) {
coeff = MathUtils.Log10Cache.get(k_i) + MathUtils.Log10Cache.get(k_i - 1);
}
// the het non-ref case (e.g. BC, BD, CD)
else {
final int k_j = ACcounts[alleles.alleleIndex2-1];
coeff = LOG10_OF_2 + MathUtils.Log10Cache.get(k_i) + MathUtils.Log10Cache.get(k_j);
}
return coeff;
}
@Override
public GenotypesContext subsetAlleles(final VariantContext vc,
final int defaultPloidy,
final List<Allele> allelesToUse,
final boolean assignGenotypes) {
if (defaultPloidy != 2)
throw new IllegalArgumentException("cannot support ploidy different than 2 and the default ploidy is " + defaultPloidy);
return allelesToUse.size() == 1
? GATKVariantContextUtils.subsetToRefOnly(vc, defaultPloidy)
: GATKVariantContextUtils.subsetAlleles(vc, allelesToUse,
assignGenotypes ? GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN : GATKVariantContextUtils.GenotypeAssignmentMethod.SET_TO_NO_CALL);
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/cancer/contamination/ContaminationEstimate.java
package org.broadinstitute.gatk.tools.walkers.cancer.contamination;
import org.broadinstitute.gatk.utils.GenomeLoc;
import org.broadinstitute.gatk.utils.collections.Pair;
import java.util.Arrays;
/**
* a class that estimates and stores the contamination values for a site.
*/
class ContaminationEstimate {
private final double precision; // to what precision do we want to run; e.g. if set to 1, we run using 1% increments
private final double[] bins; // the bins representing the discrete contamination levels we're evaluating
private double populationFit = 0.0;
private String popultationName = "";
private static double[] precalculatedEpsilon;
private int arrayAlleleObservations = 0;
private int alternateAlleleObservations = 0;
// precalculate the 128 values of epsilon that are possible
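// (epsilon is the Phred-scaled base error probability: for a reported quality score Q, epsilon = 10^(-Q/10),
// so the table covers Q = 0..127)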
static {
precalculatedEpsilon = new double[Byte.MAX_VALUE+1];
for(int i=0; i <= (int)Byte.MAX_VALUE; i++) {
precalculatedEpsilon[i] = Math.pow(10.0,-1.0*(((double)i)/10.0));
}
}
/**
* create the contamination estimate, given:
* @param precision the precision value, to what level are we calculating the contamination
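* @param maf the population allele frequency used in the contamination mixture model
* @param bases the observed bases at this site
* @param quals the Phred-scaled base quality scores, parallel to bases
* @param arrayAllele the allele expected for this sample (the "array" allele)
* @param hapmapAlt the alternate allele to test against
* @param popName the name of the population the allele frequency comes from
* @param locus the genomic location of the site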
*/
public ContaminationEstimate(double precision,
double maf,
byte[] bases,
byte[] quals,
byte arrayAllele,
byte hapmapAlt,
String popName,
GenomeLoc locus
) {
// setup the bins to the correct precision
this.precision = precision;
bins = new double[(int)Math.ceil(100/precision)+1];
if (maf == 0) maf = 0.00001;
popultationName = popName;
Arrays.fill(bins,0.0); // just to make sure we don't have any residual values
// convert the quals
double[] realQuals = new double[quals.length];
int qIndex = 0;
for (byte qual : quals) {realQuals[qIndex++] = Math.pow(10.0,-1.0*(qual/10.0));}
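// (note: realQuals is not referenced below; the likelihood loop uses the precalculated epsilon table instead)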
// check our inputs
if (maf > 1.0 || maf < 0.0) throw new IllegalArgumentException("Invalid allele Freq: must be between 0 and 1 (inclusive), maf was " + maf + " for population " + popName);
// calculate the contamination for each bin
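// For each candidate contamination fraction c (one bin per fraction), every observed base adds a
// log-likelihood under a two-component mixture: with probability (1 - c) the base comes from this sample
// and with probability c from a contaminating individual whose alleles follow the population frequency maf;
// a correctly called base contributes (1 - epsilon) and each specific miscalled base epsilon / 3.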
int qualOffset = 0;
for (byte base : bases) {
if (base == arrayAllele) { arrayAlleleObservations++; }
if (base == hapmapAlt) { alternateAlleleObservations++; }
double epsilon = precalculatedEpsilon[quals[qualOffset++]];
for (int index = 0; index < bins.length; index++) {
double contaminationRate = (1.0 - (double) index / (double) bins.length);
if (base == arrayAllele) {
bins[index] += Math.log((1.0 - contaminationRate) * (1.0 - epsilon) +
contaminationRate * ((maf) * (1.0 - epsilon) + (1.0 - maf) * (epsilon/3.0)));
populationFit += Math.log(epsilon);
} else if(hapmapAlt == base) {
bins[index] += Math.log((1.0 - contaminationRate) * (epsilon / 3.0) +
contaminationRate * ((maf) * (epsilon/3.0) + (1.0 - maf) * (1.0 - epsilon)));
populationFit += Math.log(maf + epsilon);
}
}
}
}
public double[] getBins() {
return bins;
}
public void setPopulationFit(double populationFit) {
this.populationFit = populationFit;
}
public double getPopulationFit() {
return populationFit;
}
public String getPopultationName() {
return popultationName;
}
public static class ConfidenceInterval {
private double start;
private double stop;
private double contamination;
private double maxLikelihood;
double[] newBins;
public ConfidenceInterval(double[] bins, double intervalArea) {
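// Builds a credible interval around the maximum-likelihood bin: the log10 bins are renormalized into
// probabilities, then the interval is widened one bin at a time from the peak, extending on the side whose
// current boundary bin carries more probability, until the enclosed mass reaches the target.
// Note that the loop below currently uses a hard-coded 0.95 rather than the intervalArea argument.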
// make a copy of the bins in non-log space
int maxIndex = 0;
for (int x = 0; x < bins.length; x++) if (bins[x] > bins[maxIndex]) maxIndex = x;
newBins = new double[bins.length];
maxLikelihood = bins[maxIndex];
int index = 0;
double total = 0.0;
for (double d : bins) {
newBins[index] = Math.pow(10,(bins[index] - bins[maxIndex]));
total += newBins[index];
index++;
}
for (int x = 0; x < newBins.length; x++) {
newBins[x] = newBins[x] / total;
}
double areaUnderCurve = 0;
int leftIndex = maxIndex;
int rightIndex = maxIndex;
while (areaUnderCurve < 0.95) {
// if the "left" bin is bigger, and can be moved, move it
if (newBins[leftIndex] >= newBins[rightIndex] && leftIndex > 0) {
leftIndex--;
} else {
// otherwise move the right bin if possible
if (rightIndex < bins.length - 1) {
rightIndex++;
} else {
// and if not move the left bin, or die
if (leftIndex > 0) {
leftIndex--;
} else {
throw new RuntimeException("Error trying to compute confidence interval");
}
}
}
areaUnderCurve = 0.0;
for (int x = leftIndex; x <= rightIndex; x++)
areaUnderCurve += newBins[x];
}
start = (bins.length - rightIndex) * (100.0/bins.length);
stop = (bins.length - leftIndex) * (100.0/bins.length);
contamination = (bins.length - maxIndex) * (100.0/bins.length);
}
public double getStart() {
return start;
}
public double getStop() {
return stop;
}
public double getContamination() {
return contamination;
}
public double getMaxLikelihood() {
return maxLikelihood;
}
public String toString() {
return contamination + "[" + start + " - " + stop + "] log likelihood = " + maxLikelihood;
}
}
}
<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java
package org.broadinstitute.gatk.tools.walkers.haplotypecaller;
import htsjdk.samtools.Cigar;
import htsjdk.samtools.CigarElement;
import htsjdk.samtools.CigarOperator;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.reference.ReferenceSequenceFile;
import org.broadinstitute.gatk.utils.BaseTest;
import org.broadinstitute.gatk.tools.walkers.haplotypecaller.readthreading.ReadThreadingAssembler;
import org.broadinstitute.gatk.utils.GenomeLoc;
import org.broadinstitute.gatk.utils.GenomeLocParser;
import org.broadinstitute.gatk.utils.Utils;
import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
import org.broadinstitute.gatk.utils.haplotype.Haplotype;
import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextBuilder;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.*;
public class LocalAssemblyEngineUnitTest extends BaseTest {
private GenomeLocParser genomeLocParser;
private ReferenceSequenceFile seq;
private SAMFileHeader header;
@BeforeClass
public void setup() throws FileNotFoundException {
seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
genomeLocParser = new GenomeLocParser(seq);
header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
}
@DataProvider(name = "AssembleIntervalsData")
public Object[][] makeAssembleIntervalsData() {
List<Object[]> tests = new ArrayList<Object[]>();
final String contig = "20";
final int start = 10000000;
final int end = 10100000;
final int windowSize = 100;
final int stepSize = 200;
final int nReadsToUse = 5;
for ( int startI = start; startI < end; startI += stepSize) {
final int endI = startI + windowSize;
final GenomeLoc refLoc = genomeLocParser.createGenomeLoc(contig, startI, endI);
tests.add(new Object[]{new ReadThreadingAssembler(), refLoc, nReadsToUse});
}
return tests.toArray(new Object[][]{});
}
@DataProvider(name = "AssembleIntervalsWithVariantData")
public Object[][] makeAssembleIntervalsWithVariantData() {
List<Object[]> tests = new ArrayList<Object[]>();
final String contig = "20";
final int start = 10000000;
final int end = 10001000;
final int windowSize = 100;
final int stepSize = 200;
final int variantStepSize = 1;
final int nReadsToUse = 5;
for ( int startI = start; startI < end; startI += stepSize) {
final int endI = startI + windowSize;
final GenomeLoc refLoc = genomeLocParser.createGenomeLoc(contig, startI, endI);
for ( int variantStart = windowSize / 2 - 10; variantStart < windowSize / 2 + 10; variantStart += variantStepSize ) {
tests.add(new Object[]{new ReadThreadingAssembler(), refLoc, nReadsToUse, variantStart});
}
}
return tests.toArray(new Object[][]{});
}
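    // Assembling reads that exactly match the reference should yield only the reference haplotype.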
@Test(dataProvider = "AssembleIntervalsData")
public void testAssembleRef(final ReadThreadingAssembler assembler, final GenomeLoc loc, final int nReadsToUse) {
final byte[] refBases = seq.getSubsequenceAt(loc.getContig(), loc.getStart(), loc.getStop()).getBases();
final List<GATKSAMRecord> reads = new LinkedList<GATKSAMRecord>();
for ( int i = 0; i < nReadsToUse; i++ ) {
final byte[] bases = refBases.clone();
final byte[] quals = Utils.dupBytes((byte) 30, refBases.length);
final String cigar = refBases.length + "M";
final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, loc.getContig(), loc.getContigIndex(), loc.getStart(), bases, quals, cigar);
reads.add(read);
}
// TODO -- generalize to all assemblers
final Haplotype refHaplotype = new Haplotype(refBases, true);
final List<Haplotype> haplotypes = assemble(assembler, refBases, loc, reads);
Assert.assertEquals(haplotypes, Collections.singletonList(refHaplotype));
}
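    // Introduces a single SNP at the given site and checks that assembly recovers both the reference and the alternate haplotype.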
@Test(dataProvider = "AssembleIntervalsWithVariantData")
public void testAssembleRefAndSNP(final ReadThreadingAssembler assembler, final GenomeLoc loc, final int nReadsToUse, final int variantSite) {
final byte[] refBases = seq.getSubsequenceAt(loc.getContig(), loc.getStart(), loc.getStop()).getBases();
final Allele refBase = Allele.create(refBases[variantSite], true);
final Allele altBase = Allele.create((byte)(refBase.getBases()[0] == 'A' ? 'C' : 'A'), false);
final VariantContextBuilder vcb = new VariantContextBuilder("x", loc.getContig(), variantSite, variantSite, Arrays.asList(refBase, altBase));
testAssemblyWithVariant(assembler, refBases, loc, nReadsToUse, vcb.make());
}
@Test(dataProvider = "AssembleIntervalsWithVariantData")
public void testAssembleRefAndDeletion(final ReadThreadingAssembler assembler, final GenomeLoc loc, final int nReadsToUse, final int variantSite) {
final byte[] refBases = seq.getSubsequenceAt(loc.getContig(), loc.getStart(), loc.getStop()).getBases();
for ( int deletionLength = 1; deletionLength < 10; deletionLength++ ) {
final Allele refBase = Allele.create(new String(refBases).substring(variantSite, variantSite + deletionLength + 1), true);
final Allele altBase = Allele.create(refBase.getBases()[0], false);
final VariantContextBuilder vcb = new VariantContextBuilder("x", loc.getContig(), variantSite, variantSite + deletionLength, Arrays.asList(refBase, altBase));
testAssemblyWithVariant(assembler, refBases, loc, nReadsToUse, vcb.make());
}
}
@Test(dataProvider = "AssembleIntervalsWithVariantData")
public void testAssembleRefAndInsertion(final ReadThreadingAssembler assembler, final GenomeLoc loc, final int nReadsToUse, final int variantSite) {
final byte[] refBases = seq.getSubsequenceAt(loc.getContig(), loc.getStart(), loc.getStop()).getBases();
for ( int insertionLength = 1; insertionLength < 10; insertionLength++ ) {
final Allele refBase = Allele.create(refBases[variantSite], false);
final Allele altBase = Allele.create(new String(refBases).substring(variantSite, variantSite + insertionLength + 1), true);
final VariantContextBuilder vcb = new VariantContextBuilder("x", loc.getContig(), variantSite, variantSite + insertionLength, Arrays.asList(refBase, altBase));
testAssemblyWithVariant(assembler, refBases, loc, nReadsToUse, vcb.make());
}
}
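    // Helper: builds reads carrying the given variant on top of the reference window and checks that assembly
    // recovers exactly the reference haplotype plus the single expected alternate haplotype.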
private void testAssemblyWithVariant(final ReadThreadingAssembler assembler, final byte[] refBases, final GenomeLoc loc, final int nReadsToUse, final VariantContext site) {
final String preRef = new String(refBases).substring(0, site.getStart());
final String postRef = new String(refBases).substring(site.getEnd() + 1, refBases.length);
final byte[] altBases = (preRef + site.getAlternateAllele(0).getBaseString() + postRef).getBytes();
// logger.warn("ref " + new String(refBases));
// logger.warn("alt " + new String(altBases));
final List<GATKSAMRecord> reads = new LinkedList<GATKSAMRecord>();
for ( int i = 0; i < nReadsToUse; i++ ) {
final byte[] bases = altBases.clone();
final byte[] quals = Utils.dupBytes((byte) 30, altBases.length);
final String cigar = altBases.length + "M";
final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, loc.getContig(), loc.getContigIndex(), loc.getStart(), bases, quals, cigar);
reads.add(read);
}
final Haplotype refHaplotype = new Haplotype(refBases, true);
final Haplotype altHaplotype = new Haplotype(altBases, false);
final List<Haplotype> haplotypes = assemble(assembler, refBases, loc, reads);
Assert.assertEquals(haplotypes, Arrays.asList(refHaplotype, altHaplotype));
}
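    // Helper: wraps the reads in an ActiveRegion anchored at the reference haplotype (full-span M cigar),
    // runs the assembler and returns the resulting haplotype list.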
private List<Haplotype> assemble(final ReadThreadingAssembler assembler, final byte[] refBases, final GenomeLoc loc, final List<GATKSAMRecord> reads) {
final Haplotype refHaplotype = new Haplotype(refBases, true);
final Cigar c = new Cigar();
c.add(new CigarElement(refHaplotype.getBases().length, CigarOperator.M));
refHaplotype.setCigar(c);
final ActiveRegion activeRegion = new ActiveRegion(loc, null, true, genomeLocParser, 0);
activeRegion.addAll(reads);
// logger.warn("Assembling " + activeRegion + " with " + engine);
final AssemblyResultSet assemblyResultSet = assembler.runLocalAssembly(activeRegion, refHaplotype, refBases, loc, Collections.<VariantContext>emptyList(), null);
return assemblyResultSet.getHaplotypeList();
}
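    // Generates one SNP test case per position across a 200 bp window on chromosome 20, skipping positions
    // within excludeVariantsWithinXbp of either window edge.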
@DataProvider(name = "SimpleAssemblyTestData")
public Object[][] makeSimpleAssemblyTestData() {
List<Object[]> tests = new ArrayList<Object[]>();
final String contig = "20";
final int start = 10000000;
final int windowSize = 200;
final int end = start + windowSize;
final int excludeVariantsWithinXbp = 25; // TODO -- decrease to zero when the edge calling problem is fixed
final String ref = new String(seq.getSubsequenceAt(contig, start, end).getBases());
final GenomeLoc refLoc = genomeLocParser.createGenomeLoc(contig, start, end);
for ( int snpPos = 0; snpPos < windowSize; snpPos++) {
if ( snpPos > excludeVariantsWithinXbp && (windowSize - snpPos) >= excludeVariantsWithinXbp ) {
final byte[] altBases = ref.getBytes();
altBases[snpPos] = altBases[snpPos] == 'A' ? (byte)'C' : (byte)'A';
final String alt = new String(altBases);
tests.add(new Object[]{"SNP at " + snpPos, new ReadThreadingAssembler(), refLoc, ref, alt});
}
}
return tests.toArray(new Object[][]{});
}
@Test(dataProvider = "SimpleAssemblyTestData")
public void testSimpleAssembly(final String name, final ReadThreadingAssembler assembler, final GenomeLoc loc, final String ref, final String alt) {
final byte[] refBases = ref.getBytes();
final byte[] altBases = alt.getBytes();
final List<GATKSAMRecord> reads = new LinkedList<>();
for ( int i = 0; i < 20; i++ ) {
final byte[] bases = altBases.clone();
final byte[] quals = Utils.dupBytes((byte) 30, altBases.length);
final String cigar = altBases.length + "M";
final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, loc.getContig(), loc.getContigIndex(), loc.getStart(), bases, quals, cigar);
reads.add(read);
}
final Haplotype refHaplotype = new Haplotype(refBases, true);
final Haplotype altHaplotype = new Haplotype(altBases, false);
final List<Haplotype> haplotypes = assemble(assembler, refBases, loc, reads);
Assert.assertTrue(haplotypes.size() > 0, "Failed to find ref haplotype");
Assert.assertEquals(haplotypes.get(0), refHaplotype);
Assert.assertEquals(haplotypes.size(), 2, "Failed to find single alt haplotype");
Assert.assertEquals(haplotypes.get(1), altHaplotype);
}
}
<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/ReadThreadingLikelihoodCalculationEngineUnitTest.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller;
import htsjdk.variant.variantcontext.Allele;
import org.broadinstitute.gatk.utils.genotyper.SampleListUtils;
import org.broadinstitute.gatk.tools.walkers.haplotypecaller.readthreading.HaplotypeGraph;
import org.broadinstitute.gatk.utils.collections.Pair;
import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
import org.broadinstitute.gatk.utils.haplotype.Haplotype;
import org.broadinstitute.gatk.utils.pairhmm.*;
import org.broadinstitute.gatk.utils.sam.ClippedGATKSAMRecord;
import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
import org.testng.Assert;
import org.testng.SkipException;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.*;
/**
* Created with IntelliJ IDEA.
* User: valentin
* Date: 8/4/13
* Time: 10:20 PM
* To change this template use File | Settings | File Templates.
*/
@Test(enabled=false)
public class ReadThreadingLikelihoodCalculationEngineUnitTest extends ActiveRegionTestDataSetUnitTest {
// private static FastHMM hmm = new MLLog10PairHMM((byte)10); // new FastLoglessPairHMM((byte)10);
private static FlexibleHMM hmm = new FastLoglessPairHMM((byte)10);
@Test(dataProvider="activeRegionTestDataSets",enabled=false)
public void testActiveRegionsDataSet(final ActiveRegionTestDataSet as, final int kmerSize, final int readLength, final String variation, final int readCount, final int regionSize, final byte bq, final byte iq, final byte dq) {
super.testActiveRegionsDataSet(as,kmerSize,readLength,variation,readCount,regionSize,bq,iq,dq);
}
    /** How many missing read records are tolerated in the graph-based approach. For example, a read is missed
     * if it does not map to the reference path with at least two kmers in non-overlapping positions. This constant
     * indicates the proportion of read records that we can miss with respect to all possible records.
     */
private static final double READ_SKIP_TOLERANCE = 0.01;
//final PairHMMLikelihoodCalculationEngine fullPairHMM = new PairHMMLikelihoodCalculationEngine((byte)10, false,
// PairHMM.HMM_IMPLEMENTATION.LOGLESS_CACHING, -3);
final PairHMMLikelihoodCalculationEngine fullPairHMM = new PairHMMLikelihoodCalculationEngine((byte)10,
PairHMM.HMM_IMPLEMENTATION.LOGLESS_CACHING, null, Double.NEGATIVE_INFINITY,
true, PairHMMLikelihoodCalculationEngine.PCR_ERROR_MODEL.NONE);
    // When using likelihoods it should be around 0.05;
    // when using maximum likelihoods it can be as low as 0.00001.
private static final double SIGNIFICANT_LnLK_RATIO_DIFF_FRACTION = hmm instanceof FastLoglessPairHMM ? 0.1 : 0.00001;
    // Some cases are expected to show differences between the PairHMM and the graph-based flexible PairHMM.
    // It is therefore difficult to test that the two give similar results in a unit test;
    // that is left to integration tests such as GraphBasedVsLoglessAccuracyIntegrationTest.
    // The code herein is maintained for historical purposes, but disabled.
@Test(dataProvider="readLikekihoodRatioTestData",enabled=false)
public void testReadLikelihoodRatios(final ActiveRegionTestDataSet ds, final GATKSAMRecord read, final Allele a1,
final Allele a2, final PerReadAlleleLikelihoodMap loglessLks,
final PerReadAlleleLikelihoodMap graphLks, final List<Civar.ElementOffset> readEventOffsets, final List<Civar.ElementOffset> firstAlleleCivar, final List<Civar.ElementOffset> secondAlleleCivar ) {
checkForLongEventsThatMightCauseFailures(readEventOffsets, firstAlleleCivar, secondAlleleCivar);
final Map<Allele,Double> logless = loglessLks.getLikelihoodReadMap().get(read);
final Map<Allele,Double> graph = graphLks.getLikelihoodReadMap().get(read);
final double loglessA1Lk = logless.get(a1);
final double loglessA2Lk = logless.get(a2);
if (graph == null)
throw new SkipException("no likelihoods produced for this read using the graph method: Lla1= " + loglessA1Lk + " Lla2= " + loglessA2Lk + "LlDiff=" + (loglessA2Lk - loglessA1Lk) );
final Double graphA1Lk = graph.get(a1);
final Double graphA2Lk = graph.get(a2);
if (graphA1Lk == null)
throw new SkipException("no likelihoods produced for this read in the first haplotype: Lla1= " + loglessA1Lk + " Lla2= " + loglessA2Lk + "LlDiff=" + (loglessA2Lk - loglessA1Lk) );
if (graphA2Lk == null)
throw new SkipException("no likelihoods produced for this read in the second haplotype: Lla1= " + loglessA1Lk + " Lla2= " + loglessA2Lk + "LlDiff=" + (loglessA2Lk - loglessA1Lk) );
final double loglessDiff = loglessA1Lk - loglessA2Lk;
final double graphDiff = graphA1Lk - graphA2Lk;
final double epsilon = calculateEpsilon(graphDiff,loglessDiff);
Assert.assertEquals(graphDiff,loglessDiff,epsilon,String.format("Delta(%f,%f) = %f > %f",graphDiff,loglessDiff,Math.abs(graphDiff - loglessDiff),epsilon));
}
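    // Tolerance used when comparing likelihood-ratio differences: relative (with a 0.01 floor) for the
    // logless HMM, absolute otherwise.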
private double calculateEpsilon(final double graphDiff, final double loglessDiff) {
if (hmm instanceof FastLoglessPairHMM)
return Math.max(0.01,Math.max(Math.abs(loglessDiff),Math.abs(graphDiff)) * SIGNIFICANT_LnLK_RATIO_DIFF_FRACTION);
else
return SIGNIFICANT_LnLK_RATIO_DIFF_FRACTION;
}
    private static final double MIN_READ_ACROSS_SIZE_FOR_INDEL_EVENTS = 0.8; // 80 percent.
private static final double MIN_LARGE_INDEL = 4;
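    // Skips (via SkipException) test cases in which either allele contains a large isolated indel that the
    // read does not span with enough margin, as such cases might cause the engine comparison to fail.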
private void checkForLongEventsThatMightCauseFailures(final List<Civar.ElementOffset> read, final List<Civar.ElementOffset> a1, final List<Civar.ElementOffset> a2) {
int sequenceLength = Math.max(a1.get(a1.size() - 1).templateTo, a2.get(a2.size() - 1).templateTo) + 1;
boolean tai1 = thereAreIndels(a1);
boolean tai2 = thereAreIndels(a2);
boolean tair = thereAreIndels(read);
boolean thereAreIndels = tai1 || tai2 || tair;
if (!thereAreIndels) return;
final boolean[] inserts = new boolean[sequenceLength];
final boolean[] deletions = new boolean[sequenceLength];
final int[] range = new int[2];
int refStart = Integer.MAX_VALUE;
int refEnd = -1;
for (final Civar.ElementOffset ce : read) {
if (refStart > ce.templateFrom)
refStart = ce.templateFrom;
if (refEnd < ce.templateTo)
refEnd = ce.templateTo;
switch (ce.element.operator()) {
case DELETION:
deletions[ce.templateFrom] = deletions[ce.templateTo] = true;
break;
case INSERTION:
inserts[ce.templateFrom] = inserts[ce.templateTo] = true;
break;
case MATCH:
break;
}
}
range[0] = refStart;
range[1] = refEnd;
checkForLongEventsThatMightCauseFailures_allele(refStart,refEnd,inserts,deletions,a1);
checkForLongEventsThatMightCauseFailures_allele(refStart,refEnd,inserts,deletions,a2);
}
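    // Checks a single allele's indel events against the read span and skips the test when an indel that is
    // not present in the read lies closer to either read boundary than MIN_READ_ACROSS_SIZE_FOR_INDEL_EVENTS
    // times its length.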
private void checkForLongEventsThatMightCauseFailures_allele(final int refStart, final int refEnd, final boolean[] inserts, final boolean[] deletions, final List<Civar.ElementOffset> a1) {
for (final Civar.ElementOffset ce : a1) {
if (ce.templateFrom <= refStart) continue;
if (ce.templateTo >= refEnd) continue;
int size;
switch (ce.element.operator()) {
case DELETION:
size = ce.templateTo - ce.templateFrom;
if (deletions[ce.templateFrom] || deletions[ce.templateTo]) continue;
break;
case INSERTION:
size = ce.sequenceTo - ce.sequenceFrom;
if (inserts[ce.templateFrom] || inserts[ce.templateTo]) continue;
break;
default:
continue;
}
int minMargin = (int) Math.ceil(size * MIN_READ_ACROSS_SIZE_FOR_INDEL_EVENTS);
if (ce.templateFrom - refStart < minMargin)
throw new SkipException("Large Indel");
if (refEnd - ce.templateTo < minMargin)
throw new SkipException("Large Indel");
}
}
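    // Returns true if the Civar contains a deletion or insertion of at least MIN_LARGE_INDEL bases.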
private boolean thereAreIndels(final List<Civar.ElementOffset> a1) {
for (final Civar.ElementOffset ce : a1) {
switch (ce.element.operator()) {
case DELETION:
if (ce.templateTo - ce.templateFrom >= MIN_LARGE_INDEL) return true;
break;
case INSERTION:
if (ce.sequenceTo - ce.sequenceFrom >= MIN_LARGE_INDEL) return true;
break;
}
}
return false;
}
@DataProvider(name="readLikekihoodRatioTestData")
public Iterator<Object[]> readLikelihoodRatioTestDataSets() {
final Iterator<Object[]> activeRegionTestDataSetIterator = super.activeRegionTestDataSets();
return new java.util.Iterator<Object[]>() {
public static final boolean INTRODUCE_READ_ERRORS = true;
private List<Pair<Allele,Allele>> allelePairs;
private Iterator<Pair<Allele,Allele>> allelePairsIt;
private Iterator<GATKSAMRecord> readIt;
private GATKSAMRecord read;
private Iterator<List<Civar.ElementOffset>> civarEventOffsetsIt;
private List<Civar.ElementOffset> civarEventOffsets;
private ActiveRegionTestDataSet dataSet;
private GraphBasedLikelihoodCalculationEngineInstance graphEngine;
private PerReadAlleleLikelihoodMap graphLks;
private PerReadAlleleLikelihoodMap loglessLks;
private Map<Allele,Civar> civarByAllele;
private String reference;
@Override
public boolean hasNext() {
return activeRegionTestDataSetIterator.hasNext() || (readIt != null && readIt.hasNext()) || (allelePairsIt != null && allelePairsIt.hasNext());
}
@Override
public Object[] next() {
if (allelePairsIt != null && allelePairsIt.hasNext()) {
final Pair<Allele,Allele> allelePair = allelePairsIt.next();
return new Object[] { dataSet, read, allelePair.getFirst(), allelePair.getSecond(), loglessLks, graphLks, civarEventOffsets, civarByAllele.get(allelePair.getFirst()).eventOffsets(reference,0,Integer.MAX_VALUE), civarByAllele.get(allelePair.getSecond()).eventOffsets(reference,0,Integer.MAX_VALUE)};
}
if (readIt != null && readIt.hasNext()) {
allelePairsIt = allelePairs.iterator();
final Pair<Allele,Allele> allelePair = allelePairsIt.next();
return new Object[] {dataSet, read = readIt.next(), allelePair.getFirst(), allelePair.getSecond(), loglessLks, graphLks, civarEventOffsets = civarEventOffsetsIt.next(), civarByAllele.get(allelePair.getFirst()).eventOffsets(reference,0,Integer.MAX_VALUE), civarByAllele.get(allelePair.getSecond()).eventOffsets(reference,0,Integer.MAX_VALUE) };
}
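                // Otherwise advance to the next active-region data set: compute the graph-based and PairHMM
                // likelihoods once for all of its reads and set up iteration over every read and allele pair.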
final Object[] params = activeRegionTestDataSetIterator.next();
dataSet = (ActiveRegionTestDataSet) params[0];
if (INTRODUCE_READ_ERRORS) dataSet.introduceErrors(new Random(13));
graphEngine = new GraphBasedLikelihoodCalculationEngineInstance(dataSet.assemblyResultSet(),hmm,Double.NEGATIVE_INFINITY, HeterogeneousKmerSizeResolution.COMBO_MAX);
graphLks = graphEngine.computeReadLikelihoods(dataSet.haplotypeList(), SampleListUtils.singletonList("anonymous"),Collections.singletonMap("anonymous",dataSet.readList())).toPerReadAlleleLikelihoodMap(0);
// clip reads at the anchors.
final Map<GATKSAMRecord,GATKSAMRecord> clippedReads = anchorClippedReads(graphEngine.getHaplotypeGraph(),dataSet.readList());
final List<GATKSAMRecord> clippedReadList = new ArrayList<>(dataSet.readList().size());
for (final GATKSAMRecord r : dataSet.readList()) {
clippedReadList.add(clippedReads.containsKey(r) ? clippedReads.get(r) : r);
}
loglessLks = fullPairHMM.computeReadLikelihoods(dataSet.assemblyResultSet(),SampleListUtils.singletonList("anonymous"),Collections.singletonMap("anonymous",clippedReadList)).toPerReadAlleleLikelihoodMap(0);
                // Re-key the likelihood map from the clipped reads back to the original (unclipped) reads.
for (final GATKSAMRecord r : clippedReads.keySet()) {
loglessLks.getLikelihoodReadMap().put(r,loglessLks.getLikelihoodReadMap().remove(clippedReads.get(r)));
}
final List<Haplotype> haplotypes = dataSet.haplotypeList();
final Map<Haplotype,Allele> alleleByHaplotype = new HashMap<>(haplotypes.size());
final Map<String,Civar> civarBySequence = new HashMap<>(haplotypes.size());
final Map<String,Haplotype> haplotypeBySequence = new HashMap<>(haplotypes.size());
civarByAllele = new HashMap<>(haplotypes.size());
final List<Civar> unrolledCivars = dataSet.unrolledCivars();
for (int i = 0; i < haplotypes.size(); i++) {
final Haplotype h = haplotypes.get(i);
haplotypeBySequence.put(h.getBaseString(),h);
civarBySequence.put(h.getBaseString(),unrolledCivars.get(i));
}
for (final Allele a : loglessLks.getAllelesSet()) {
alleleByHaplotype.put(haplotypeBySequence.get(a.getBaseString()),a);
civarByAllele.put(a,civarBySequence.get(a.getBaseString()));
}
allelePairs = new ArrayList<>(haplotypes.size() * 2);
final Haplotype[] haplotypeArray = haplotypes.toArray(new Haplotype[haplotypes.size()]);
for (int i = 0; i < haplotypeArray.length; i++)
for (int j = i + 1; j < haplotypeArray.length; j++)
allelePairs.add(new Pair<>(alleleByHaplotype.get(haplotypeArray[i]),alleleByHaplotype.get(haplotypeArray[j])));
allelePairsIt = allelePairs.iterator();
readIt = dataSet.readList().iterator();
final Pair<Allele,Allele> allelePair = allelePairsIt.next();
civarEventOffsetsIt = dataSet.readEventOffsetList().iterator();
reference = dataSet.getReference();
return new Object[] { dataSet , read = readIt.next(), allelePair.getFirst(), allelePair.getSecond(), loglessLks, graphLks, civarEventOffsets = civarEventOffsetsIt.next(), civarByAllele.get(allelePair.getFirst()).eventOffsets(reference,0,Integer.MAX_VALUE), civarByAllele.get(allelePair.getSecond()).eventOffsets(reference,0,Integer.MAX_VALUE)};
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
    /**
     * Returns the reads clipped at their anchors in the haplotype graph.
     *
     * @param haplotypeGraph the haplotype graph used to determine each read's anchoring.
     * @param reads target reads.
     * @return never {@code null}; a map from original reads to their anchor-clipped versions.
     */
protected Map<GATKSAMRecord, GATKSAMRecord> anchorClippedReads(final HaplotypeGraph haplotypeGraph, final List<GATKSAMRecord> reads) {
final Map<GATKSAMRecord, GATKSAMRecord> result = new HashMap<>(reads.size());
for (final GATKSAMRecord r : reads) {
final ReadAnchoring anchoring = new ReadAnchoring(r,haplotypeGraph);
if (anchoring.isAnchoredSomewhere())
continue;
final int start = anchoring.leftAnchorIndex;
final int end = anchoring.rightAnchorIndex + haplotypeGraph.getKmerSize();
final GATKSAMRecord clipped = new ClippedGATKSAMRecord(r, start, end);
result.put(r, clipped);
}
return result;
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller.readthreading;
import org.apache.log4j.Logger;
import org.broadinstitute.gatk.tools.walkers.haplotypecaller.AssemblyResult;
import org.broadinstitute.gatk.tools.walkers.haplotypecaller.LocalAssemblyEngine;
import org.broadinstitute.gatk.tools.walkers.haplotypecaller.graphs.*;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.haplotype.Haplotype;
import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
import java.io.File;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
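/**
 * Local assembly engine that threads the reads (and any given GGA haplotypes) through a kmer-based
 * read-threading graph built on top of the reference haplotype and extracts candidate haplotypes from
 * the resulting sequence graph.
 *
 * Rough usage sketch, based on LocalAssemblyEngineUnitTest elsewhere in this repository (the active
 * region, reference bases and location are assumed to have been prepared by the caller):
 * <pre>
 *   final ReadThreadingAssembler assembler = new ReadThreadingAssembler();
 *   final AssemblyResultSet results = assembler.runLocalAssembly(activeRegion, refHaplotype,
 *           refBases, refLoc, Collections.&lt;VariantContext&gt;emptyList(), null);
 *   final List&lt;Haplotype&gt; haplotypes = results.getHaplotypeList();
 * </pre>
 */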
public class ReadThreadingAssembler extends LocalAssemblyEngine {
private final static Logger logger = Logger.getLogger(ReadThreadingAssembler.class);
private final static int DEFAULT_NUM_PATHS_PER_GRAPH = 128;
private final static int GGA_MODE_ARTIFICIAL_COUNTS = 1000;
private final static int KMER_SIZE_ITERATION_INCREASE = 10;
private final static int MAX_KMER_ITERATIONS_TO_ATTEMPT = 6;
/** The min and max kmer sizes to try when building the graph. */
private final List<Integer> kmerSizes;
private final boolean dontIncreaseKmerSizesForCycles;
private final boolean allowNonUniqueKmersInRef;
private final int numPruningSamples;
protected boolean removePathsNotConnectedToRef = true;
private boolean justReturnRawGraph = false;
/** for testing only */
public ReadThreadingAssembler() {
this(DEFAULT_NUM_PATHS_PER_GRAPH, Arrays.asList(25));
}
public ReadThreadingAssembler(final int maxAllowedPathsForReadThreadingAssembler, final List<Integer> kmerSizes, final boolean dontIncreaseKmerSizesForCycles, final boolean allowNonUniqueKmersInRef, final int numPruningSamples) {
super(maxAllowedPathsForReadThreadingAssembler);
this.kmerSizes = kmerSizes;
this.dontIncreaseKmerSizesForCycles = dontIncreaseKmerSizesForCycles;
this.allowNonUniqueKmersInRef = allowNonUniqueKmersInRef;
this.numPruningSamples = numPruningSamples;
}
protected ReadThreadingAssembler(final int maxAllowedPathsForReadThreadingAssembler, final List<Integer> kmerSizes) {
this(maxAllowedPathsForReadThreadingAssembler, kmerSizes, true, true, 1);
}
/** for testing purposes */
protected void setJustReturnRawGraph(boolean justReturnRawGraph) {
this.justReturnRawGraph = justReturnRawGraph;
}
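    // Adds the result to the list unless it is null (createGraph returns null when a kmer size cannot be used).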
private void addResult(final List<AssemblyResult> results, final AssemblyResult maybeNullResult) {
if ( maybeNullResult != null )
results.add(maybeNullResult);
}
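    // Tries each requested kmer size first; if none yields a usable graph and kmer-size iteration is allowed,
    // retries with progressively larger kmer sizes, permitting low-complexity and non-unique-kmer graphs on
    // the final attempt.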
@Override
public List<AssemblyResult> assemble(final List<GATKSAMRecord> reads, final Haplotype refHaplotype, final List<Haplotype> givenHaplotypes) {
final List<AssemblyResult> results = new LinkedList<>();
// first, try using the requested kmer sizes
for ( final int kmerSize : kmerSizes ) {
addResult(results, createGraph(reads, refHaplotype, kmerSize, givenHaplotypes, dontIncreaseKmerSizesForCycles, allowNonUniqueKmersInRef));
}
// if none of those worked, iterate over larger sizes if allowed to do so
if ( results.isEmpty() && !dontIncreaseKmerSizesForCycles ) {
int kmerSize = MathUtils.arrayMaxInt(kmerSizes) + KMER_SIZE_ITERATION_INCREASE;
int numIterations = 1;
while ( results.isEmpty() && numIterations <= MAX_KMER_ITERATIONS_TO_ATTEMPT ) {
// on the last attempt we will allow low complexity graphs
final boolean lastAttempt = numIterations == MAX_KMER_ITERATIONS_TO_ATTEMPT;
addResult(results, createGraph(reads, refHaplotype, kmerSize, givenHaplotypes, lastAttempt, allowNonUniqueKmersInRef || lastAttempt));
kmerSize += KMER_SIZE_ITERATION_INCREASE;
numIterations++;
}
}
return results;
}
/**
* Creates the sequence graph for the given kmerSize
*
* @param reads reads to use
* @param refHaplotype reference haplotype
* @param kmerSize kmer size
* @param activeAlleleHaplotypes the GGA haplotypes to inject into the graph
* @param allowLowComplexityGraphs if true, do not check for low-complexity graphs
* @param allowNonUniqueKmersInRef if true, do not fail if the reference has non-unique kmers
* @return sequence graph or null if one could not be created (e.g. because it contains cycles or too many paths or is low complexity)
*/
protected AssemblyResult createGraph(final List<GATKSAMRecord> reads,
final Haplotype refHaplotype,
final int kmerSize,
final List<Haplotype> activeAlleleHaplotypes,
final boolean allowLowComplexityGraphs,
final boolean allowNonUniqueKmersInRef) {
if ( refHaplotype.length() < kmerSize ) {
// happens in cases where the assembled region is just too small
return new AssemblyResult(AssemblyResult.Status.FAILED, null);
}
if ( !allowNonUniqueKmersInRef && !ReadThreadingGraph.determineNonUniqueKmers(new SequenceForKmers("ref", refHaplotype.getBases(), 0, refHaplotype.getBases().length, 1, true), kmerSize).isEmpty() ) {
if ( debug ) logger.info("Not using kmer size of " + kmerSize + " in read threading assembler because reference contains non-unique kmers");
return null;
}
final ReadThreadingGraph rtgraph = new ReadThreadingGraph(kmerSize, debugGraphTransformations, minBaseQualityToUseInAssembly, numPruningSamples);
rtgraph.setThreadingStartOnlyAtExistingVertex(!recoverDanglingBranches);
// add the reference sequence to the graph
rtgraph.addSequence("ref", refHaplotype.getBases(), true);
// add the artificial GGA haplotypes to the graph
int hapCount = 0;
for ( final Haplotype h : activeAlleleHaplotypes ) {
rtgraph.addSequence("activeAllele" + hapCount++, h.getBases(), GGA_MODE_ARTIFICIAL_COUNTS, false);
}
// Next pull kmers out of every read and throw them on the graph
for( final GATKSAMRecord read : reads ) {
rtgraph.addRead(read);
}
// actually build the read threading graph
rtgraph.buildGraphIfNecessary();
// sanity check: make sure there are no cycles in the graph
if ( rtgraph.hasCycles() ) {
if ( debug ) logger.info("Not using kmer size of " + kmerSize + " in read threading assembler because it contains a cycle");
return null;
}
// sanity check: make sure the graph had enough complexity with the given kmer
if ( ! allowLowComplexityGraphs && rtgraph.isLowComplexity() ) {
if ( debug ) logger.info("Not using kmer size of " + kmerSize + " in read threading assembler because it does not produce a graph with enough complexity");
return null;
}
printDebugGraphTransform(rtgraph, new File("" + refHaplotype.getGenomeLocation() + "-sequenceGraph." + kmerSize + ".0.0.raw_readthreading_graph.dot"));
// go through and prune all of the chains where all edges have <= pruneFactor. This must occur
// before recoverDanglingTails in the graph, so that we don't spend a ton of time recovering
// tails that we'll ultimately just trim away anyway, as the dangling tail edges have weight of 1
rtgraph.pruneLowWeightChains(pruneFactor);
// look at all chains in the graph that terminate in a non-ref node (dangling sources and sinks) and see if
// we can recover them by merging some N bases from the chain back into the reference
if ( recoverDanglingBranches ) {
rtgraph.recoverDanglingTails(pruneFactor, minDanglingBranchLength);
rtgraph.recoverDanglingHeads(pruneFactor, minDanglingBranchLength);
}
        // remove all leading and trailing paths that are not connected to the reference
if ( removePathsNotConnectedToRef ) rtgraph.removePathsNotConnectedToRef();
printDebugGraphTransform(rtgraph, new File("" + refHaplotype.getGenomeLocation() + "-sequenceGraph." + kmerSize + ".0.1.cleaned_readthreading_graph.dot"));
final SeqGraph initialSeqGraph = rtgraph.convertToSequenceGraph();
if (debugGraphTransformations) initialSeqGraph.printGraph(new File("" + refHaplotype.getGenomeLocation() + "-sequenceGraph." + kmerSize + ".0.1.initial_seqgraph.dot"),10000);
// if the unit tests don't want us to cleanup the graph, just return the raw sequence graph
if ( justReturnRawGraph ) return new AssemblyResult(AssemblyResult.Status.ASSEMBLED_SOME_VARIATION, initialSeqGraph);
if (debug) logger.info("Using kmer size of " + rtgraph.getKmerSize() + " in read threading assembler");
printDebugGraphTransform(initialSeqGraph, new File( "" + refHaplotype.getGenomeLocation() + "-sequenceGraph." + kmerSize + ".0.2.initial_seqgraph.dot"));
        initialSeqGraph.cleanNonRefPaths(); // TODO -- I don't think this is possible by construction
final AssemblyResult cleaned = cleanupSeqGraph(initialSeqGraph);
final AssemblyResult.Status status = cleaned.getStatus();
final AssemblyResult result = new AssemblyResult(status, cleaned.getGraph());
result.setThreadingGraph(rtgraph);
return result;
}
@Override
public String toString() {
return "ReadThreadingAssembler{" +
"kmerSizes=" + kmerSizes +
'}';
}
}<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/CalculateGenotypePosteriors.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.variantutils;
import org.broadinstitute.gatk.utils.commandline.*;
import org.broadinstitute.gatk.engine.CommandLineGATK;
import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
import org.broadinstitute.gatk.engine.samples.Sample;
import org.broadinstitute.gatk.engine.samples.Trio;
import org.broadinstitute.gatk.engine.walkers.RodWalker;
import org.broadinstitute.gatk.engine.SampleUtils;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
import org.broadinstitute.gatk.utils.help.HelpConstants;
import org.broadinstitute.gatk.engine.GATKVCFUtils;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import org.broadinstitute.gatk.utils.variant.GATKVCFHeaderLines;
import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
import org.broadinstitute.gatk.utils.variant.HomoSapiensConstants;
import htsjdk.variant.variantcontext.*;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.vcf.*;
import java.util.*;
/**
* Calculate genotype posterior likelihoods given panel data
*
* <p>
* Given a VCF with genotype likelihoods from the HaplotypeCaller, UnifiedGenotyper, or another source which provides
* <b>unbiased</b> genotype likelihoods, calculate the posterior genotype state and likelihood given allele frequency
* information from both the samples themselves and input VCFs describing allele frequencies in related populations.</p>
*
* <p>The AF field will not be used in this calculation as it does not provide a way to estimate the confidence interval
* or uncertainty around the allele frequency, while AN provides this necessary information. This uncertainty is
* modeled by a Dirichlet distribution: that is, the frequency is known up to a Dirichlet distribution with
* parameters AC1+q,AC2+q,...,(AN-AC1-AC2-...)+q, where "q" is the global frequency prior (typically q << 1). The
* genotype priors applied then follow a Dirichlet-Multinomial distribution, where 2 alleles per sample are drawn
* independently. This assumption of independent draws is the assumption of Hardy-Weinberg Equilibrium (HWE). Thus, HWE is
* imposed on the likelihoods as a result of CalculateGenotypePosteriors.</p>
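*
* <p>For intuition only, a minimal sketch (not the tool's actual code) of how such a Dirichlet-Multinomial
* genotype prior could be evaluated at a biallelic site, assuming hypothetical supporting counts AC=3, AN=200
* and q=0.001:</p>
* <pre>
* // hypothetical numbers, for illustration only
* double q = 0.001, ac = 3, an = 200;
* double aAlt = ac + q;           // Dirichlet parameter for the ALT allele (AC + q)
* double aRef = (an - ac) + q;    // Dirichlet parameter for the REF allele ((AN - AC) + q)
* double aTot = aRef + aAlt;
* // prior probabilities of the three diploid genotypes, 2 alleles drawn under HWE
* double pHomRef = aRef * (aRef + 1) / (aTot * (aTot + 1));
* double pHet    = 2 * aRef * aAlt  / (aTot * (aTot + 1));
* double pHomAlt = aAlt * (aAlt + 1) / (aTot * (aTot + 1));  // the three probabilities sum to 1
* </pre>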
*
* <h3>Input</h3>
* <p>
* <ul>
* <li>A VCF with genotype likelihoods, and optionally genotypes, AC/AN fields, or MLEAC/AN fields</li>
* <li>(Optional) A PED pedigree file describing the relationships between the individuals.</li>
* </ul>
* </p>
*
* <p>
* A collection of VCFs to use for informing allele frequency priors. Each VCF must have one of
* </p>
* <ul>
* <li>AC field and AN field</li>
* <li>MLEAC field and AN field</li>
* <li>genotypes</li>
* </ul>
*
* <h3>Output</h3>
* <p>A new VCF with:</p>
* <ul>
* <li>Genotype posteriors added to the genotype fields ("PP")</li>
* <li>Genotypes and GQ assigned according to these posteriors</li>
* <li>Per-site genotype priors added to the INFO field ("PG")</li>
* <li>(Optional) Per-site, per-trio joint likelihoods (JL) and joint posteriors (JP) given as Phred-scaled probability
* of all genotypes in the trio being correct based on the PLs for JL and the PPs for JP. These annotations are added to
* the genotype fields.</li>
* </ul>
*
* <h3>Notes</h3>
* <p>
* Using the default behavior, priors will only be applied to each variant (provided each variant has at least 10
* called samples). SNP sites in the input callset that have a SNP at the matching site in the supporting VCF will have
* priors applied based on the AC from the supporting samples and the input callset (unless the --ignoreInputSamples
* flag is used). If the site is not called in the supporting VCF, priors will be applied using the discovered AC from
* the input samples (unless the --discoveredACpriorsOff flag is used). Flat priors are applied for any non-SNP sites in
* the input callset.
* </p>
*
* <h3>Usage examples</h3>
* <h4>Inform the genotype assignment of NA12878 using the 1000G Euro panel</h4>
* <pre>
* java -jar GenomeAnalysisTK.jar \
* -T CalculateGenotypePosteriors \
* -R reference.fasta \
* -V NA12878.wgs.HC.vcf \
* -supporting 1000G_EUR.genotypes.combined.vcf \
* -o NA12878.wgs.HC.posteriors.vcf
* </pre>
*
* <h4>Refine the genotypes of a large panel based on the discovered allele frequency</h4>
* <pre>
* java -jar GenomeAnalysisTK.jar \
* -T CalculateGenotypePosteriors \
* -R reference.fasta \
* -V input.vcf \
* -o output.withPosteriors.vcf
* </pre>
*
* <h4>Apply frequency and HWE-based priors to the genotypes of a family without including the family allele counts
* in the allele frequency estimates</h4>
* <pre>
* java -jar GenomeAnalysisTK.jar \
* -T CalculateGenotypePosteriors \
* -R reference.fasta \
* -V input.vcf \
* -o output.withPosteriors.vcf \
* --ignoreInputSamples
* </pre>
*
* <h4>Calculate the posterior genotypes of a callset, and impose that a variant *not seen* in the external panel
* is tantamount to being AC=0, AN=100 within that panel</h4>
* <pre>
* java -jar GenomeAnalysisTK.jar \
* -T CalculateGenotypePosteriors \
* -R reference.fasta \
* -supporting external.panel.vcf \
* -V input.vcf \
* -o output.withPosteriors.vcf \
* --numRefSamplesIfNoCall 100
* </pre>
*
* <h4>Apply only family priors to a callset</h4>
* <pre>
* java -jar GenomeAnalysisTK.jar \
* -T CalculateGenotypePosteriors \
* -R reference.fasta \
* -V input.vcf \
* --skipPopulationPriors \
* -ped family.ped \
* -o output.withPosteriors.vcf
* </pre>
*
* <h3>Caveat</h3>
* <p>If applying family priors, only diploid family genotypes are supported</p>
*/
@DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARDISC, extraDocs = {CommandLineGATK.class} )
public class CalculateGenotypePosteriors extends RodWalker<Integer,Integer> {
/**
* The input VCF (posteriors will be calculated for these samples, and written to the output)
*/
@ArgumentCollection
protected StandardVariantContextInputArgumentCollection variantCollection = new StandardVariantContextInputArgumentCollection();
/**
* Supporting external panels. Allele counts from these panels (taken from AC,AN or MLEAC,AN or raw genotypes) will
* be used to inform the frequency distribution underlying the genotype priors. These files must conform to the VCF 4.2 spec or later.
*/
@Input(fullName="supporting", shortName = "supporting", doc="Other callsets to use in generating genotype posteriors", required=false)
public List<RodBinding<VariantContext>> supportVariants = new ArrayList<>();
/**
* The global prior of a variant site -- i.e. the expected allele frequency distribution knowing only that N alleles
* exist, and having observed none of them. This is the "typical" 1/x trend, modeled here as not varying
* across alleles. The calculation for this parameter is (Effective population size) * (steady state mutation rate)
*
*/
@Argument(fullName="globalPrior",shortName="G",doc="The global Dirichlet prior parameters for the allele frequency",required=false)
public double globalPrior = HomoSapiensConstants.SNP_HETEROZYGOSITY;
/**
* The mutation prior -- i.e. the probability that a new mutation occurs. Sensitivity analysis on known de novo
* mutations suggests a default value of 10^-6.
*
*/
@Argument(fullName="deNovoPrior",shortName="DNP",doc="The de novo mutation prior",required=false)
public double deNovoPrior = 1e-6;
/**
* When a variant is not seen in a panel, whether to infer (and with what effective strength) that only reference
* alleles were ascertained at that site. E.g. "If not seen in 1000Genomes, treat it as AC=0, AN=2000". This is
* applied across all external panels, so if numRefSamplesIfNoCall = 10 and the variant is absent in two panels, this
* confers evidence of AC=0,AN=20.
*/
@Argument(fullName="numRefSamplesIfNoCall",shortName="nrs",doc="The number of homozygous reference to infer were " +
"seen at a position where an \"other callset\" contains no site or genotype information",required=false)
public int numRefIfMissing = 0;
/**
* Rather than looking for the MLEAC field first and then falling back to AC, first look for the AC field and then
* fall back to MLEAC or raw genotypes.
*/
@Argument(fullName="defaultToAC",shortName="useAC",doc="Use the AC field as opposed to MLEAC. Does nothing if VCF lacks MLEAC field",required=false)
public boolean defaultToAC = false;
/**
* Do not use the [MLE] allele count from the input samples (the ones for which you're calculating posteriors)
* in the site frequency distribution; only use the AC and AN calculated from external sources.
*/
@Argument(fullName="ignoreInputSamples",shortName="ext",doc="Use external information only; do not inform genotype priors by "+
"the discovered allele frequency in the callset whose posteriors are being calculated. Useful for callsets containing "+
"related individuals.",required=false)
public boolean ignoreInputSamples = false;
/**
* By default, priors for variants missing from the external callsets are calculated from the allele count discovered in the input samples; use this flag to apply flat priors to those sites instead.
*/
@Argument(fullName="discoveredACpriorsOff",shortName="useACoff",doc="Do not use discovered allele count in the input callset " +
"for variants that do not appear in the external callset. ", required=false)
public boolean useACoff = false;
/**
* Skip application of population-based priors
*/
@Argument(fullName="skipPopulationPriors",shortName="skipPop",doc="Skip application of population-based priors", required=false)
public boolean skipPopulationPriors = false;
/**
* Skip application of family-based priors. Note: if the pedigree file is absent, family-based priors will be skipped.
*/
@Argument(fullName="skipFamilyPriors",shortName="skipFam",doc="Skip application of family-based priors", required=false)
public boolean skipFamilyPriors = false;
@Output(doc="File to which variants should be written")
protected VariantContextWriter vcfWriter = null;
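// helper that applies family (trio) priors to the genotypes; configured with the de novo prior in initialize()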
private FamilyLikelihoodsUtils famUtils = new FamilyLikelihoodsUtils();
public void initialize() {
// Get list of samples to include in the output
final List<String> rodNames = Arrays.asList(variantCollection.variants.getName());
final Map<String,VCFHeader> vcfRods = GATKVCFUtils.getVCFHeadersFromRods(getToolkit(), rodNames);
final Set<String> vcfSamples = SampleUtils.getSampleList(vcfRods, GATKVariantContextUtils.GenotypeMergeType.REQUIRE_UNIQUE);
//Get the trios from the families passed as ped
if (!skipFamilyPriors){
final Set<Trio> trios = getSampleDB().getTrios();
if(trios.size()<1) {
logger.info("No PED file passed or no *non-skipped* trios found in PED file. Skipping family priors.");
skipFamilyPriors = true;
}
}
if ( vcfRods.size() > 1 )
throw new IllegalStateException("Somehow more than one variant was bound?");
final VCFHeader header = new ArrayList<>(vcfRods.values()).get(0); // pure laziness
if ( ! header.hasGenotypingData() ) {
throw new UserException("VCF has no genotypes");
}
if ( header.hasInfoLine(GATKVCFConstants.MLE_ALLELE_COUNT_KEY) ) {
final VCFInfoHeaderLine mleLine = header.getInfoHeaderLine(GATKVCFConstants.MLE_ALLELE_COUNT_KEY);
if ( mleLine.getCountType() != VCFHeaderLineCount.A ) {
throw new UserException("VCF does not have a properly formatted MLEAC field: the count type should be \"A\"");
}
if ( mleLine.getType() != VCFHeaderLineType.Integer ) {
throw new UserException("VCF does not have a properly formatted MLEAC field: the field type should be \"Integer\"");
}
}
// Initialize VCF header
final Set<VCFHeaderLine> headerLines = VCFUtils.smartMergeHeaders(vcfRods.values(), true);
headerLines.add(GATKVCFHeaderLines.getFormatLine(GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY));
headerLines.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.GENOTYPE_PRIOR_KEY));
if (!skipFamilyPriors) {
headerLines.add(GATKVCFHeaderLines.getFormatLine(GATKVCFConstants.JOINT_LIKELIHOOD_TAG_NAME));
headerLines.add(GATKVCFHeaderLines.getFormatLine(GATKVCFConstants.JOINT_POSTERIOR_TAG_NAME));
}
headerLines.add(new VCFHeaderLine("source", "CalculateGenotypePosteriors"));
vcfWriter.writeHeader(new VCFHeader(headerLines, vcfSamples));
Map<String,Set<Sample>> families = this.getSampleDB().getFamilies(vcfSamples);
famUtils.initialize(deNovoPrior, vcfSamples, families);
}
public Integer reduceInit() { return 0; }
public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
if ( tracker == null || context == null || ref == null ) {
return 0;
}
final Collection<VariantContext> vcs = tracker.getValues(variantCollection.variants, ref.getLocus());
final Collection<VariantContext> otherVCs = tracker.getValues(supportVariants, context.getLocation());
final int missing = supportVariants.size() - otherVCs.size();
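// supporting callsets with no record at this site; each one contributes numRefIfMissing inferred hom-ref samples to the prior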
for ( final VariantContext vc : vcs ) {
VariantContext vc_familyPriors,vc_bothPriors;
//do family priors first (if applicable)
final VariantContextBuilder builder = new VariantContextBuilder(vc);
//only compute family priors for biallelic sites
if (!skipFamilyPriors && vc.isBiallelic()){
GenotypesContext gc = famUtils.calculatePosteriorGLs(vc);
builder.genotypes(gc);
}
VariantContextUtils.calculateChromosomeCounts(builder, false);
vc_familyPriors = builder.make();
if (!skipPopulationPriors)
vc_bothPriors = PosteriorLikelihoodsUtils.calculatePosteriorGLs(vc_familyPriors, otherVCs, missing * numRefIfMissing, globalPrior, !ignoreInputSamples, defaultToAC, useACoff);
else {
final VariantContextBuilder builder2 = new VariantContextBuilder(vc_familyPriors);
VariantContextUtils.calculateChromosomeCounts(builder2, false);
vc_bothPriors = builder2.make();
}
vcfWriter.add(vc_bothPriors);
}
return 1;
}
public Integer reduce(Integer l, Integer r) { return r + l; }
}
<file_sep>/src/main/java/org/broadinstitute/gatk/utils/pairhmm/DebugJNILoglessPairHMM.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.utils.pairhmm;
import com.google.java.contract.Ensures;
import com.google.java.contract.Requires;
import org.broadinstitute.gatk.utils.QualityUtils;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods;
import org.broadinstitute.gatk.utils.haplotype.Haplotype;
import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
import org.broadinstitute.gatk.nativebindings.pairhmm.PairHMMNativeArguments;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.broadinstitute.gatk.utils.pairhmm.PairHMMModel.*;
/**
* Created with IntelliJ IDEA.
* User: rpoplin, carneiro
* Date: 10/16/12
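*
* Debug wrapper around {@link LoglessPairHMM}: in verify mode it runs both the pure-Java kernel and the JNI
* vectorized kernel on the same inputs, compares the resulting likelihoods, and dumps the offending test cases
* to text files whenever the two implementations disagree beyond a small absolute/relative tolerance.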
*/
public class DebugJNILoglessPairHMM extends LoglessPairHMM {
private static final boolean dumpSandboxOnly = false; //simulates ifdef
private static final boolean debug = false; //simulates ifdef
private static final boolean verify = !dumpSandboxOnly && (debug || true); //simulates ifdef
private static final boolean debug0_1 = false; //simulates ifdef
private static final boolean debug1 = false; //simulates ifdef
private static final boolean debug2 = false;
private static final boolean debug3 = false;
//Debugging stats
private int numCalls = 0;
private int numComputeLikelihoodCalls = 0;
protected HashMap<String, BufferedWriter> filenameToWriter = new HashMap<String, BufferedWriter>();
private JNILoglessPairHMM jniPairHMM = null;
public DebugJNILoglessPairHMM(final PairHMM.HMM_IMPLEMENTATION hmmType, PairHMMNativeArguments pairHmmNativeArgs) {
super();
switch(hmmType) {
case VECTOR_LOGLESS_CACHING:
jniPairHMM = new VectorLoglessPairHMM(VectorLoglessPairHMM.Implementation.AVX, pairHmmNativeArgs);
break;
default:
throw new UserException.BadArgumentValue("pairHMM","Specified JNIPairHMM implementation is unrecognized or incompatible with the HaplotypeCaller. Acceptable options are VECTOR_LOGLESS_CACHING");
}
}
@Override
public void close()
{
jniPairHMM.close();
debugClose();
}
//Used only when testing parts of the compute kernel
/**
* {@inheritDoc}
*/
@Override
public void initialize( final int readMaxLength, final int haplotypeMaxLength ) {
if(verify)
super.initialize(readMaxLength, haplotypeMaxLength);
if(debug3)
{
System.out.println("Java: alloc initialized readMaxLength : "+readMaxLength+" haplotypeMaxLength : "+haplotypeMaxLength);
debugDump("lengths_java.txt", String.format("%d %d\n",readMaxLength, haplotypeMaxLength),
true);
}
if(debug2)
jniInitialize(readMaxLength, haplotypeMaxLength);
}
private HashMap<Haplotype,Integer> haplotypeToHaplotypeListIdxMap = null;
//Used to transfer data to JNI
//Since the haplotypes are the same for all calls to computeLikelihoods within a region, transfer the haplotypes only once to the JNI per region
/**
* {@inheritDoc}
*/
@Override
public void initialize( final List<Haplotype> haplotypes, final Map<String, List<GATKSAMRecord>> perSampleReadList,
final int readMaxLength, final int haplotypeMaxLength ) {
if(verify)
{
super.initialize(haplotypes, perSampleReadList, readMaxLength, haplotypeMaxLength);
jniPairHMM.initialize(haplotypes, perSampleReadList, readMaxLength, haplotypeMaxLength);
haplotypeToHaplotypeListIdxMap = jniPairHMM.getHaplotypeToHaplotypeListIdxMap();
}
}
/**
* {@inheritDoc}
*/
@Override
public void finalizeRegion()
{
if(!dumpSandboxOnly)
jniPairHMM.finalizeRegion();
}
/**
* {@inheritDoc}
*/
@Override
public void computeLikelihoods( final ReadLikelihoods.Matrix<Haplotype> likelihoods, final List<GATKSAMRecord> processedReads, final Map<GATKSAMRecord, byte[]> GCPArrayMap ) {
// (re)initialize the pairHMM only if necessary
final int readMaxLength = verify ? findMaxReadLength(processedReads) : 0;
final int haplotypeMaxLength = verify ? findMaxHaplotypeLength(likelihoods.alleles()) : 0;
if(verify)
{
if (!initialized || readMaxLength > maxReadLength || haplotypeMaxLength > maxHaplotypeLength)
{ initialize(readMaxLength, haplotypeMaxLength); }
if ( ! initialized )
throw new IllegalStateException("Must call initialize before calling jniComputeLikelihoods in debug/verify mode");
}
int readListSize = processedReads.size();
int numHaplotypes = likelihoods.alleleCount();
int numTestcases = readListSize*numHaplotypes;
if(debug0_1)
System.out.println("Java numReads "+readListSize+" numHaplotypes "+numHaplotypes);
int idx = 0;
for(final GATKSAMRecord read : processedReads)
{
final byte [] overallGCP = GCPArrayMap.get(read);
if(debug0_1)
System.out.println("Java read length "+read.getReadBases().length);
if(debug3)
{
for(int i=0;i<read.getReadBases().length;++i)
{
debugDump("reads_java.txt",String.format("%d\n",(int)read.getReadBases()[i]),true);
debugDump("reads_java.txt",String.format("%d\n",(int)read.getBaseQualities()[i]),true);
debugDump("reads_java.txt",String.format("%d\n",(int)read.getBaseInsertionQualities()[i]),true);
debugDump("reads_java.txt",String.format("%d\n",(int)read.getBaseDeletionQualities()[i]),true);
debugDump("reads_java.txt",String.format("%d\n",(int)overallGCP[i]),true);
}
}
++idx;
}
if(verify)
{
idx = 0;
for (final Haplotype h : likelihoods.alleles()) //order is important - access in same order always
{
byte[] haplotypeBases = h.getBases();
if(debug0_1)
System.out.println("Java haplotype length "+haplotypeBases.length);
if(debug3)
{
for(int i=0;i<haplotypeBases.length;++i)
debugDump("haplotype_bases_java.txt",String.format("%d\n",(int)haplotypeBases[i]),true);
}
++idx;
}
}
double[] likelihoodArray = null;
PerReadAlleleLikelihoodMap likelihoodMap = null;
if(verify)
{
jniPairHMM.computeLikelihoods(likelihoods, processedReads, GCPArrayMap);
likelihoodArray = jniPairHMM.getLikelihoodArray();
//to compare values
super.computeLikelihoods(likelihoods, processedReads, GCPArrayMap);
}
else
{
likelihoodMap = new PerReadAlleleLikelihoodMap();
likelihoodArray = new double[numTestcases];
for(int i=0;i<numTestcases;++i)
likelihoodArray[i] = -0.5;
}
if(verify || dumpSandboxOnly)
{
boolean toDump = dumpSandboxOnly;
if(verify)
{
//re-order values in likelihoodArray
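// the JNI kernel stores each read's likelihoods in its own haplotype-list order (haplotypeToHaplotypeListIdxMap);
// permute each read's block into the iteration order of likelihoods.alleles() so the element-wise
// comparison against mLikelihoodArray below lines up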
double[] tmpArray = new double[numHaplotypes];
idx = 0;
int idxInsideHaplotypeList = 0;
int readIdx = 0;
for(final GATKSAMRecord read : processedReads)
{
for(int j=0;j<numHaplotypes;++j)
tmpArray[j] = likelihoodArray[readIdx+j];
for (final Haplotype haplotype : likelihoods.alleles())//order is important - access in same order always
{
idxInsideHaplotypeList = haplotypeToHaplotypeListIdxMap.get(haplotype);
likelihoodArray[idx] = tmpArray[idxInsideHaplotypeList];
++idx;
}
readIdx += numHaplotypes;
}
//for floating point values, no exact equality
//check whether numbers are close in terms of abs_error or relative_error
//For very large values, relative_error is relevant
//For very small values, abs_error is relevant
for(int i=0;i<likelihoodArray.length;++i)
{
double abs_error = Math.abs(likelihoodArray[i] - mLikelihoodArray[i]);
double relative_error = 0;
if(mLikelihoodArray[i] == 0)
relative_error = 0;
else
relative_error = Math.abs(abs_error/mLikelihoodArray[i]);
if(abs_error > 1e-5 && relative_error > 1e-5)
{
toDump = true;
break;
}
}
}
//if numbers are not close, then dump out the data that produced the inconsistency
if(toDump)
{
idx = 0;
System.out.println("Dump : Java numReads "+readListSize+" numHaplotypes "+numHaplotypes);
boolean firstLine = true;
for(final GATKSAMRecord read : processedReads)
{
byte [] overallGCP = GCPArrayMap.get(read);
byte[] tmpByteArray = new byte[read.getReadBases().length];
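// qualities are dumped as printable ASCII (Phred value + 33, FASTQ-style), roughly one testcase per line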
for (final Haplotype haplotype : likelihoods.alleles()) //order is important - access in same order always
{
byte[] haplotypeBases = haplotype.getBases();
debugDump("debug_dump.txt",new String(haplotypeBases)+" ",true);
debugDump("debug_dump.txt",new String(read.getReadBases())+" ",true);
for(int k=0;k<read.getReadBases().length;++k)
tmpByteArray[k] = (byte)((int)((read.getBaseQualities())[k]) + 33);
debugDump("debug_dump.txt",new String(tmpByteArray)+" ",true);
for(int k=0;k<read.getReadBases().length;++k)
tmpByteArray[k] = (byte)((int)((read.getBaseInsertionQualities())[k]) + 33);
debugDump("debug_dump.txt",new String(tmpByteArray)+" ",true);
for(int k=0;k<read.getReadBases().length;++k)
tmpByteArray[k] = (byte)((int)((read.getBaseDeletionQualities())[k]) + 33);
debugDump("debug_dump.txt",new String(tmpByteArray)+" ",true);
for(int k=0;k<read.getReadBases().length;++k)
tmpByteArray[k] = (byte)((int)(overallGCP[k]) + 33);
debugDump("debug_dump.txt",new String(tmpByteArray),true);
if(firstLine)
{
debugDump("debug_dump.txt",String.format(" %d %d\n",readListSize, numHaplotypes), true);
firstLine = false;
}
else
debugDump("debug_dump.txt","\n",true);
if(verify)
debugDump("debug_results.txt",String.format("%e %e\n",mLikelihoodArray[idx],likelihoodArray[idx]),true);
else
if(dumpSandboxOnly)
likelihoods.set(likelihoods.alleleIndex(haplotype),likelihoods.readIndex(read), likelihoodArray[idx]);
++idx;
}
}
}
debugClose();
}
++numComputeLikelihoodCalls;
//if(numComputeLikelihoodCalls == 5)
//jniPairHMM.close();
//System.exit(0);
}
//Used to test parts of the compute kernel separately
private native void jniInitialize( final int readMaxLength, final int haplotypeMaxLength);
private native static void jniInitializeProbabilities( final double[][] transition, final byte[] insertionGOP,
final byte[] deletionGOP, final byte[] overallGCP);
private native double jniInitializePriorsAndUpdateCells( boolean doInitialization, final int paddedReadLength,
final int paddedHaplotypeLength, final byte[] readBases, final byte[] haplotypeBases, final byte[] readQuals,
final int hapStartIndex);
private native double jniSubComputeReadLikelihoodGivenHaplotypeLog10( final int readLength, final int haplotypeLength,
final byte[] readBases, final byte[] haplotypeBases, final byte[] readQuals, final byte[] insertionGOP,
final byte[] deletionGOP, final byte[] overallGCP, final int hapStartIndex);
/**
* {@inheritDoc}
*/
@Override
public double subComputeReadLikelihoodGivenHaplotypeLog10( final byte[] haplotypeBases, final byte[] readBases,
final byte[] readQuals, final byte[] insertionGOP, final byte[] deletionGOP, final byte[] overallGCP,
final int hapStartIndex, final boolean recacheReadValues, final int nextHapStartIndex) {
//System.out.println("#### START STACK TRACE ####");
//for (StackTraceElement ste : Thread.currentThread().getStackTrace()) {
//System.out.println(ste);
//}
//System.out.println("#### END STACK TRACE ####");
//
if(debug1)
jniSubComputeReadLikelihoodGivenHaplotypeLog10(readBases.length, haplotypeBases.length,
readBases, haplotypeBases, readQuals,
insertionGOP, deletionGOP, overallGCP,
hapStartIndex);
boolean doInitialization = (previousHaplotypeBases == null || previousHaplotypeBases.length != haplotypeBases.length);
if (doInitialization) {
final double initialValue = INITIAL_CONDITION / haplotypeBases.length;
// set the initial value (free deletions in the beginning) for the first row in the deletion matrix
for( int j = 0; j < paddedHaplotypeLength; j++ ) {
deletionMatrix[0][j] = initialValue;
}
}
if ( ! constantsAreInitialized || recacheReadValues ) {
initializeProbabilities(transition, insertionGOP, deletionGOP, overallGCP);
if(debug3)
{
System.out.println("Java: initializeProbabilities lengths : "+insertionGOP.length+" padded "+paddedReadLength+" "+paddedHaplotypeLength);
for(int i=0;i<insertionGOP.length;++i)
for(int j=0;j<6;++j)
debugDump("transitions_java.txt",String.format("%e\n",transition[i+1][j]),true);
}
if(debug2)
jniInitializeProbabilities(transition, insertionGOP, deletionGOP, overallGCP);
// note that we initialized the constants
constantsAreInitialized = true;
}
if(debug3)
System.out.println("Java: initializePriors : lengths "+readBases.length+" "+haplotypeBases.length+" padded "+paddedReadLength+" "+paddedHaplotypeLength + " doNotUseTristateCorrection "+doNotUseTristateCorrection);
initializePriors(haplotypeBases, readBases, readQuals, hapStartIndex);
for (int i = 1; i < paddedReadLength; i++) {
// +1 here is because hapStartIndex is 0-based, but our matrices are 1 based
for (int j = hapStartIndex+1; j < paddedHaplotypeLength; j++) {
updateCell(i, j, prior[i][j], transition[i]);
}
}
// final probability is the log10 sum of the last element in the Match and Insertion state arrays
// this way we ignore all paths that ended in deletions! (huge)
// but we have to sum all the paths ending in the M and I matrices, because they're no longer extended.
final int endI = paddedReadLength - 1;
double finalSumProbabilities = 0.0;
for (int j = 1; j < paddedHaplotypeLength; j++) {
finalSumProbabilities += matchMatrix[endI][j] + insertionMatrix[endI][j];
}
if(debug2)
jniInitializePriorsAndUpdateCells(doInitialization, paddedReadLength, paddedHaplotypeLength,
readBases, haplotypeBases, readQuals,
hapStartIndex);
if(debug)
debugDump("return_values_java.txt",String.format("%e\n",Math.log10(finalSumProbabilities) - INITIAL_CONDITION_LOG10),true);
++numCalls;
//if(numCalls == 100)
//{
//debugClose();
//System.exit(0);
//}
return Math.log10(finalSumProbabilities) - INITIAL_CONDITION_LOG10;
}
/**
* Initializes the matrix that holds all the constants related to the editing
* distance between the read and the haplotype.
*
* @param haplotypeBases the bases of the haplotype
* @param readBases the bases of the read
* @param readQuals the base quality scores of the read
* @param startIndex where to start updating the distanceMatrix (in case this read is similar to the previous read)
*/
protected void initializePriors(final byte[] haplotypeBases, final byte[] readBases, final byte[] readQuals, final int startIndex) {
// initialize the pBaseReadLog10 matrix for all combinations of read x haplotype bases
// Abusing the fact that java initializes arrays with 0.0, so no need to fill in rows and columns below 2.
if(debug3)
System.out.println("hapStartIndex "+startIndex);
for (int i = 0; i < readBases.length; i++) {
final byte x = readBases[i];
final byte qual = readQuals[i];
for (int j = startIndex; j < haplotypeBases.length; j++) {
final byte y = haplotypeBases[j];
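// matching bases (or an 'N' in either the read or the haplotype) get P(base call is correct); mismatches get
// the base error probability, optionally divided by the tristate correction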
prior[i+1][j+1] = ( x == y || x == (byte) 'N' || y == (byte) 'N' ?
QualityUtils.qualToProb(qual) : (QualityUtils.qualToErrorProb(qual) / (doNotUseTristateCorrection ? 1.0 : TRISTATE_CORRECTION)) );
if(debug3)
debugDump("priors_java.txt",String.format("%e\n",prior[i+1][j+1]),true);
}
}
}
/**
* Initializes the matrix that holds all the constants related to quality scores.
*
* @param insertionGOP insertion quality scores of the read
* @param deletionGOP deletion quality scores of the read
* @param overallGCP overall gap continuation penalty
*/
@Requires({
"insertionGOP != null",
"deletionGOP != null",
"overallGCP != null"
})
@Ensures("constantsAreInitialized")
protected static void initializeProbabilities(final double[][] transition, final byte[] insertionGOP, final byte[] deletionGOP, final byte[] overallGCP) {
PairHMMModel.qualToTransProbs(transition,insertionGOP,deletionGOP,overallGCP);
}
/**
* Updates a cell in the HMM matrix
*
* The read and haplotype indices are offset by one because the state arrays have an extra column to hold the
* initial conditions
* @param indI row index in the matrices to update
* @param indJ column index in the matrices to update
* @param prior the likelihood editing distance matrix for the read x haplotype
* @param transition an array with the six transition probabilities relevant to this location
*/
protected void updateCell( final int indI, final int indJ, final double prior, final double[] transition) {
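// standard PairHMM recurrences (transition indices from PairHMMModel): the match state emits the current base
// pairing with probability 'prior' and is entered from M, I or D at (indI-1, indJ-1); the insertion state
// advances along the read only (entered from (indI-1, indJ)); the deletion state advances along the haplotype
// only (entered from (indI, indJ-1))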
matchMatrix[indI][indJ] = prior * ( matchMatrix[indI - 1][indJ - 1] * transition[matchToMatch] +
insertionMatrix[indI - 1][indJ - 1] * transition[indelToMatch] +
deletionMatrix[indI - 1][indJ - 1] * transition[indelToMatch] );
insertionMatrix[indI][indJ] = matchMatrix[indI - 1][indJ] * transition[matchToInsertion] + insertionMatrix[indI - 1][indJ] * transition[insertionToInsertion];
deletionMatrix[indI][indJ] = matchMatrix[indI][indJ - 1] * transition[matchToDeletion] + deletionMatrix[indI][indJ - 1] * transition[deletionToDeletion];
if(debug3)
{
debugDump("matrices_java.txt",String.format("%e\n",matchMatrix[indI][indJ]),true);
debugDump("matrices_java.txt",String.format("%e\n",insertionMatrix[indI][indJ]),true);
debugDump("matrices_java.txt",String.format("%e\n",deletionMatrix[indI][indJ]),true);
}
}
protected void debugDump( String filename, String s, boolean toAppend ) {
try {
File file = new File(filename);
if (!file.exists())
file.createNewFile();
BufferedWriter currWriter = filenameToWriter.get(filename);
if(currWriter == null)
{
FileWriter fw = new FileWriter(file, toAppend);
currWriter = new BufferedWriter(fw);
filenameToWriter.put(filename, currWriter);
}
currWriter.write(s);
}
catch(IOException e)
{
e.printStackTrace();
}
}
protected void debugClose() {
for(Map.Entry<String, BufferedWriter> currEntry : filenameToWriter.entrySet()) {
BufferedWriter currWriter = currEntry.getValue();
try
{
currWriter.flush();
currWriter.close();
}
catch(IOException e)
{
e.printStackTrace();
}
}
filenameToWriter.clear();
}
}
<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/genotyper/AlleleListUtilsUnitTest.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper;
import htsjdk.variant.variantcontext.Allele;
import org.broadinstitute.gatk.utils.Utils;
import org.broadinstitute.gatk.utils.genotyper.AlleleList;
import org.broadinstitute.gatk.utils.genotyper.AlleleListPermutation;
import org.broadinstitute.gatk.utils.genotyper.AlleleListUtils;
import org.broadinstitute.gatk.utils.genotyper.IndexedAlleleList;
import org.testng.Assert;
import org.testng.SkipException;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.*;
/**
* Test {@link org.broadinstitute.gatk.utils.genotyper.AlleleListUtils}.
*
* @author <NAME> <<EMAIL>>
*/
public class AlleleListUtilsUnitTest {
@Test(dataProvider = "singleAlleleListData")
public void testAsList(final List<Allele> alleles1) {
final Allele[] uniqueAlleles = new LinkedHashSet<>(alleles1).toArray(new Allele[0]);
final AlleleList<Allele> alleleList = new IndexedAlleleList<>(alleles1);
final List<Allele> asList = AlleleListUtils.asList(alleleList);
final Allele[] asListArray = asList.toArray(new Allele[asList.size()]);
Assert.assertTrue(Arrays.equals(uniqueAlleles,asListArray));
}
@Test(dataProvider = "singleAlleleListData")
public void testIndexOfReference(final List<Allele> alleles1) {
final Allele[] uniqueAlleles = new LinkedHashSet<>(alleles1).toArray(new Allele[0]);
for (int i = 0; i < uniqueAlleles.length; i++) {
final Allele[] actualAlleles = uniqueAlleles.clone();
actualAlleles[i] = Allele.create(actualAlleles[i].getBases(),true);
final AlleleList<Allele> alleleList = new IndexedAlleleList<>(actualAlleles);
Assert.assertEquals(AlleleListUtils.indexOfReference(alleleList),i);
}
final AlleleList<Allele> alleleList = new IndexedAlleleList<>(uniqueAlleles);
Assert.assertEquals(AlleleListUtils.indexOfReference(alleleList),-1);
}
@Test(dataProvider = "twoAlleleListData", dependsOnMethods={"testAsList"})
public void testEquals(final List<Allele> alleles1, final List<Allele> alleles2) {
final AlleleList<Allele> alleleList1 = new IndexedAlleleList<Allele>(alleles1);
final AlleleList<Allele> alleleList2 = new IndexedAlleleList<Allele>(alleles2);
Assert.assertTrue(AlleleListUtils.equals(alleleList1,alleleList1));
Assert.assertTrue(AlleleListUtils.equals(alleleList2,alleleList2));
Assert.assertEquals(AlleleListUtils.equals(alleleList1, alleleList2),
Arrays.equals(AlleleListUtils.asList(alleleList1).toArray(new Allele[alleleList1.alleleCount()]),
AlleleListUtils.asList(alleleList2).toArray(new Allele[alleleList2.alleleCount()]))
);
Assert.assertEquals(AlleleListUtils.equals(alleleList1,alleleList2),
AlleleListUtils.equals(alleleList2,alleleList1));
}
@Test(dataProvider = "singleAlleleListData", dependsOnMethods= "testEquals" )
public void testSelfPermutation(final List<Allele> alleles1) {
final AlleleList<Allele> originalAlleleList = new IndexedAlleleList<>(alleles1);
final AlleleListPermutation<Allele> selfPermutation = AlleleListUtils.permutation(originalAlleleList,originalAlleleList);
Assert.assertEquals(selfPermutation.fromSize(),originalAlleleList.alleleCount());
Assert.assertEquals(selfPermutation.toSize(),originalAlleleList.alleleCount());
Assert.assertTrue(selfPermutation.isNonPermuted());
Assert.assertFalse(selfPermutation.isPartial());
for (int i = 0; i < originalAlleleList.alleleCount(); i++) {
Assert.assertEquals(selfPermutation.fromIndex(i), i);
Assert.assertEquals(selfPermutation.toIndex(i),i);
Assert.assertEquals(selfPermutation.fromList(),selfPermutation.toList());
AlleleListUnitTester.assertAlleleList(originalAlleleList, selfPermutation.fromList());
}
Assert.assertTrue(AlleleListUtils.equals(selfPermutation,originalAlleleList));
}
private final Random rnd = Utils.getRandomGenerator();
@Test(dataProvider = "singleAlleleListData", dependsOnMethods = "testEquals")
public void testSubsetPermutation(final List<Allele> alleles1) {
final List<Allele> subsetAlleles = new ArrayList<>(alleles1.size());
for (final Allele allele : alleles1)
if (rnd.nextBoolean()) subsetAlleles.add(allele);
final AlleleList<Allele> originalAlleleList = new IndexedAlleleList<>(alleles1);
final AlleleList<Allele> targetAlleleList = new IndexedAlleleList<>(subsetAlleles);
final AlleleListPermutation<Allele> subset = AlleleListUtils.permutation(originalAlleleList,targetAlleleList);
if (originalAlleleList.alleleCount() == targetAlleleList.alleleCount())
throw new SkipException("no real subset");
Assert.assertTrue(subset.isPartial());
Assert.assertFalse(subset.isNonPermuted());
Assert.assertEquals(subset.fromSize(),originalAlleleList.alleleCount());
Assert.assertEquals(subset.toSize(),targetAlleleList.alleleCount());
AlleleListUnitTester.assertAlleleList(originalAlleleList,subset.fromList());
AlleleListUnitTester.assertAlleleList(targetAlleleList,subset.toList());
for (int i = 0; i < targetAlleleList.alleleCount(); i++)
Assert.assertEquals(subset.fromIndex(i), originalAlleleList.alleleIndex(targetAlleleList.alleleAt(i)));
for (int j = 0; j < originalAlleleList.alleleCount(); j++) {
final Allele allele = originalAlleleList.alleleAt(j);
Assert.assertEquals(subset.toIndex(j),targetAlleleList.alleleIndex(allele));
}
Assert.assertTrue(AlleleListUtils.equals(subset,targetAlleleList));
}
@Test(dataProvider = "singleAlleleListData", dependsOnMethods = {"testAsList","testEquals"})
public void testShufflePermutation(final List<Allele> alleles1) {
final AlleleList<Allele> originalAlleleList = new IndexedAlleleList<>(alleles1);
if (originalAlleleList.alleleCount() <= 1)
throw new SkipException("non-shuffle allele-list");
final Allele[] targetAlleleArray = AlleleListUtils.asList(originalAlleleList).toArray(new Allele[originalAlleleList.alleleCount()]);
final int[] fromIndex = new int[targetAlleleArray.length];
for (int i = 0; i < fromIndex.length; i++)
fromIndex[i] = i;
for (int i = 0; i < targetAlleleArray.length - 1; i++) {
final int swapIndex = rnd.nextInt(targetAlleleArray.length - i - 1);
final int otherIndex = fromIndex[swapIndex + i + 1];
final Allele other = targetAlleleArray[swapIndex + i + 1];
fromIndex[swapIndex + i + 1] = fromIndex[i];
fromIndex[i] = otherIndex;
targetAlleleArray[swapIndex + i + 1] = targetAlleleArray[i];
targetAlleleArray[i] = other;
}
final AlleleList<Allele> targetAlleleList = new IndexedAlleleList<>(targetAlleleArray);
final AlleleListPermutation<Allele> permutation = AlleleListUtils.permutation(originalAlleleList,targetAlleleList);
Assert.assertFalse(permutation.isNonPermuted());
AlleleListUnitTester.assertAlleleList(originalAlleleList,permutation.fromList());
AlleleListUnitTester.assertAlleleList(targetAlleleList,permutation.toList());
Assert.assertFalse(permutation.isPartial());
Assert.assertEquals(permutation.fromSize(),originalAlleleList.alleleCount());
Assert.assertEquals(permutation.toSize(),targetAlleleList.alleleCount());
for (int i = 0; i < permutation.fromSize(); i++) {
Assert.assertEquals(permutation.toIndex(i),targetAlleleList.alleleIndex(originalAlleleList.alleleAt(i)));
Assert.assertEquals(permutation.fromIndex(i),originalAlleleList.alleleIndex(targetAlleleList.alleleAt(i)));
Assert.assertEquals(permutation.fromIndex(i),fromIndex[i]);
}
Assert.assertTrue(AlleleListUtils.equals(permutation,targetAlleleList));
}
private List<Allele>[] alleleLists;
@BeforeClass
public void setUp() {
alleleLists = new List[ALLELE_COUNT.length * MAX_ALLELE_LENGTH.length];
int nextIndex = 0;
for (int i = 0; i < ALLELE_COUNT.length; i++)
for (int j = 0; j < MAX_ALLELE_LENGTH.length; j++)
alleleLists[nextIndex++] = Arrays.asList(AlleleListUnitTester.generateRandomAlleles(ALLELE_COUNT[i], MAX_ALLELE_LENGTH[j]));
}
private static final int[] ALLELE_COUNT = { 0, 1, 5, 10, 20};
private static final int[] MAX_ALLELE_LENGTH = { 1, 2, 3, 10 };
@DataProvider(name="singleAlleleListData")
public Object[][] singleAlleleListData() {
final Object[][] result = new Object[alleleLists.length][];
for (int i = 0; i < alleleLists.length; i++)
result[i] = new Object[] { alleleLists[i]};
return result;
}
@DataProvider(name="twoAlleleListData")
public Object[][] twoAlleleListData() {
final Object[][] result = new Object[alleleLists.length * alleleLists.length][];
int index = 0;
for (int i = 0; i < alleleLists.length; i++)
for (int j = 0; j < alleleLists.length; j++)
result[index++] = new Object[] { alleleLists[i], alleleLists[j]};
return result;
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/GenotypeLikelihoodCalculators.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.exceptions.GATKException;
import java.util.Arrays;
import java.util.stream.IntStream;
/**
* Genotype likelihood calculator utility.
*
* <p>
* This class provides genotype likelihood calculators for an arbitrary ploidy and allele
* count (number of distinct alleles).
* </p>
*
* <p>
* This class is thread-safe.
* </p>
*
* @author <NAME> <<EMAIL>>
*/
public class GenotypeLikelihoodCalculators {
/**
* Maximum possible number of genotypes that this calculator can handle.
*/
public static final int MAXIMUM_STRONG_REF_GENOTYPE_PER_PLOIDY = 1000;
/**
* Marker value used to indicate genotype-count overflow due to a large number of alleles and high ploidy.
*/
protected static final int GENOTYPE_COUNT_OVERFLOW = -1;
/**
* The current maximum allele index supported by the tables.
* <p>
* Its initial value indicates the initial capacity of the shared {@link #alleleFirstGenotypeOffsetByPloidy} table.
* Feel free to change it to anything reasonable that is non-negative.
* </p>
*/
private static int maximumAllele = 1; // its initial value is the initial capacity of the shared tables.
/**
* The current maximum ploidy supported by the tables.
* <p>
* Its initial value indicates the initial capacity of the shared {@link #genotypeTableByPloidy}. Feel free
* to change it to anything reasonable that is non-negative.
* </p>
*/
private static int maximumPloidy = 2; // its initial value is the initial capacity of the shared tables.
/**
* Shared copy of the offset table as described in {@link #buildGenotypeAlleleCountsTable(int, int, int[][])}.
*
* This reference holds the largest requested so far in terms of maximum-allele and maximum-ploidy.
*/
private volatile static int[][] alleleFirstGenotypeOffsetByPloidy =
buildAlleleFirstGenotypeOffsetTable(maximumPloidy, maximumAllele);
/**
* Shared table of genotypes, given the ploidy, sorted by their index in the likelihood array.
*
* <p>
* Its format is described in {@link #buildGenotypeAlleleCountsTable(int, int, int[][])}.
* </p>
*/
private volatile static GenotypeAlleleCounts[][] genotypeTableByPloidy =
buildGenotypeAlleleCountsTable(maximumPloidy,maximumAllele,alleleFirstGenotypeOffsetByPloidy);
/**
* Cached log10 values for the integers 0 through the maximum ploidy requested thus far.
*/
private volatile static double[] ploidyLog10 = IntStream.range(0, maximumPloidy + 1).mapToDouble(Math::log10).toArray();
/**
* Build the table with the genotype offsets based on ploidy and the maximum allele index with representation
* in the genotype.
* <p>
* The result is a matrix containing the offset of the first genotype that contain a particular allele
* stratified by ploidy.
* <p>
* Row (first dimension) represent the ploidy, whereas
* the second dimension represents the allele.
* </p>
*
* <p>
* Thus the value at position <i>[p][a]</i> indicates how many genotypes of ploidy <i>p</i> there are before the first
* one that contains allele <i>a</i>. <br/>
*
* For example, considering ploidy 3 and alleles A, B, C, D, etc ... (indexed 0, 1, 2, ... respectively):
* <br/>
* [3][A] == [3][0] == 0 as the first genotype AAA contains A.
* <br/>
* [3][C] == [3][2] == 4 as the first genotype that contains C, AAC follows: AAA AAB ABB BBB
* <br/>
* [4][D] == [4][3] == 15 as the first genotype that contains D, AAAD, follows the 15 genotypes: AAAA AAAB AABB ABBB BBBB AAAC
* AABC ABBC BBBC AACC ABCC BBCC ACCC BCCC CCCC.
*
* </p>
*
* <p>
* These values are calculated recursively as follows:
* </p>
* <pre>
*
* Offset[p][a] := Offset[p-1][a] + Offset[p][a-1] when a > 0, p > 0
* 0 when a == 0
* 1 otherwise
*
*
* 0 1 1 1 1 1 1 ...
* 0 1 2 3 4 5 6 ...
* 0 1 3 6 10 15 21 ...
* 0 1 4 10 20 35 56 ...
* 0 1 5 15 35 70 126 ...
* 0 ..................
* </pre>
*
* <p>
* Note: if someone can come up with a closed-form O(1) computation (with respect to ploidy and allele count),
* please let the author know.
* </p>
*
* <p>
* The matrix is guaranteed to have as many rows as indicated by {@code maximumPloidy} + 1; the first
* row refers to the special case of ploidy == 0, the second row to ploidy 1 and so forth. Thus the ploidy
* matches the index.
* </p>
* <p>
* The matrix is guaranteed to have as many columns as indicated by {@code maximumAllele} + 1. In this case, however,
* the first allele index 0 corresponds to an actual allele (typically the reference allele). The extra column is needed
* because the total genotype count up to {@code maximumAllele} alleles equals the offset of the first genotype
* of the following allele; thus we need one more column than there are alleles.
* </p>
*
* <p>
* Although it might seem nonsensical to have genotypes of ploidy 0, the values in the first row are used when
* filling in the values of row 1 and so forth, so it is present for programmatic convenience.
* Offsets in this row are 0 for the first column and 1 for any others.
* </p>
*
* @param maximumPloidy maximum supported ploidy.
* @param maximumAllele maximum supported allele index.
*
* @throws IllegalArgumentException if {@code maximumPloidy} or {@code maximumAllele} is negative.
*
* @return never {@code null}, the matrix described with enough information to address
* problems concerning up to the requested maximum allele index and ploidy.
*/
private static int[][] buildAlleleFirstGenotypeOffsetTable(final int maximumPloidy, final int maximumAllele) {
checkPloidyAndMaximumAllele(maximumPloidy, maximumAllele);
final int rowCount = maximumPloidy + 1;
final int colCount = maximumAllele + 1;
final int[][] result = new int[rowCount][colCount];
// Ploidy 0 array must be { 0, 1, 1, ...., 1}
Arrays.fill(result[0],1,colCount,1);
// Now we take care of the rest of ploidies.
// We leave the first allele offset at its correct value 0 by starting with allele := 1.
for (int ploidy = 1; ploidy < rowCount; ploidy++)
for (int allele = 1; allele < colCount; allele++) {
result[ploidy][allele] = result[ploidy][allele - 1] + result[ploidy - 1][allele];
if (result[ploidy][allele] < result[ploidy][allele - 1])
result[ploidy][allele] = GENOTYPE_COUNT_OVERFLOW;
}
return result;
}
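// Editor's illustration (hypothetical sketch; the method name below is made up, is private, and is not referenced
// anywhere in GATK): a tiny sanity check of the recursion documented above, using the values from the javadoc example.
// Run with assertions enabled (-ea) if you want the checks to fire.
@SuppressWarnings("unused")
private static void offsetTableDocumentationSketch() {
final int[][] offsets = buildAlleleFirstGenotypeOffsetTable(4, 4);
assert offsets[3][2] == 4; // 4 ploidy-3 genotypes (AAA, AAB, ABB, BBB) precede the first one containing C.
assert offsets[4][3] == 15; // 15 ploidy-4 genotypes over alleles A, B, C precede the first one containing D.
assert offsets[3][4] == 20; // the last column is the total count: C(3 + 4 - 1, 3) = 20 ploidy-3 genotypes over 4 alleles.
}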
/**
* Composes a table with the lists of all possible genotype allele counts given the ploidy and maximum allele index.
* <p>
* The resulting matrix has at least as many rows as {@code maximumPloidy} + 1, as the first row with index 0 corresponds
* to ploidy == 0. Each row array has as many positions as necessary to contain all possible genotype-allele-counts in increasing order.
* This quantity varies with the ploidy.
* </p>
*
* <p>
* Therefore <code>result[3][4]</code> would contain the 5th genotype with ploidy 3, and <code>result[4].length</code>
* would be equal to the count of possible genotypes for ploidy 4.
* </p>
*
* @param maximumPloidy maximum ploidy to use in queries to the resulting table.
* @param maximumAllele maximum allele index to use in queries to the resulting table.
* @param offsetTable an allele first genotype offset table as constructed using {@link #buildAlleleFirstGenotypeOffsetTable(int, int)}
* that supports at least up to {@code maximumAllele} and {@code maximumPloidy}.
*
* @throws IllegalArgumentException if {@code maximumPloidy} or {@code maximumAllele} is negative, or {@code offsetTable} is {@code null},
* or it does not have the capacity to handle the requested maximum ploidy or allele index.
*
* @return never {@code null}.
*/
private static GenotypeAlleleCounts[][] buildGenotypeAlleleCountsTable(final int maximumPloidy, final int maximumAllele, final int[][] offsetTable) {
checkPloidyAndMaximumAllele(maximumPloidy, maximumAllele);
checkOffsetTableCapacity(offsetTable,maximumPloidy,maximumAllele);
final int rowCount = maximumPloidy + 1;
final GenotypeAlleleCounts[][] result = new GenotypeAlleleCounts[rowCount][]; // each row has a different number of columns.
for (int ploidy = 0; ploidy <= maximumPloidy; ploidy++)
result[ploidy] = buildGenotypeAlleleCountsArray(ploidy, maximumAllele, offsetTable);
return result;
}
/**
* Builds a genotype-allele-counts array given the genotype ploidy and how many genotypes you need.
* <p>
* The result is guaranteed to have exactly {@code length} positions and the elements are sorted
* in agreement with the standard way to display genotypes following the VCF standard.
* </p>
*
* <p> Notice that it is possible to request ploidy == 0. In that case the resulting array will have repetitions
* of the empty genotype allele count.
* </p>
*
* <p>
* For example,
*
* <pre>
* ploidy = 1, length = 5 : [ {A}, {B}, {C}, {D}, {E} ]
* ploidy = 2, length = 7 : [ {AA}, {AB}, {BB}, {AC}, {BC}, {CC}, {AD} ]
* ploidy = 3, length = 10 : [ {AAA}, {AAB}, {ABB}, {BBB}, {AAC}, {ABC}, {BBC}, {BCC}, {CCC}, {AAD} ]
* </pre>
* </p>
*
* @param ploidy requested ploidy.
* @param alleleCount number of different alleles that the genotype table must support.
* @param genotypeOffsetTable table with the offset of the first genotype that contain an allele given
* the ploidy and its index.
*
* @throws IllegalArgumentException if {@code ploidy} or {@code alleleCount} is negative.
*
* @return never {@code null}, follows the specification above.
*/
private static GenotypeAlleleCounts[] buildGenotypeAlleleCountsArray(final int ploidy, final int alleleCount, final int[][] genotypeOffsetTable) {
if (ploidy < 0)
throw new IllegalArgumentException("the requested ploidy cannot be negative: " + ploidy);
if (alleleCount < 0)
throw new IllegalArgumentException("the requested maximum allele cannot be negative: " + alleleCount);
final int length = genotypeOffsetTable[ploidy][alleleCount];
final int strongRefLength = length == GENOTYPE_COUNT_OVERFLOW ? MAXIMUM_STRONG_REF_GENOTYPE_PER_PLOIDY : Math.min(length, MAXIMUM_STRONG_REF_GENOTYPE_PER_PLOIDY);
final GenotypeAlleleCounts[] result = new GenotypeAlleleCounts[strongRefLength];
result[0] = GenotypeAlleleCounts.first(ploidy);
for (int genotypeIndex = 1; genotypeIndex < strongRefLength; genotypeIndex++)
result[genotypeIndex] = result[genotypeIndex-1].next();
return result;
}
/**
* Returns an instance given its ploidy and the number of alleles.
*
* @param alleleCount the required allele-count.
* @param ploidy the required ploidy-count.
*
* @throws IllegalArgumentException if either {@code ploidy} or {@code alleleCount} is negative, or the resulting number of genotypes is too large.
*
* @return never {@code null}.
*/
public static GenotypeLikelihoodCalculator getInstance(final int ploidy, final int alleleCount) {
checkPloidyAndMaximumAllele(ploidy, alleleCount);
// Non-thread safe (fast) check on tables capacities,
// if not enough capacity we expand the tables in a thread-safe manner
// also checks if the requested ploidy and allele count result in a genotype count too large to deal with
if(calculateGenotypeCountUsingTables(ploidy, alleleCount) == GENOTYPE_COUNT_OVERFLOW){
final double largeGenotypeCount = MathUtils.binomialCoefficient(ploidy + alleleCount - 1, alleleCount - 1);
throw new IllegalArgumentException(String.format("the number of genotypes is too large for ploidy %d and allele %d: approx. %.0f", ploidy, alleleCount, largeGenotypeCount));
}
// At this point the tables must have at least the requested capacity, likely to be much more.
return new GenotypeLikelihoodCalculator(ploidy, alleleCount, alleleFirstGenotypeOffsetByPloidy, genotypeTableByPloidy, ploidyLog10);
}
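// Editor's usage sketch (hypothetical, for documentation only; the method name below is made up and unused):
// how client code would obtain a calculator and query genotype counts for small, well known cases.
@SuppressWarnings("unused")
private static void getInstanceDocumentationSketch() {
final GenotypeLikelihoodCalculator diploidBiallelic = getInstance(2, 2);
assert diploidBiallelic != null;
assert genotypeCount(2, 2) == 3; // AA, AB, BB
assert genotypeCount(3, 2) == 4; // AAA, AAB, ABB, BBB
}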
/**
* Thread safe update of shared tables
*
* @param requestedMaximumAllele the new requested maximum allele maximum.
* @param requestedMaximumPloidy the new requested ploidy maximum.
*/
private synchronized static void ensureCapacity(final int requestedMaximumAllele, final int requestedMaximumPloidy) {
final boolean needsToExpandAlleleCapacity = requestedMaximumAllele > maximumAllele;
final boolean needsToExpandPloidyCapacity = requestedMaximumPloidy > maximumPloidy;
// Double check with the lock on to avoid double work.
if (!needsToExpandAlleleCapacity && !needsToExpandPloidyCapacity)
return;
final int newMaximumPloidy = Math.max(maximumPloidy,requestedMaximumPloidy);
final int newMaximumAllele = Math.max(maximumAllele,requestedMaximumAllele);
// Update tables first.
alleleFirstGenotypeOffsetByPloidy = buildAlleleFirstGenotypeOffsetTable(newMaximumPloidy,newMaximumAllele);
genotypeTableByPloidy = buildGenotypeAlleleCountsTable(newMaximumPloidy,newMaximumAllele,alleleFirstGenotypeOffsetByPloidy);
if (needsToExpandPloidyCapacity)
ploidyLog10 = ploidyLog10Extension(newMaximumPloidy);
// Since tables are volatile fields, it is guaranteed that table changes will be seen before
// any change to maximumPloidy and maximumAllele, ensuring that the non-thread-safe
// capacity verification test in {@link #getInstance} won't ever allow a thread
// to proceed to use a table without the required capacity.
// Just after updating tables update the capacity fields:
if (needsToExpandAlleleCapacity)
maximumAllele = requestedMaximumAllele;
if (needsToExpandPloidyCapacity)
maximumPloidy = requestedMaximumPloidy;
}
/**
* Extends the existing {@link #ploidyLog10} cache with additional log10 values as needed by the maximum-ploidy expansion.
* @param newMaximumPloidy the new maximum ploidy.
*
* @return never {@code null}.
*/
private static double[] ploidyLog10Extension(final int newMaximumPloidy) {
final int start = ploidyLog10.length;
final double[] result = Arrays.copyOf(ploidyLog10,newMaximumPloidy + 1);
for (int i = start; i < result.length; i++)
result[i] = Math.log10(i);
return result;
}
/**
* Performs value checks on the maximum ploidy and maximum allele values passed to various methods in this class.
* <p>
* Throws an exception if there are any issues.
* </p>
*
* @param ploidy the maximum ploidy value.
* @param maximumAllele the maximum allele value.
*
* @throws IllegalArgumentException if either value is negative.
*/
private static void checkPloidyAndMaximumAllele(final int ploidy, final int maximumAllele) {
if (ploidy < 0)
throw new IllegalArgumentException("the ploidy provided cannot be negative: " + ploidy);
if (maximumAllele < 0)
throw new IllegalArgumentException("the maximum allele index provided cannot be negative: " + maximumAllele);
}
private static void checkOffsetTableCapacity(final int[][] offsetTable, final int maximumPloidy, final int maximumAllele) {
if (offsetTable == null)
throw new IllegalArgumentException("the allele first genotype offset table provided cannot be null");
if (offsetTable.length <= maximumPloidy )
throw new IllegalArgumentException("the allele first genotype offset table provided does not have enough " +
"capacity for requested maximum ploidy: " + maximumPloidy);
if (offsetTable[0].length < maximumAllele)
throw new IllegalArgumentException("the allele first genotype offset table provided does not have enough " +
"capacity for requested maximum allele index: " + maximumAllele);
}
/**
* Returns the number of possible genotypes given the ploidy and number of different alleles.
* @param ploidy the requested ploidy.
* @param alleleCount the requested number of alleles.
*
* @throws IllegalArgumentException if {@code ploidy} or {@code alleleCount} is negative or
* the number of genotypes is too large (more than {@link Integer#MAX_VALUE}).
*
* @return the number of genotypes given ploidy and allele count (0 or greater).
*/
public final static int genotypeCount(final int ploidy, final int alleleCount) {
final int result = calculateGenotypeCountUsingTables(ploidy, alleleCount);
if (result == GENOTYPE_COUNT_OVERFLOW) {
final double largeGenotypeCount = MathUtils.binomialCoefficient(ploidy + alleleCount - 1, alleleCount - 1);
throw new IllegalArgumentException(String.format("the number of genotypes is too large for ploidy %d and allele %d: approx. %.0f", ploidy, alleleCount, largeGenotypeCount));
}
return result;
}
/**
* Compute the maximally acceptable allele count (ref allele included) given the maximally acceptable genotype count.
* @param ploidy sample ploidy
* @param maxGenotypeCount maximum number of genotype count used to calculate upper bound on number of alleles given ploidy
* @throws IllegalArgumentException if {@code ploidy} or {@code alleleCount} is negative.
* @return the maximally acceptable allele count given ploidy and maximum number of genotypes acceptable
*/
public static int computeMaxAcceptableAlleleCount(final int ploidy, final int maxGenotypeCount){
checkPloidyAndMaximumAllele(ploidy, ploidy); // a hack to check ploidy makes sense (could duplicate code but choice must be made)
final double log10MaxGenotypeCount = Math.log10(maxGenotypeCount);
// Math explanation: the genotype count is ${P+A-1 \choose A-1}$, which leads to the constraint
// $\log(\frac{(P+A-1)!}{(A-1)!}) \le \log(P!G)$,
// where $P$ is ploidy, $A$ is allele count, and $G$ is maxGenotypeCount
// The upper and lower bounds of the left hand side of the constraint are $P \log(A-1+P)$ and $P \log(A)$
// which require $A$ to be searched in interval $[10^{\log(P!G)/P} - (P-1), 10^{\log(P!G)/P}]$
// Denote $10^{\log(P!G)/P}$ as $x$ in the code.
final double x = Math.pow(10, (MathUtils.log10Factorial(ploidy) + log10MaxGenotypeCount)/ploidy );
final int lower = (int)Math.floor(x) - ploidy - 1;
final int upper = (int)Math.ceil(x);
for(int a=upper; a>=lower; --a){// check one by one
final double log10GTCnt = MathUtils.log10BinomialCoefficient(ploidy+a-1, a-1);
if(log10MaxGenotypeCount >= log10GTCnt) {
return a;
}
}
throw new GATKException("Code should never reach here.");
}
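// Editor's worked example (hypothetical, for documentation only; the method name below is made up and unused):
// for a diploid sample the genotype count is A*(A+1)/2, so with a cap of 1024 genotypes the largest acceptable
// allele count is 44, since 44*45/2 = 990 <= 1024 while 45*46/2 = 1035 exceeds the cap.
@SuppressWarnings("unused")
private static void computeMaxAcceptableAlleleCountDocumentationSketch() {
assert computeMaxAcceptableAlleleCount(2, 1024) == 44;
}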
private static int calculateGenotypeCountUsingTables(int ploidy, int alleleCount) {
checkPloidyAndMaximumAllele(ploidy, alleleCount);
if (ploidy > maximumPloidy || alleleCount > maximumAllele) {
ensureCapacity(alleleCount, ploidy);
}
return alleleFirstGenotypeOffsetByPloidy[ploidy][alleleCount];
}
}<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/afcalc/AFCalculationResult.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper.afcalc;
import com.google.java.contract.Ensures;
import com.google.java.contract.Requires;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.Utils;
import htsjdk.variant.variantcontext.Allele;
import java.util.*;
/**
* Describes the results of the AFCalc
*
* Only the bare essentials are represented here, as all AFCalc models must return meaningful results for
* all of these fields.
*
* Note that all of the values -- i.e. priors -- are checked to ensure that they are meaningful, which means
* that users of this code can rely on the values coming out of these functions.
*/
public class AFCalculationResult {
private final static int AF0 = 0; //index of the AC=0 entry in the log10 array (of priors, likelihoods, or posteriors listed below)
private final static int AF1p = 1; //index of the AC>0 entry in the log10 array
private final static int LOG_10_ARRAY_SIZES = 2;
private final double[] log10LikelihoodsOfAC;
private final double[] log10PriorsOfAC;
private final double[] log10PosteriorsOfAC;
private final Map<Allele, Double> log10pRefByAllele;
/**
* The AC values for all ALT alleles at the MLE
*/
private final int[] alleleCountsOfMLE;
int nEvaluations = 0;
/**
* The list of alleles actually used in computing the AF
*/
private List<Allele> allelesUsedInGenotyping = null;
/**
* Creates a results object capable of storing results for calls with up to maxAltAlleles
*/
public AFCalculationResult(final int[] alleleCountsOfMLE,
final int nEvaluations,
final List<Allele> allelesUsedInGenotyping,
final double[] log10LikelihoodsOfAC,
final double[] log10PriorsOfAC,
final Map<Allele, Double> log10pRefByAllele) {
if ( allelesUsedInGenotyping == null || allelesUsedInGenotyping.size() < 1 ) throw new IllegalArgumentException("allelesUsedInGenotyping must be non-null list of at least 1 value " + allelesUsedInGenotyping);
if ( alleleCountsOfMLE == null ) throw new IllegalArgumentException("alleleCountsOfMLE cannot be null");
if ( alleleCountsOfMLE.length != allelesUsedInGenotyping.size() - 1) throw new IllegalArgumentException("alleleCountsOfMLE.length " + alleleCountsOfMLE.length + " != number of alternate alleles used in genotyping " + (allelesUsedInGenotyping.size() - 1));
if ( nEvaluations < 0 ) throw new IllegalArgumentException("nEvaluations must be >= 0 but saw " + nEvaluations);
if ( log10LikelihoodsOfAC.length != 2 ) throw new IllegalArgumentException("log10LikelihoodsOfAC must have length equal 2");
if ( log10PriorsOfAC.length != 2 ) throw new IllegalArgumentException("log10PriorsOfAC must have length equal 2");
if ( log10pRefByAllele == null ) throw new IllegalArgumentException("log10pRefByAllele cannot be null");
if ( log10pRefByAllele.size() != allelesUsedInGenotyping.size() - 1 ) throw new IllegalArgumentException("log10pRefByAllele has the wrong number of elements: log10pRefByAllele " + log10pRefByAllele + " but allelesUsedInGenotyping " + allelesUsedInGenotyping);
if ( ! allelesUsedInGenotyping.containsAll(log10pRefByAllele.keySet()) ) throw new IllegalArgumentException("log10pRefByAllele doesn't contain all of the alleles used in genotyping: log10pRefByAllele " + log10pRefByAllele + " but allelesUsedInGenotyping " + allelesUsedInGenotyping);
if ( ! MathUtils.goodLog10ProbVector(log10LikelihoodsOfAC, LOG_10_ARRAY_SIZES, false) ) throw new IllegalArgumentException("log10LikelihoodsOfAC are bad " + Utils.join(",", log10LikelihoodsOfAC));
if ( ! MathUtils.goodLog10ProbVector(log10PriorsOfAC, LOG_10_ARRAY_SIZES, false) ) throw new IllegalArgumentException("log10priors are bad " + Utils.join(",", log10PriorsOfAC));
this.alleleCountsOfMLE = alleleCountsOfMLE;
this.nEvaluations = nEvaluations;
this.allelesUsedInGenotyping = allelesUsedInGenotyping;
this.log10LikelihoodsOfAC = Arrays.copyOf(log10LikelihoodsOfAC, LOG_10_ARRAY_SIZES);
this.log10PriorsOfAC = Arrays.copyOf(log10PriorsOfAC, LOG_10_ARRAY_SIZES);
this.log10PosteriorsOfAC = computePosteriors(log10LikelihoodsOfAC, log10PriorsOfAC);
this.log10pRefByAllele = new HashMap<Allele, Double>(log10pRefByAllele);
}
/**
* Returns a new AFCalculationResult with a new prior probability
*
* @param log10PriorsOfAC the new log10 prior probabilities of AC == 0 and AC > 0
* @return a new result identical to this one except for the priors
*/
public AFCalculationResult withNewPriors(final double[] log10PriorsOfAC) {
return new AFCalculationResult(alleleCountsOfMLE, nEvaluations, allelesUsedInGenotyping, log10LikelihoodsOfAC, log10PriorsOfAC, log10pRefByAllele);
}
/**
* Returns a vector with maxAltAlleles values containing AC values at the MLE
*
* The values of the ACs for this call are stored in the getAllelesUsedInGenotyping order,
* starting from index 0 (i.e., the first alt allele is at 0). The vector is always
* maxAltAlleles in length, and so only the first getAllelesUsedInGenotyping.size() - 1 values
* are meaningful.
*
* @return a vector with allele counts, not all of which may be meaningful
*/
@Ensures("result != null")
public int[] getAlleleCountsOfMLE() {
return alleleCountsOfMLE;
}
/**
* Returns the AC of allele a la #getAlleleCountsOfMLE
*
* @param allele the allele whose AC we want to know. Error if it's not in allelesUsedInGenotyping
* @throws IllegalStateException if allele isn't in allelesUsedInGenotyping
* @return the AC of allele
*/
public int getAlleleCountAtMLE(final Allele allele) {
return getAlleleCountsOfMLE()[altAlleleIndex(allele)];
}
/**
* Returns the number of cycles used to evaluate the pNonRef for this AF calculation
*
* @return the number of evaluations required to produce the answer for this AF calculation
*/
public int getnEvaluations() {
return nEvaluations;
}
/**
* Get the list of alleles actually used in genotyping.
*
* Due to computational / implementation constraints this may be smaller than
* the actual list of alleles requested
*
* @return a non-empty list of alleles used during genotyping, the first of which is the reference allele
*/
@Ensures({"result != null", "! result.isEmpty()"})
public List<Allele> getAllelesUsedInGenotyping() {
return allelesUsedInGenotyping;
}
/**
* Get the log10 normalized -- across all ACs -- posterior probability of AC == 0 for all alleles
*
* @return the log10 normalized posterior probability that AC == 0
*/
@Ensures({"MathUtils.goodLog10Probability(result)"})
public double getLog10PosteriorOfAFEq0() {
return log10PosteriorsOfAC[AF0];
}
/**
* Get the log10 normalized -- across all ACs -- posterior probability of AC > 0 for any alleles
*
* @return the log10 normalized posterior probability that AC > 0
*/
@Ensures({"MathUtils.goodLog10Probability(result)"})
public double getLog10PosteriorOfAFGT0() {
return log10PosteriorsOfAC[AF1p];
}
/**
* Get the log10 unnormalized -- across all ACs -- likelihood of AC == 0 for all alleles
*
* @return the log10 unnormalized likelihood that AC == 0
*/
@Ensures({"MathUtils.goodLog10Probability(result)"})
public double getLog10LikelihoodOfAFEq0() {
return log10LikelihoodsOfAC[AF0];
}
/**
* Get the log10 unnormalized -- across all ACs -- likelihood of AC > 0 for any alleles
*
* @return the log10 unnormalized likelihood that AC > 0
*/
@Ensures({"MathUtils.goodLog10Probability(result)"})
public double getLog10LikelihoodOfAFGT0() {
return log10LikelihoodsOfAC[AF1p];
}
/**
* Get the log10 unnormalized -- across all ACs -- prior probability of AC == 0 for all alleles
*
* @return the log10 unnormalized prior probability that AC == 0
*/
@Ensures({"MathUtils.goodLog10Probability(result)"})
public double getLog10PriorOfAFEq0() {
return log10PriorsOfAC[AF0];
}
/**
* Get the log10 unnormalized -- across all ACs -- prior probability of AC > 0
*
* @return the log10 unnormalized prior probability that AC > 0
*/
@Ensures({"MathUtils.goodLog10Probability(result)"})
public double getLog10PriorOfAFGT0() {
return log10PriorsOfAC[AF1p];
}
@Override
public String toString() {
final List<String> byAllele = new LinkedList<String>();
for ( final Allele a : getAllelesUsedInGenotyping() )
if ( a.isNonReference() ) byAllele.add(String.format("%s => MLE %d / posterior %.2f", a, getAlleleCountAtMLE(a), getLog10PosteriorOfAFEq0ForAllele(a)));
return String.format("AFCalc%n\t\tlog10PosteriorOfAFGT0=%.2f%n\t\t%s", getLog10LikelihoodOfAFGT0(), Utils.join("\n\t\t", byAllele));
}
/**
* Are we sufficiently confident in being non-ref that the site is considered polymorphic?
*
* We are non-ref if the probability of being non-ref > the emit confidence (often an argument).
* Suppose the posterior probability of AF == 0 for the allele is log10: -5 => 10^-5
* And that log10minPNonRef is -3.
* We are considered polymorphic since 10^-5 < 10^-3 => -5 < -3
*
* Note that log10minPNonRef is really the minimum confidence, scaled as an error rate, so
* if you want to be 99% confidence, then log10PNonRef should be log10(0.01) = -2.
*
* @param log10minPNonRef the log10 scaled min pr of being non-ref to be considered polymorphic
*
* @return true if there's enough confidence (relative to log10minPNonRef) to reject AF == 0
*/
@Requires("MathUtils.goodLog10Probability(log10minPNonRef)")
public boolean isPolymorphic(final Allele allele, final double log10minPNonRef) {
return getLog10PosteriorOfAFEq0ForAllele(allele) < log10minPNonRef;
}
/**
* Same as #isPolymorphic but takes a phred-scaled quality score as input
*/
public boolean isPolymorphicPhredScaledQual(final Allele allele, final double minPNonRefPhredScaledQual) {
if ( minPNonRefPhredScaledQual < 0 ) throw new IllegalArgumentException("phredScaledQual " + minPNonRefPhredScaledQual + " < 0 ");
final double log10Threshold = minPNonRefPhredScaledQual / -10;
return isPolymorphic(allele, log10Threshold);
}
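// Editor's usage sketch (hypothetical values and method name, for documentation only; not used by the GATK engine):
// build a minimal bi-allelic result and ask whether the ALT allele is confidently segregating. A phred-scaled
// threshold of 30 corresponds to a log10 threshold of 30 / -10 = -3, and the made-up per-allele log10 p(AF == 0)
// of -4.5 is below it, so the allele is called polymorphic.
@SuppressWarnings("unused")
private static boolean isPolymorphicDocumentationSketch() {
final Allele ref = Allele.create("A", true);
final Allele alt = Allele.create("C", false);
final AFCalculationResult example = new AFCalculationResult(
new int[]{1}, // MLE AC of the single ALT allele
10, // number of evaluations
Arrays.asList(ref, alt),
new double[]{-5.0, -0.5}, // log10 likelihoods of AC == 0 and AC > 0
new double[]{-0.001, -2.3}, // log10 priors of AC == 0 and AC > 0
Collections.singletonMap(alt, -4.5)); // log10 p(AF == 0) for the ALT allele
return example.isPolymorphicPhredScaledQual(alt, 30.0); // true, since -4.5 < -3
}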
/**
* Are any of the alleles polymorphic w.r.t. #isPolymorphic?
*
* @param log10minPNonRef the confidence threshold, in log10 space
* @return true if any are poly, false otherwise
*/
public boolean anyPolymorphic(final double log10minPNonRef) {
for ( final Allele a : getAllelesUsedInGenotyping() )
if ( a.isNonReference() && isPolymorphic(a, log10minPNonRef) )
return true;
return false;
}
/**
* Returns the log10 probability that allele is not segregating
*
* Note that this function returns the probability that the allele is not segregating, so that we can store
* internally the log10 value of p(AF == 0), which grows very quickly
* negative and yet has sufficient resolution for high confidence tests.
* For example, if log10pRef == -100, not an unreasonably high number,
* if we tried to store log10pNonRef we'd be looking at 1 - 10^-100, which
* loses all precision and becomes simply 1 in floating point. So the logic here is backward from what
* you really want (the probability of segregating), but we do it this way for numerical
* reasons.
*
* Unlike the sites-level annotation, this calculation is specific to allele, and can be
* used to separately determine how much evidence there is that allele is independently
* segregating as opposed to the site being polymorphic with any allele. In the bi-allelic
* case these are obviously the same but for multiple alt alleles there can be lots of
* evidence for one allele but not so much for any other allele
*
* @param allele the allele we're interested in, must be in getAllelesUsedInGenotyping
* @return the log10 probability that allele is not segregating at this site
*/
@Ensures("MathUtils.goodLog10Probability(result)")
public double getLog10PosteriorOfAFEq0ForAllele(final Allele allele) {
final Double log10pRef = log10pRefByAllele.get(allele); // note: the map stores p(AF == 0), i.e. the probability of being ref
if ( log10pRef == null ) throw new IllegalArgumentException("Unknown allele " + allele);
return log10pRef;
}
/**
* Returns the log10 normalized posteriors given the log10 likelihoods and priors
*
* @param log10LikelihoodsOfAC the log10 likelihoods of AC == 0 and AC > 0
* @param log10PriorsOfAC the log10 priors of AC == 0 and AC > 0
*
* @return freshly allocated log10 normalized posteriors vector
*/
@Requires("log10LikelihoodsOfAC.length == log10PriorsOfAC.length")
@Ensures("MathUtils.goodLog10ProbVector(result, LOG_10_ARRAY_SIZES, true)")
private static double[] computePosteriors(final double[] log10LikelihoodsOfAC, final double[] log10PriorsOfAC) {
final double[] log10UnnormalizedPosteriors = new double[log10LikelihoodsOfAC.length];
for ( int i = 0; i < log10LikelihoodsOfAC.length; i++ )
log10UnnormalizedPosteriors[i] = log10LikelihoodsOfAC[i] + log10PriorsOfAC[i];
return MathUtils.normalizeFromLog10(log10UnnormalizedPosteriors, true, false);
}
/**
* Computes the offset into linear vectors indexed by alt allele for allele
*
* Things like our MLE allele count vector are indexed by alt allele index, with
* the first alt allele being 0, the second 1, etc. This function computes the index
* associated with allele.
*
* @param allele the allele whose alt index we'd like to know
* @throws IllegalArgumentException if allele isn't in allelesUsedInGenotyping
* @return an index value greater than or equal to 0 suitable for indexing into the MLE and other alt allele indexed arrays
*/
@Requires("allele != null")
@Ensures({"result >= 0", "result < allelesUsedInGenotyping.size() - 1"})
private int altAlleleIndex(final Allele allele) {
if ( allele.isReference() ) throw new IllegalArgumentException("Cannot get the alt allele index for reference allele " + allele);
final int index = allelesUsedInGenotyping.indexOf(allele);
if ( index == -1 )
throw new IllegalArgumentException("could not find allele " + allele + " in " + allelesUsedInGenotyping);
else
return index - 1;
}
}<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/ReferenceConfidenceModelUnitTest.java
package org.broadinstitute.gatk.tools.walkers.haplotypecaller;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.GenotypeLikelihoods;
import htsjdk.variant.variantcontext.GenotypeType;
import htsjdk.variant.variantcontext.VariantContext;
import org.broadinstitute.gatk.tools.walkers.genotyper.*;
import org.broadinstitute.gatk.utils.*;
import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods;
import org.broadinstitute.gatk.utils.genotyper.SampleList;
import org.broadinstitute.gatk.utils.genotyper.SampleListUtils;
import org.broadinstitute.gatk.utils.haplotype.Haplotype;
import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
import org.broadinstitute.gatk.utils.sam.GATKSAMReadGroupRecord;
import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
import org.broadinstitute.gatk.utils.variant.HomoSapiensConstants;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.*;
public class ReferenceConfidenceModelUnitTest extends BaseTest {
GenomeLocParser parser;
final String RGID = "ID1";
GATKSAMReadGroupRecord rg;
final String sample = "NA12878";
final SampleList samples = SampleListUtils.singletonList(sample);
SAMFileHeader header;
ReferenceConfidenceModel model;
@BeforeClass
public void setUp() throws Exception {
header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
rg = new GATKSAMReadGroupRecord(RGID);
rg.setSample(sample);
header.addReadGroup(rg);
parser = new GenomeLocParser(header.getSequenceDictionary());
}
@BeforeMethod
public void setupModel() {
model = new ReferenceConfidenceModel(parser, samples, header, 10);
}
@DataProvider(name = "CalcNIndelInformativeReadsData")
public Object[][] makeMyDataProvider() {
List<Object[]> tests = new ArrayList<>();
{ // very basic testing
final String ref = "ACGT";
final String read = "ACGT";
tests.add(new Object[]{read, ref, 1, Arrays.asList(1, 1, 1, 0)});
tests.add(new Object[]{read, ref, 2, Arrays.asList(1, 1, 0, 0)});
tests.add(new Object[]{read, ref, 3, Arrays.asList(1, 0, 0, 0)});
tests.add(new Object[]{read, ref, 4, Arrays.asList(0, 0, 0, 0)});
}
{ // actually interesting case where some sites aren't informative
final String ref = "NNAAAANN";
final String read1 = "NNA";
final String read2 = "NNAA";
final String read3 = "NNAAA";
final String read4 = "NNAAAA";
final String read5 = "NNAAAAN";
tests.add(new Object[]{read1, ref, 1, Arrays.asList(1, 1, 0, 0, 0, 0, 0, 0)});
tests.add(new Object[]{read2, ref, 1, Arrays.asList(1, 1, 0, 0, 0, 0, 0, 0)});
tests.add(new Object[]{read3, ref, 1, Arrays.asList(1, 1, 0, 0, 0, 0, 0, 0)});
tests.add(new Object[]{read4, ref, 1, Arrays.asList(1, 1, 0, 0, 0, 0, 0, 0)});
tests.add(new Object[]{read5, ref, 1, Arrays.asList(1, 1, 1, 1, 1, 1, 0, 0)});
}
{
for ( final String repeatUnit : Arrays.asList("A", "CA", "TAG", "TAGC", "TCAGA")) {
final String anchor = Utils.dupString("N", repeatUnit.length());
for ( int nUnits = 1; nUnits < 10; nUnits++ ) {
final String repeat = Utils.dupString(repeatUnit, nUnits);
final String ref = anchor + repeat + anchor;
for ( int readLen = repeatUnit.length(); readLen < repeat.length(); readLen++ ) {
final String read = anchor + repeat.substring(0, readLen);
final List<Integer> expected = new LinkedList<>();
for ( int i = 0; i < anchor.length(); i++ ) expected.add(1);
for ( int i = 0; i < repeat.length(); i++ ) expected.add(readLen == repeat.length() ? 1 : 0);
for ( int i = 0; i < anchor.length(); i++ ) expected.add(0);
tests.add(new Object[]{read, ref, repeatUnit.length(), expected});
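                        // also cover a read identical to the full reference: every position is expected
                        // to be informative except the trailing anchor bases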
final List<Integer> result = new ArrayList<>(Collections.nCopies(ref.length() - anchor.length(), 1));
result.addAll(Collections.nCopies(anchor.length(), 0));
tests.add(new Object[]{ref, ref, repeatUnit.length(), result});
}
}
}
}
return tests.toArray(new Object[][]{});
}
@Test(dataProvider = "CalcNIndelInformativeReadsData")
public void testCalcNIndelInformativeReads(final String readBases, final String ref, final int maxIndelSize, final List<Integer> expected ) {
final byte qual = (byte)30;
final byte[] quals = Utils.dupBytes(qual, readBases.length());
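        // for each offset i, build a single-read pileup at that position and compare the
        // number of indel-informative reads there against the expected value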
for ( int i = 0; i < readBases.getBytes().length; i++ ) {
final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(readBases.getBytes(), quals, readBases.length() + "M");
final GenomeLoc loc = new UnvalidatingGenomeLoc("20", 0, i, i);
final ReadBackedPileup pileup = new ReadBackedPileupImpl(loc, Collections.singletonList(read), i);
final int actual = model.calcNIndelInformativeReads(pileup, i, ref.getBytes(), maxIndelSize);
Assert.assertEquals(actual, (int)expected.get(i), "failed at position " + i);
}
}
@Test
public void testClose() {
model.close();
}
@Test
public void testWorstGL() {
final GenotypeLikelihoods gq10 = GenotypeLikelihoods.fromPLField("0,10,100");
final GenotypeLikelihoods gq20 = GenotypeLikelihoods.fromPLField("0,20,200");
final GenotypeLikelihoods gq0 = GenotypeLikelihoods.fromPLField("20,0,200");
Assert.assertSame(model.getGLwithWorstGQ(gq10, gq20), gq10);
Assert.assertSame(model.getGLwithWorstGQ(gq20, gq10), gq10);
Assert.assertSame(model.getGLwithWorstGQ(gq10, gq0), gq0);
Assert.assertSame(model.getGLwithWorstGQ(gq0, gq10), gq0);
}
@Test
public void testIndelLikelihoods() {
GenotypeLikelihoods prev = model.getIndelPLs(HomoSapiensConstants.DEFAULT_PLOIDY,0);
Assert.assertEquals(prev.getAsPLs(), new int[]{0, 0, 0});
Assert.assertEquals(-10 * prev.getLog10GQ(GenotypeType.HOM_REF), 0.0);
for ( int i = 1; i <= ReferenceConfidenceModel.MAX_N_INDEL_INFORMATIVE_READS; i++ ) {
final GenotypeLikelihoods current = model.getIndelPLs(HomoSapiensConstants.DEFAULT_PLOIDY,i);
final double prevGQ = -10 * prev.getLog10GQ(GenotypeType.HOM_REF);
final double currGQ = -10 * current.getLog10GQ(GenotypeType.HOM_REF);
Assert.assertTrue(prevGQ < currGQ, "GQ Failed with prev " + prev + " curr " + current + " at " + i);
Assert.assertTrue(prev.getAsPLs()[1] < current.getAsPLs()[1], "het PL failed with prev " + prev + " curr " + current + " at " + i);
Assert.assertTrue(prev.getAsPLs()[2] < current.getAsPLs()[2], "hom-var PL Failed with prev " + prev + " curr " + current + " at " + i);
// logger.warn("result at " + i + " is " + current);
prev = current;
}
}
@Test
public void testOverlappingVariantContext() {
final VariantContext vc10 = GATKVariantContextUtils.makeFromAlleles("test", "chr1", 10, Arrays.asList("A", "C"));
final VariantContext vc13 = GATKVariantContextUtils.makeFromAlleles("test", "chr1", 13, Arrays.asList("A", "C"));
final VariantContext vc12_15 = GATKVariantContextUtils.makeFromAlleles("test", "chr1", 12, Arrays.asList("ACAT", "A"));
final VariantContext vc18 = GATKVariantContextUtils.makeFromAlleles("test", "chr1", 18, Arrays.asList("A", "ACAT"));
final List<VariantContext> calls = Arrays.asList(vc13, vc12_15, vc18, vc10);
checkOverlapping(8, calls, null);
checkOverlapping(9, calls, null);
checkOverlapping(10, calls, vc10);
checkOverlapping(11, calls, null);
checkOverlapping(12, calls, vc12_15);
checkOverlapping(13, calls, vc13);
checkOverlapping(14, calls, vc12_15);
checkOverlapping(15, calls, vc12_15);
checkOverlapping(16, calls, null);
checkOverlapping(17, calls, null);
checkOverlapping(18, calls, vc18);
checkOverlapping(19, calls, null);
checkOverlapping(20, calls, null);
}
private void checkOverlapping(final int pos, Collection<VariantContext> calls, final VariantContext expected) {
final GenomeLoc loc = parser.createGenomeLoc(parser.getContigs().getSequences().get(0).getSequenceName(), pos, pos);
final VariantContext actual = model.getOverlappingVariantContext(loc, calls);
Assert.assertEquals(actual, expected);
}
//
// test reference calculation
//
private class RefConfData {
final String ref;
final int extension;
final Haplotype refHap;
final GenomeLoc refLoc, paddedRefLoc;
final ActiveRegion region;
int readCounter = 0;
private RefConfData(String ref, int extension) {
this.ref = ref;
this.extension = extension;
refLoc = parser.createGenomeLoc("chr1", getStart(), getEnd());
paddedRefLoc = parser.createGenomeLoc("chr1", getStart() - extension, getEnd() + extension);
region = new ActiveRegion(getRefLoc(), parser, extension);
final String pad = Utils.dupString("N", extension);
refHap = ReferenceConfidenceModel.createReferenceHaplotype(getActiveRegion(), (pad + ref + pad).getBytes(), getPaddedRefLoc());
}
public GenomeLoc getRefLoc() { return refLoc; }
public GenomeLoc getPaddedRefLoc() { return paddedRefLoc; }
public ActiveRegion getActiveRegion() { return region; }
public Haplotype getRefHap() { return refHap; }
public int getStart() { return 100; }
public int getEnd() { return getStart() + getRefLength() - 1; }
public byte[] getRefBases() { return ref.getBytes(); }
public int getRefLength() { return ref.length(); }
public GATKSAMRecord makeRead(final int start, final int length) {
final byte[] quals = Utils.dupBytes((byte)30, length);
final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read " + readCounter++, 0, start + getStart(), ref.substring(start, start + length).getBytes(), quals, length + "M");
read.setReadGroup(rg);
return read;
}
}
@DataProvider(name = "RefConfidenceData")
public Object[][] makeRefConfidenceData() {
List<Object[]> tests = new ArrayList<>();
for ( int i = 0; i < 10; i++ ) {
for ( final int extension : Arrays.asList(0, 10) ) {
tests.add(new Object[]{i, extension});
}
}
return tests.toArray(new Object[][]{});
}
@Test(dataProvider = "RefConfidenceData")
public void testRefConfidenceBasic(final int nReads, final int extension) {
final RefConfData data = new RefConfData("ACGTAACCGGTT", extension);
final List<Haplotype> haplotypes = Arrays.asList(data.getRefHap());
final List<VariantContext> calls = Collections.emptyList();
for ( int i = 0; i < nReads; i++ ) {
data.getActiveRegion().add(data.makeRead(0, data.getRefLength()));
}
final ReadLikelihoods<Haplotype> likelihoods = HaplotypeCaller.createDummyStratifiedReadMap(data.getRefHap(), samples, data.getActiveRegion());
final PloidyModel ploidyModel = new HomogeneousPloidyModel(samples,2);
final GenotypingModel genotypingModel = new InfiniteRandomMatingPopulationModel();
final List<Integer> expectedDPs = Collections.nCopies(data.getActiveRegion().getLocation().size(), nReads);
final List<VariantContext> contexts = model.calculateRefConfidence(data.getRefHap(), haplotypes, data.getPaddedRefLoc(), data.getActiveRegion(), likelihoods, ploidyModel, genotypingModel, calls);
checkReferenceModelResult(data, contexts, expectedDPs, calls);
}
@Test
public void testRefConfidencePartialReads() {
final PloidyModel ploidyModel = new HomogeneousPloidyModel(samples,2);
final GenotypingModel genotypingModel = new InfiniteRandomMatingPopulationModel();
final String ref = "ACGTAACCGGTT";
for ( int readLen = 3; readLen < ref.length(); readLen++ ) {
for ( int start = 0; start < ref.length() - readLen; start++ ) {
final RefConfData data = new RefConfData(ref, 0);
final List<Haplotype> haplotypes = Arrays.asList(data.getRefHap());
final List<VariantContext> calls = Collections.emptyList();
data.getActiveRegion().add(data.makeRead(start, readLen));
final ReadLikelihoods<Haplotype> likelihoods = HaplotypeCaller.createDummyStratifiedReadMap(data.getRefHap(), samples, data.getActiveRegion());
final List<Integer> expectedDPs = new ArrayList<>(Collections.nCopies(data.getActiveRegion().getLocation().size(), 0));
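                // only the positions actually covered by the single partial read should have depth 1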
for ( int i = start; i < readLen + start; i++ ) expectedDPs.set(i, 1);
final List<VariantContext> contexts = model.calculateRefConfidence(data.getRefHap(), haplotypes, data.getPaddedRefLoc(), data.getActiveRegion(), likelihoods, ploidyModel, genotypingModel, calls);
checkReferenceModelResult(data, contexts, expectedDPs, calls);
}
}
}
@Test
public void testRefConfidenceWithCalls() {
final RefConfData xxxdata = new RefConfData("ACGTAACCGGTT", 0);
final int start = xxxdata.getStart();
final int stop = xxxdata.getEnd();
final PloidyModel ploidyModel = new HomogeneousPloidyModel(samples,2);
final GenotypingModel genotypingModel = new InfiniteRandomMatingPopulationModel();
for ( int nReads = 0; nReads < 2; nReads++ ) {
final VariantContext vcStart = GATKVariantContextUtils.makeFromAlleles("test", "chr1", start, Arrays.asList("A", "C"));
final VariantContext vcEnd = GATKVariantContextUtils.makeFromAlleles("test", "chr1", stop, Arrays.asList("A", "C"));
final VariantContext vcMiddle = GATKVariantContextUtils.makeFromAlleles("test", "chr1", start + 2, Arrays.asList("A", "C"));
final VariantContext vcDel = GATKVariantContextUtils.makeFromAlleles("test", "chr1", start + 4, Arrays.asList("AAC", "A"));
final VariantContext vcIns = GATKVariantContextUtils.makeFromAlleles("test", "chr1", start + 8, Arrays.asList("G", "GCG"));
final List<VariantContext> allCalls = Arrays.asList(vcStart, vcEnd, vcMiddle, vcDel, vcIns);
for ( int n = 1; n <= allCalls.size(); n++ ) {
for ( final List<VariantContext> calls : Utils.makePermutations(allCalls, n, false) ) {
// logger.warn("Executing " + n + " " + calls.size());
final RefConfData data = new RefConfData("ACGTAACCGGTT", 0);
final List<Haplotype> haplotypes = Arrays.asList(data.getRefHap());
for ( int i = 0; i < nReads; i++ ) {
data.getActiveRegion().add(data.makeRead(0, data.getRefLength()));
}
final ReadLikelihoods<Haplotype> likelihoods = HaplotypeCaller.createDummyStratifiedReadMap(data.getRefHap(), samples, data.getActiveRegion());
final List<Integer> expectedDPs = Collections.nCopies(data.getActiveRegion().getLocation().size(), nReads);
final List<VariantContext> contexts = model.calculateRefConfidence(data.getRefHap(), haplotypes, data.getPaddedRefLoc(), data.getActiveRegion(), likelihoods, ploidyModel, genotypingModel, calls);
checkReferenceModelResult(data, contexts, expectedDPs, calls);
}
}
}
}
private void checkReferenceModelResult(final RefConfData data, final List<VariantContext> contexts, final List<Integer> expectedDPs, final List<VariantContext> calls) {
Assert.assertNotNull(contexts);
final GenomeLoc loc = data.getActiveRegion().getExtendedLoc();
final List<Boolean> seenBP = new ArrayList<>(Collections.nCopies(data.getActiveRegion().getLocation().size(), false));
for ( int i = 0; i < loc.size(); i++ ) {
final GenomeLoc curPos = parser.createGenomeLoc(loc.getContig(), loc.getStart() + i);
final VariantContext call = model.getOverlappingVariantContext(curPos, calls);
final VariantContext refModel = model.getOverlappingVariantContext(curPos, contexts);
if ( ! data.getActiveRegion().getLocation().containsP(curPos) ) {
// part of the extended interval, but not the full interval
Assert.assertNull(refModel);
continue;
}
if ( call != null ) {
if (call.isVariant() && refModel.getType() == VariantContext.Type.SYMBOLIC ) {
//Assert.assertEquals(refModel, call, "Should have found call " + call + " but found " + refModel + " instead");
Assert.assertTrue(call.getReference().length() > 1); // must be a deletion.
Assert.assertTrue(call.getStart() < refModel.getStart()); // the deletion must not start at the same position
Assert.assertEquals(call.getReference().getBaseString().substring(refModel.getStart() - call.getStart(),
refModel.getStart() - call.getStart() + 1), refModel.getReference().getBaseString(), "" + data.getRefHap()); // the reference must be the same.
Assert.assertTrue(refModel.getGenotype(0).getGQ() <= 0); // No confidence in the reference hom-ref call across the deletion
                    Assert.assertEquals(refModel.getAlleles().size(),2); // the reference and the lone <NON_REF>
Assert.assertEquals(refModel.getAlleles().get(1), GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE);
} else {
Assert.assertEquals(refModel, call, "Should have found call " + call + " but found " + refModel + " instead");
}
} else {
final int expectedDP = expectedDPs.get(curPos.getStart() - data.getActiveRegion().getLocation().getStart());
Assert.assertEquals(refModel.getStart(), loc.getStart() + i);
Assert.assertEquals(refModel.getEnd(), loc.getStart() + i);
Assert.assertFalse(refModel.hasLog10PError());
Assert.assertEquals(refModel.getAlternateAlleles().size(), 1);
Assert.assertEquals(refModel.getAlternateAllele(0), GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE);
Assert.assertTrue(refModel.hasGenotype(sample));
final Genotype g = refModel.getGenotype(sample);
Assert.assertTrue(g.hasAD());
Assert.assertTrue(g.hasDP());
Assert.assertEquals(g.getDP(), expectedDP);
Assert.assertTrue(g.hasGQ());
Assert.assertTrue(g.hasPL());
}
final VariantContext vc = call == null ? refModel : call;
if ( curPos.getStart() == vc.getStart() ) {
for ( int pos = vc.getStart(); pos <= vc.getEnd(); pos++ ) {
final int j = pos - data.getActiveRegion().getLocation().getStart();
Assert.assertFalse(seenBP.get(j));
seenBP.set(j, true);
}
}
}
for ( int i = 0; i < seenBP.size(); i++ ) {
Assert.assertEquals((boolean)seenBP.get(i), true);
}
}
}<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/genotyper/GeneralPloidyGenotypeLikelihoodsUnitTest.java
package org.broadinstitute.gatk.tools.walkers.genotyper;
import htsjdk.samtools.SAMUtils;
import org.apache.log4j.Logger;
import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
import org.broadinstitute.gatk.engine.walkers.Walker;
import org.broadinstitute.gatk.utils.BaseUtils;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.collections.Pair;
import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
import htsjdk.variant.variantcontext.*;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.io.File;
import java.io.PrintStream;
import java.util.*;
public class GeneralPloidyGenotypeLikelihoodsUnitTest {
final UnifiedArgumentCollection UAC = new UnifiedArgumentCollection();
final Logger logger = Logger.getLogger(Walker.class);
private static final boolean VERBOSE = false;
private static final boolean SIMULATE_NOISY_PILEUP = false;
private static final int NUM_SIMULATED_OBS = 10;
void PoolGenotypeLikelihoodsUnitTest() {
UAC.minQualityScore = 5;
UAC.maxQualityScore = 40;
UAC.phredScaledPrior = (byte)20;
UAC.minPower = 0.0;
}
@Test
public void testStoringLikelihoodElements() {
// basic test storing a given PL vector in a GeneralPloidyGenotypeLikelihoods object and then retrieving it back
int ploidy = 20;
int numAlleles = 4;
int res = GenotypeLikelihoods.numLikelihoods(numAlleles, ploidy);
// System.out.format("Alt Alleles: %d, Ploidy: %d, #Likelihoods: %d\n", numAltAlleles, ploidy, res);
List<Allele> alleles = new ArrayList<Allele>();
alleles.add(Allele.create("T",true));
alleles.add(Allele.create("C",false));
alleles.add(Allele.create("A",false));
alleles.add(Allele.create("G",false));
double[] gls = new double[res];
for (int k=0; k < gls.length; k++)
gls[k]= (double)k;
GeneralPloidyGenotypeLikelihoods gl = new GeneralPloidySNPGenotypeLikelihoods(alleles, gls,ploidy, null, false,true);
double[] glnew = gl.getLikelihoods();
Assert.assertEquals(gls, glnew);
}
@Test
public void testElementStorageCache() {
        // compare cached element storage with the computationally hard-coded iterative computation
for (int ploidy = 2; ploidy < 10; ploidy++) {
for (int nAlleles = 2; nAlleles < 10; nAlleles++)
Assert.assertEquals(GeneralPloidyGenotypeLikelihoods.getNumLikelihoodElements(nAlleles, ploidy),
GenotypeLikelihoods.numLikelihoods(nAlleles, ploidy));
}
}
@Test
public void testVectorToLinearIndex() {
// create iterator, compare linear index given by iterator with closed form function
int numAlleles = 4;
int ploidy = 2;
GeneralPloidyGenotypeLikelihoods.SumIterator iterator = new GeneralPloidyGenotypeLikelihoods.SumIterator(numAlleles, ploidy);
while(iterator.hasNext()) {
System.out.format("\n%d:",iterator.getLinearIndex());
int[] a = iterator.getCurrentVector();
for (int aa: a)
System.out.format("%d ",aa);
int computedIdx = GeneralPloidyGenotypeLikelihoods.getLinearIndex(a, numAlleles, ploidy);
System.out.format("Computed idx = %d\n",computedIdx);
iterator.next();
}
}
@Test
public void testSubsetToAlleles() {
int ploidy = 2;
int numAlleles = 4;
int res = GenotypeLikelihoods.numLikelihoods(numAlleles, ploidy);
// System.out.format("Alt Alleles: %d, Ploidy: %d, #Likelihoods: %d\n", numAltAlleles, ploidy, res);
List<Allele> originalAlleles = new ArrayList<Allele>();
originalAlleles.add(Allele.create("T",true));
originalAlleles.add(Allele.create("C",false));
originalAlleles.add(Allele.create("A",false));
originalAlleles.add(Allele.create("G",false));
double[] oldLikelihoods = new double[res];
for (int k=0; k < oldLikelihoods.length; k++)
oldLikelihoods[k]= (double)k;
List<Allele> allelesToSubset = new ArrayList<Allele>();
allelesToSubset.add(Allele.create("A",false));
allelesToSubset.add(Allele.create("C",false));
double[] newGLs = GeneralPloidyGenotypeLikelihoods.subsetToAlleles(oldLikelihoods, ploidy,
originalAlleles, allelesToSubset);
/*
For P=2, N=4, default iteration order:
0:2 0 0 0
1:1 1 0 0
2:0 2 0 0
3:1 0 1 0
4:0 1 1 0
5:0 0 2 0
6:1 0 0 1
7:0 1 0 1
8:0 0 1 1
9:0 0 0 2
For P=2,N=2, iteration order is:
0:2 0
1:1 1
2:0 2
        From the first list, if we're extracting alleles 2 and 1, we need all elements that have zero at positions 0 and 3.
        These are elements {2,4,5} only. Since the test flips alleles 2 and 1, the order is reversed.
*/
Assert.assertEquals(newGLs,new double[]{5.0,4.0,2.0});
}
@Test
public void testIndexIterator() {
int[] seed = new int[]{1,2,3,4};
GeneralPloidyGenotypeLikelihoods.SumIterator iterator = runIterator(seed,-1);
// Assert.assertTrue(compareIntArrays(iterator.getCurrentVector(), seed));
Assert.assertEquals(iterator.getLinearIndex(),prod(seed)-1);
seed = new int[]{1,0,1,1};
iterator = runIterator(seed,-1);
// Assert.assertTrue(compareIntArrays(iterator.getCurrentVector(), seed));
Assert.assertEquals(iterator.getLinearIndex(),prod(seed)-1);
seed = new int[]{5};
iterator = runIterator(seed,-1);
// Assert.assertTrue(compareIntArrays(iterator.getCurrentVector(), seed));
Assert.assertEquals(iterator.getLinearIndex(),prod(seed)-1);
// Diploid, # alleles = 4
seed = new int[]{2,2,2,2};
iterator = runIterator(seed,2);
// Assert.assertTrue(compareIntArrays(iterator.getCurrentVector(), seed));
Assert.assertEquals(iterator.getLinearIndex(),9);
// Diploid, # alleles = 2
seed = new int[]{2,2};
iterator = runIterator(seed,2);
// Assert.assertTrue(compareIntArrays(iterator.getCurrentVector(), seed));
Assert.assertEquals(iterator.getLinearIndex(),2);
// Diploid, # alleles = 3
seed = new int[]{2,2,2};
iterator = runIterator(seed,2);
// Assert.assertTrue(compareIntArrays(iterator.getCurrentVector(), seed));
Assert.assertEquals(iterator.getLinearIndex(),5);
// Triploid, # alleles = 2
seed = new int[]{3,3};
iterator = runIterator(seed,3);
// Assert.assertTrue(compareIntArrays(iterator.getCurrentVector(), seed));
Assert.assertEquals(iterator.getLinearIndex(),3);
// Triploid, # alleles = 3
seed = new int[]{3,3,3};
iterator = runIterator(seed,3);
// Assert.assertTrue(compareIntArrays(iterator.getCurrentVector(), seed));
Assert.assertEquals(iterator.getLinearIndex(),9);
// Triploid, # alleles = 4
seed = new int[]{3,3,3,3};
iterator = runIterator(seed,3);
// Assert.assertTrue(compareIntArrays(iterator.getCurrentVector(), seed));
Assert.assertEquals(iterator.getLinearIndex(),19);
// 8-ploid, # alleles = 6
seed = new int[]{8,8,8,8,8,8};
iterator = runIterator(seed,8);
// Assert.assertTrue(compareIntArrays(iterator.getCurrentVector(), seed));
Assert.assertEquals(iterator.getLinearIndex(),1286);
}
private GeneralPloidyGenotypeLikelihoods.SumIterator runIterator(int[] seed, int restrictSumTo) {
GeneralPloidyGenotypeLikelihoods.SumIterator iterator = new GeneralPloidyGenotypeLikelihoods.SumIterator(seed, restrictSumTo);
while(iterator.hasNext()) {
int[] a = iterator.getCurrentVector();
int idx = GeneralPloidyGenotypeLikelihoods.getLinearIndex(a, a.length, restrictSumTo);
if (VERBOSE) {
System.out.format("%d:",iterator.getLinearIndex());
for (int i=0; i < seed.length; i++)
System.out.format("%d ",a[i]);
System.out.format(" LI:%d\n", idx);
}
iterator.next();
}
return iterator;
}
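    // Used by the unrestricted (restrictSumTo == -1) assertions above: each component i ranges over
    // 0..x[i], so the SumIterator enumerates prod_i (x[i] + 1) vectors in total.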
private static int prod(int[] x) {
int prod = 1;
for (int xx : x) {
prod *= (1+xx);
}
return prod;
}
@Test
public void testErrorModel() {
final ArtificialReadPileupTestProvider refPileupTestProvider = new ArtificialReadPileupTestProvider(1,"ref");
final byte refByte = refPileupTestProvider.getRefByte();
final byte altByte = refByte == (byte)'T'? (byte) 'C': (byte)'T';
final String refSampleName = refPileupTestProvider.getSampleNames().get(0);
final List<Allele> trueAlleles = new ArrayList<Allele>();
trueAlleles.add(Allele.create(refByte, true));
final VariantContext refVC = new VariantContextBuilder("test","chr1",5, 5,
trueAlleles).genotypes(GenotypeBuilder.create(refSampleName, trueAlleles)).make();
final int[] matchArray = {95, 995, 9995, 10000};
final int[] mismatchArray = {1,5,10,20};
if (VERBOSE) System.out.println("Running SNP error model test");
for (int matches: matchArray) {
for (int mismatches: mismatchArray) {
// get artificial alignment context for ref sample - no noise
Map<String,AlignmentContext> refContext = refPileupTestProvider.getAlignmentContextFromAlleles(0, new String(new byte[]{altByte}), new int[]{matches, mismatches}, false, 30);
final ReadBackedPileup refPileup = refContext.get(refSampleName).getBasePileup();
final ErrorModel emodel = new ErrorModel(UAC, refPileup, refVC, refPileupTestProvider.getReferenceContext());
final double[] errorVec = emodel.getErrorModelVector().getProbabilityVector();
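                // worked example of the assertion below: matches=995, mismatches=5 gives
                // mlEst = -10*log10(5/1000) ~ 23.0, so the error-model vector is expected to peak at index 23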
final double mlEst = -10.0*Math.log10((double)mismatches/(double)(matches+mismatches));
final int peakIdx = (int)Math.round(mlEst);
if (VERBOSE) System.out.format("Matches:%d Mismatches:%d maxV:%d peakIdx:%d\n",matches, mismatches, MathUtils.maxElementIndex(errorVec),peakIdx);
Assert.assertEquals(MathUtils.maxElementIndex(errorVec),peakIdx);
}
}
}
@Test
public void testIndelErrorModel() {
final ArtificialReadPileupTestProvider refPileupTestProvider = new ArtificialReadPileupTestProvider(1,"ref");
final byte refByte = refPileupTestProvider.getRefByte();
final String altBases = "TCA";
final String refSampleName = refPileupTestProvider.getSampleNames().get(0);
final List<Allele> trueAlleles = new ArrayList<Allele>();
trueAlleles.add(Allele.create(refByte, true));
trueAlleles.add(Allele.create((char)refByte + "TC", false));
final String fw = new String(refPileupTestProvider.getReferenceContext().getForwardBases());
final VariantContext refInsertionVC = new VariantContextBuilder("test","chr1",refPileupTestProvider.getReferenceContext().getLocus().getStart(),
refPileupTestProvider.getReferenceContext().getLocus().getStart(), trueAlleles).
genotypes(GenotypeBuilder.create(refSampleName, trueAlleles)).make();
final int[] matchArray = {95, 995, 9995, 10000};
final int[] mismatchArray = {1,5,10,20};
if (VERBOSE) System.out.println("Running indel error model test");
for (int matches: matchArray) {
for (int mismatches: mismatchArray) {
// get artificial alignment context for ref sample - no noise
// CASE 1: Test HET insertion
// Ref sample has TC insertion but pileup will have TCA inserted instead to test mismatches
Map<String,AlignmentContext> refContext = refPileupTestProvider.getAlignmentContextFromAlleles(1+altBases.length(), altBases, new int[]{matches, mismatches}, false, 30);
final ReadBackedPileup refPileup = refContext.get(refSampleName).getBasePileup();
final ErrorModel emodel = new ErrorModel(UAC, refPileup, refInsertionVC, refPileupTestProvider.getReferenceContext());
final double[] errorVec = emodel.getErrorModelVector().getProbabilityVector();
final double mlEst = -10.0*Math.log10((double)mismatches/(double)(matches+mismatches));
final int peakIdx = (int)Math.round(mlEst);
if (VERBOSE) System.out.format("Matches:%d Mismatches:%d peakIdx:%d\n",matches, mismatches, peakIdx);
Assert.assertEquals(MathUtils.maxElementIndex(errorVec),peakIdx);
// CASE 2: Test HET deletion
}
}
// create deletion VC
final int delLength = 4;
final List<Allele> delAlleles = new ArrayList<Allele>();
delAlleles.add(Allele.create(fw.substring(0,delLength+1), true));
delAlleles.add(Allele.create(refByte, false));
final VariantContext refDeletionVC = new VariantContextBuilder("test","chr1",refPileupTestProvider.getReferenceContext().getLocus().getStart(),
refPileupTestProvider.getReferenceContext().getLocus().getStart()+delLength, delAlleles).
genotypes(GenotypeBuilder.create(refSampleName, delAlleles)).make();
for (int matches: matchArray) {
for (int mismatches: mismatchArray) {
// get artificial alignment context for ref sample - no noise
                // CASE 2: Test HET deletion
// Ref sample has 4bp deletion but pileup will have 3 bp deletion instead to test mismatches
Map<String,AlignmentContext> refContext = refPileupTestProvider.getAlignmentContextFromAlleles(-delLength+1, altBases, new int[]{matches, mismatches}, false, 30);
final ReadBackedPileup refPileup = refContext.get(refSampleName).getBasePileup();
final ErrorModel emodel = new ErrorModel(UAC, refPileup, refDeletionVC, refPileupTestProvider.getReferenceContext());
final double[] errorVec = emodel.getErrorModelVector().getProbabilityVector();
final double mlEst = -10.0*Math.log10((double)mismatches/(double)(matches+mismatches));
final int peakIdx = (int)Math.round(mlEst);
if (VERBOSE) System.out.format("Matches:%d Mismatches:%d peakIdx:%d\n",matches, mismatches, peakIdx);
Assert.assertEquals(MathUtils.maxElementIndex(errorVec),peakIdx);
// CASE 2: Test HET deletion
}
}
}
@Test
public void testAddPileupToPoolGL() {
        // dummy error model - Q = infinity for all practical purposes, so that there's no source of uncertainty
final double[] emv = new double[SAMUtils.MAX_PHRED_SCORE+1];
// error rate for noisy tests
final int PHRED_SITE_ERROR_RATE = 20;
Arrays.fill(emv, Double.NEGATIVE_INFINITY);
emv[SAMUtils.MAX_PHRED_SCORE] = 0;
final int numSamples = 1;
        // have a high-quality site (say Q40) and create artificial pileups for one single sample, at coverage N, with a given
        // true pool AC = x.
final ArtificialReadPileupTestProvider readPileupTestProvider = new ArtificialReadPileupTestProvider(numSamples,"sample", (byte)SAMUtils.MAX_PHRED_SCORE);
final ErrorModel noiselessErrorModel = new ErrorModel(emv);
final double[] emverr = new double[SAMUtils.MAX_PHRED_SCORE+1];
Arrays.fill(emverr, Double.NEGATIVE_INFINITY);
emverr[PHRED_SITE_ERROR_RATE] = 0;
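        // noisy model: all probability mass is placed at Q20, i.e. an error rate of roughly 1%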
final ErrorModel Q30ErrorModel = new ErrorModel(emverr);
final int eventLength = 0; // test snp only
final byte refByte = readPileupTestProvider.getRefByte();
final byte altByte = refByte == (byte)'T'? (byte) 'C': (byte)'T';
final List<Allele> allAlleles = new ArrayList<Allele>(); // this contains only ref Allele up to now
final Set<String> laneIDs = new TreeSet<String>();
laneIDs.add(GenotypeLikelihoodsCalculationModel.DUMMY_LANE);
final HashMap<String, ErrorModel> noiselessErrorModels = new HashMap<String, ErrorModel>();
// build per-lane error model for all lanes present in ref sample
for (String laneID : laneIDs)
noiselessErrorModels.put(laneID, noiselessErrorModel);
final HashMap<String, ErrorModel> noisyErrorModels = new HashMap<String, ErrorModel>();
// build per-lane error model for all lanes present in ref sample
for (String laneID : laneIDs)
noisyErrorModels.put(laneID, Q30ErrorModel);
        // add the ref allele first, then the alt alleles
allAlleles.add(Allele.create(refByte,true));
for (byte b: BaseUtils.BASES) {
if (refByte != b)
allAlleles.add(Allele.create(b, false));
}
final int refIdx = 0;
int altIdx = -1;
for (int k=0; k < allAlleles.size(); k++)
if (altByte == allAlleles.get(k).getBases()[0]) {
altIdx = k;
break;
}
PrintStream out = null;
if (SIMULATE_NOISY_PILEUP) {
try {
out = new PrintStream(new File("GLUnitTest.table"));
// out = new PrintStream(new File("/Users/delangel/GATK/Sting_unstable/GLUnitTest.table"));
}
            catch (Exception e) { throw new RuntimeException("Could not create GLUnitTest.table for the noisy-pileup simulation", e); }
// write header
out.format("Depth\tPoolPloidy\tACTrue\tACEst\tREF\tALTTrue\tALTEst\n");
}
final int[] depthVector = {1000,10000};
//final double[] alleleFrequencyVector = {0.01,0.1,0.5,1.0};
final int[] spVector = {10,100};
//final int[] spVector = {1};
for (int depth : depthVector) {
for (int nSamplesPerPool : spVector) {
final int ploidy = 2*nSamplesPerPool;
for (int ac =2; ac <=ploidy; ac++) {
// simulate pileup with given AC and depth
int altDepth = (int)Math.round( (double)ac/(double)ploidy * (double)depth);
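                    // e.g. depth=1000, ploidy=20, ac=2 gives altDepth=100 alt-supporting reads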
final int[] numReadsPerAllele = {depth-altDepth,altDepth};
final Map<String,AlignmentContext> alignmentContextMap =
readPileupTestProvider.getAlignmentContextFromAlleles(eventLength, new String(new byte[]{altByte}), numReadsPerAllele);
// get now likelihoods for this
final GeneralPloidySNPGenotypeLikelihoods GL = new GeneralPloidySNPGenotypeLikelihoods(allAlleles, null, nSamplesPerPool*2, noiselessErrorModels, false, true);
final int nGoodBases = GL.add(alignmentContextMap.get("sample0000").getBasePileup(), true, false, UAC.MIN_BASE_QUALTY_SCORE);
if (VERBOSE) {
System.out.format("Depth:%d, AC:%d, altDepth:%d, samplesPerPool:%d\nGLs:", depth,ac,altDepth, nSamplesPerPool);
System.out.println(GL.toString());
}
Assert.assertEquals(nGoodBases, depth);
Pair<int[],Double> mlPair = GL.getMostLikelyACCount();
                    // Most likely element has to be the conformation REF = ploidy-AC, ALT = AC
if (ac == 0) {
Assert.assertEquals(mlPair.first[refIdx],ploidy);
} else {
Assert.assertEquals(mlPair.first[altIdx],ac);
Assert.assertEquals(mlPair.first[refIdx],ploidy-ac);
}
// simulate now pileup with base error rate
if (SIMULATE_NOISY_PILEUP) {
System.out.format("Depth:%d, AC:%d, altDepth:%d, samplesPerPool:%d\n", depth,ac,altDepth, nSamplesPerPool);
for (int k=0; k < NUM_SIMULATED_OBS; k++) {
final Map<String,AlignmentContext> noisyAlignmentContextMap =
readPileupTestProvider.getAlignmentContextFromAlleles(eventLength, new String(new byte[]{altByte}), numReadsPerAllele,
true, PHRED_SITE_ERROR_RATE);
// get now likelihoods for this
final GeneralPloidySNPGenotypeLikelihoods noisyGL = new GeneralPloidySNPGenotypeLikelihoods(allAlleles, null, nSamplesPerPool*2, noisyErrorModels, false,true);
noisyGL.add(noisyAlignmentContextMap.get("sample0000").getBasePileup(), true, false, UAC.MIN_BASE_QUALTY_SCORE);
mlPair = noisyGL.getMostLikelyACCount();
                            // Most likely element has to be the conformation REF = ploidy-AC, ALT = AC
int acEst;
if (ac == 0) {
acEst = mlPair.first[refIdx];
} else {
acEst = mlPair.first[altIdx];
}
byte altEst = BaseUtils.baseIndexToSimpleBase(MathUtils.maxElementIndex(mlPair.first));
out.format("%d\t%d\t%d\t%d\t%c\t%c\t%c\n",depth, ploidy, ac, acEst, refByte, altByte, altEst);
}
}
}
}
}
if (SIMULATE_NOISY_PILEUP)
out.close();
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/UnifiedArgumentCollection.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper;
import org.broadinstitute.gatk.utils.commandline.*;
import org.broadinstitute.gatk.tools.walkers.genotyper.StandardCallerArgumentCollection;
import org.broadinstitute.gatk.utils.pairhmm.PairHMM;
import htsjdk.variant.variantcontext.VariantContext;
public class UnifiedArgumentCollection extends StandardCallerArgumentCollection {
@Argument(fullName = "genotype_likelihoods_model", shortName = "glm", doc = "Genotype likelihoods calculation model to employ -- SNP is the default option, while INDEL is also available for calling indels and BOTH is available for calling both together", required = false)
public GenotypeLikelihoodsCalculationModel.Model GLmodel = GenotypeLikelihoodsCalculationModel.Model.SNP;
/**
* The PCR error rate is modeled independently of the sequencing error rate, which is necessary because we cannot
* reliably distinguish between PCR errors and sequencing errors. The practical implication of this value is that it
* effectively acts as a cap on the base qualities.
*/
@Argument(fullName = "pcr_error_rate", shortName = "pcr_error", doc = "The PCR error rate to be used for computing fragment-based likelihoods", required = false)
public Double PCR_error = DiploidSNPGenotypeLikelihoods.DEFAULT_PCR_ERROR_RATE;
/**
* Note that calculating the SLOD increases the runtime by an appreciable amount.
*/
@Argument(fullName = "computeSLOD", shortName = "slod", doc = "If provided, we will calculate the SLOD (SB annotation)", required = false)
public boolean COMPUTE_SLOD = false;
/**
* The PairHMM implementation to use for -glm INDEL genotype likelihood calculations. The various implementations balance a tradeoff of accuracy and runtime.
*/
@Argument(fullName = "pair_hmm_implementation", shortName = "pairHMM", doc = "The PairHMM implementation to use for -glm INDEL genotype likelihood calculations", required = false)
public PairHMM.HMM_IMPLEMENTATION pairHMM = PairHMM.HMM_IMPLEMENTATION.LOGLESS_CACHING;
/**
* The minimum confidence needed in a given base for it to be used in variant calling. Note that the base quality of a base
* is capped by the mapping quality so that bases on reads with low mapping quality may get filtered out depending on this value.
* Note too that this argument is ignored in indel calling, where low-quality ends of reads are instead clipped off (with a fixed threshold of Q20).
*/
@Argument(fullName = "min_base_quality_score", shortName = "mbq", doc = "Minimum base quality required to consider a base for calling", required = false)
public int MIN_BASE_QUALTY_SCORE = 17;
/**
* If the fraction of reads with deletions spanning a locus is greater than this value, the site will not be considered callable and will be skipped.
* To disable the use of this parameter, set its value to >1.
*/
@Argument(fullName = "max_deletion_fraction", shortName = "deletions", doc = "Maximum fraction of reads with deletions spanning this locus for it to be callable", required = false)
public Double MAX_DELETION_FRACTION = 0.05;
// indel-related arguments
/**
* A candidate indel is genotyped (and potentially called) if there are this number of reads with a consensus indel at a site.
* Decreasing this value will increase sensitivity but at the cost of larger calling time and a larger number of false positives.
*/
@Argument(fullName = "min_indel_count_for_genotyping", shortName = "minIndelCnt", doc = "Minimum number of consensus indels required to trigger genotyping run", required = false)
public int MIN_INDEL_COUNT_FOR_GENOTYPING = 5;
/**
* Complementary argument to minIndelCnt. Only samples with at least this fraction of indel-containing reads will
* contribute to counting toward, and overcoming, the minIndelCnt threshold. This parameter ensures that in deep data
* you don't end up summing lots of extremely rare errors to overcome the default threshold of 5 reads. It should work
* equally well for low-coverage and high-coverage samples, since low-coverage samples with any indel-containing reads
* should easily overcome this threshold.
*/
@Argument(fullName = "min_indel_fraction_per_sample", shortName = "minIndelFrac", doc = "Minimum fraction of all reads at a locus that must contain an indel (of any allele) for that sample to contribute to the indel count for alleles", required = false)
public double MIN_INDEL_FRACTION_PER_SAMPLE = 0.25;
@Advanced
@Argument(fullName = "indelGapContinuationPenalty", shortName = "indelGCP", doc = "Indel gap continuation penalty, as Phred-scaled probability. I.e., 30 => 10^-30/10", required = false)
public byte INDEL_GAP_CONTINUATION_PENALTY = 10;
@Advanced
@Argument(fullName = "indelGapOpenPenalty", shortName = "indelGOP", doc = "Indel gap open penalty, as Phred-scaled probability. I.e., 30 => 10^-30/10", required = false)
public byte INDEL_GAP_OPEN_PENALTY = 45;
@Hidden
@Argument(fullName = "indelHaplotypeSize", shortName = "indelHSize", doc = "Indel haplotype size", required = false)
public int INDEL_HAPLOTYPE_SIZE = 80;
@Hidden
@Argument(fullName = "indelDebug", shortName = "indelDebug", doc = "Output indel debug info", required = false)
public boolean OUTPUT_DEBUG_INDEL_INFO = false;
@Hidden
@Argument(fullName = "ignoreSNPAlleles", shortName = "ignoreSNPAlleles", doc = "expt", required = false)
public boolean IGNORE_SNP_ALLELES = false;
/*
Generalized ploidy argument (debug only): squash all reads into a single pileup without considering sample info
*/
@Hidden
@Argument(fullName = "allReadsSP", shortName = "dl", doc = "expt", required = false)
public boolean TREAT_ALL_READS_AS_SINGLE_POOL = false;
/*
Generalized ploidy argument (debug only): When building site error models, ignore lane information and build only
sample-level error model
*/
@Hidden
@Argument(fullName = "ignoreLaneInfo", shortName = "ignoreLane", doc = "Ignore lane when building error model, error model is then per-site", required = false)
public boolean IGNORE_LANE_INFO = false;
/*
Generalized ploidy argument: VCF file that contains truth calls for reference sample. If a reference sample is included through argument -refsample,
then this argument is required.
*/
@Hidden
@Input(fullName="reference_sample_calls", shortName = "referenceCalls", doc="VCF file with the truth callset for the reference sample", required=false)
RodBinding<VariantContext> referenceSampleRod;
/*
Reference sample name: if included, a site-specific error model will be built in order to improve calling quality. Ideally this requires
that a bar-coded reference sample be included with the polyploid/pooled data in the sequencing experimental design.
If this argument is absent, no per-site error model is built and calling is done with a generalization of traditional statistical calling.
*/
@Hidden
@Argument(shortName="refsample", fullName="reference_sample_name", doc="Reference sample name.", required=false)
String referenceSampleName;
/**
* The following arguments are debug-only tweaks for running generalized ploidy with a reference sample
*/
@Hidden
@Argument(shortName="minqs", fullName="min_quality_score", doc="Min quality score to consider. Smaller numbers process faster. Default: Q1.", required=false)
byte minQualityScore= 1;
@Hidden
@Argument(shortName="maxqs", fullName="max_quality_score", doc="Max quality score to consider. Smaller numbers process faster. Default: Q40.", required=false)
byte maxQualityScore= 40;
@Hidden
@Argument(shortName="site_prior", fullName="site_quality_prior", doc="Phred-Scaled prior quality of the site. Default: Q20.", required=false)
byte phredScaledPrior = 20;
@Hidden
@Argument(shortName = "min_call_power", fullName = "min_power_threshold_for_calling", doc="The minimum confidence in the error model to make a call. Number should be between 0 (no power requirement) and 1 (maximum power required).", required = false)
double minPower = 0.95;
/**
* Create a new UAC with defaults for all UAC arguments
*/
public UnifiedArgumentCollection() {
super();
}
@Override
public UnifiedArgumentCollection clone() {
return (UnifiedArgumentCollection) super.clone();
}
}
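/*
* Usage sketch, illustrative only: a hypothetical caller (e.g. a test harness) configuring this argument
* collection programmatically. Field and enum names are taken from the declarations and documentation above.
*
*   UnifiedArgumentCollection uac = new UnifiedArgumentCollection();
*   uac.GLmodel = GenotypeLikelihoodsCalculationModel.Model.BOTH; // call SNPs and indels together
*   uac.MIN_INDEL_COUNT_FOR_GENOTYPING = 10;                      // demand more consensus indel reads
*   uac.MIN_INDEL_FRACTION_PER_SAMPLE = 0.10;                     // but a lower per-sample fraction
*   uac.pairHMM = PairHMM.HMM_IMPLEMENTATION.LOGLESS_CACHING;     // the default PairHMM implementation
*   UnifiedArgumentCollection copy = uac.clone();                 // independent copy for another walker
*/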
<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/bqsr/AnalyzeCovariatesIntegrationTest.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.bqsr;
import org.broadinstitute.gatk.engine.walkers.WalkerTest;
import org.broadinstitute.gatk.utils.Utils;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import static org.testng.Assert.assertTrue;
/**
* Tests Analyze Covariates.
* <p/>
* Notice that since the PDF reports generated by R differ every time this program
* is executed, their content is not tested; the tests only verify that the file has a healthy size.
*
*/
public class AnalyzeCovariatesIntegrationTest extends WalkerTest {
private static final String TOOL_NAME = AnalyzeCovariates.class.getSimpleName();
/**
* Directory where the testdata is located.
*/
private static final File TEST_DATA_DIR = new File(privateTestDir,"AnalyzeCovariates");
/**
* File containing the before report for normal testing.
*/
private static final File BEFORE_FILE = new File(TEST_DATA_DIR,"before.table");
/**
* File containing the after report for normal testing.
*/
private static final File AFTER_FILE = new File(TEST_DATA_DIR,"after.table");
/**
* File containing the bqsr report for normal testing.
*/
private static final File BQSR_FILE = new File(TEST_DATA_DIR,"bqsr.table");
/**
* Test the content of the generated csv file.
*
* @throws IOException should never happen. It would be an indicator of a
* problem with the testing environment.
*/
@Test(enabled = true)
public void testCsvGeneration()
throws IOException {
final WalkerTestSpec spec = new WalkerTestSpec(
buildCommandLine("%s",null,true,true,true),
Collections.singletonList("106709d32e6f0a0a9dd6a6340ec246ab"));
executeTest("testCsvGeneration",spec);
}
/**
* Test the size of the generated pdf.
* <p/>
* Unfortunately we cannot test the content as it changes slightly
* every time the tool is run.
*
* @throws IOException should never happen. It would be an
* indicator of a problem with the testing environment.
*/
@Test(enabled = true)
public void testPdfGeneration()
throws IOException {
final File pdfFile = createTempFile("ACTest", ".pdf");
pdfFile.delete();
final List<String> md5 = Collections.emptyList();
final WalkerTestSpec spec = new WalkerTestSpec(
buildCommandLine(null,pdfFile.toString(),true,true,true),md5);
executeTest("testPdfGeneration",spec);
assertTrue(pdfFile.exists(),"the pdf file was not created");
assertTrue(pdfFile.length() > 260000,"the pdf file size does"
+ " not reach the minimum of 260Kb");
}
/**
* Test the effect of changing some recalibration parameters.
* @param afterFileName name of the alternative after recalibration file.
* @param description describes what has been changed.
* @throws IOException should never happen. It would be an
* indicator of a problem with the testing environment.
*/
@Test(enabled = true, dataProvider="alternativeAfterFileProvider")
public void testParameterChangeException(final String afterFileName,
final String description)
throws IOException {
final File afterFile = new File(TEST_DATA_DIR,afterFileName);
final WalkerTestSpec spec = new WalkerTestSpec(
buildCommandLine(null,"%s",true,true,afterFile),
1,UserException.IncompatibleRecalibrationTableParameters.class);
executeTest("testParameterChangeException - " + description, spec);
}
/**
* Test combinations of input and output inclusion exclusion of the command
* line that cause an exception to be thrown.
*
* @param useCsvFile whether to include the output csv file.
* @param usePdfFile whether to include the output pdf file.
* @param useBQSRFile whether to include the -BQSR input file.
* @param useBeforeFile whether to include the -before input file.
* @param useAfterFile whether to include the -after input file.
* @throws IOException never thrown, unless there is a problem with the testing environment.
*/
@Test(enabled = true, dataProvider="alternativeInOutAbsenceCombinations")
public void testInOutAbsenceException(final boolean useCsvFile, final boolean usePdfFile,
final boolean useBQSRFile, final boolean useBeforeFile, final boolean useAfterFile)
throws IOException {
final WalkerTestSpec spec = new WalkerTestSpec(buildCommandLine(useCsvFile,usePdfFile,
useBQSRFile,useBeforeFile,useAfterFile),0,UserException.class);
executeTest("testInOutAbsencePresenceException", spec);
}
/**
* Test combinations of input and output inclusion exclusion of the
* command line that won't cause an exception.
*
* @param useCsvFile whether to include the output csv file.
* @param usePdfFile whether to include the output pdf file.
* @param useBQSRFile whether to include the -BQSR input file.
* @param useBeforeFile whether to include the -before input file.
* @param useAfterFile whether to include the -after input file.
* @throws IOException never thrown, unless there is a problem with the testing environment.
*/
@Test(enabled = true, dataProvider="alternativeInOutAbsenceCombinations")
public void testInOutAbsence(final boolean useCsvFile, final boolean usePdfFile,
final boolean useBQSRFile, final boolean useBeforeFile, final boolean useAfterFile)
throws IOException {
final List<String> md5 = Collections.emptyList();
final WalkerTestSpec spec = new WalkerTestSpec(buildCommandLine(useCsvFile,usePdfFile,
useBQSRFile,useBeforeFile,useAfterFile),md5);
executeTest("testInOutAbsencePresence", spec);
}
@DataProvider
public Iterator<Object[]> alternativeInOutAbsenceCombinations(Method m) {
List<Object[]> result = new LinkedList<Object[]>();
if (m.getName().endsWith("Exception")) {
result.add(new Object[] { false, false, true, true, true });
result.add(new Object[] { true, true, false, false ,false});
}
else {
result.add(new Object[] { true, true, true, false, false });
result.add(new Object[] { true, true, false, true, false });
result.add(new Object[] { true, true, false, false, true });
result.add(new Object[] { true, false,false, true, false });
result.add(new Object[] { false, true, true, false, false });
}
return result.iterator();
}
/**
* Provide recalibration parameter change data to relevant tests.
* @param m target test method.
* @return never <code>null</code>.
*/
@DataProvider
public Iterator<Object[]> alternativeAfterFileProvider (Method m) {
final boolean expectsException = m.getName().endsWith("Exception");
final List<Object[]> result = new LinkedList<Object[]>();
for (final Object[] data : DIFFERENT_PARAMETERS_AFTER_FILES) {
if (data[1].equals(expectsException)) {
result.add(new Object[] { data[0], data[2] });
}
}
return result.iterator();
}
/**
* Triplets < after-grp-file, whether it should fail, what is different >
*/
private final Object[][] DIFFERENT_PARAMETERS_AFTER_FILES = {
{"after-cov.table", true, "Adds additional covariate: repeat-length" },
{"after-dpSOLID.table", true, "Change the default platform to SOLID" },
{"after-noDp.table",true, "Unset the default platform" },
{"after-mcs4.table", true, "Changed -mcs parameter from 2 to 4" }
};
/**
* Build the AC command line given what combinations of input and output files should be included.
*
* @param useCsvFile whether to include the output csv file.
* @param usePdfFile whether to include the output pdf file.
* @param useBQSRFile whether to include the -BQSR input file.
* @param useBeforeFile whether to include the -before input file.
* @param useAfterFile whether to include the -after input file.
* @return never <code>null</code>.
* @throws IOException never thrown, unless there is a problem with the testing environment.
*/
private String buildCommandLine(final boolean useCsvFile, final boolean usePdfFile,
final boolean useBQSRFile, final boolean useBeforeFile, final boolean useAfterFile)
throws IOException {
final File csvFile = useCsvFile ? createTempFile("ACTest",".csv") : null;
final File pdfFile = usePdfFile ? createTempFile("ACTest",".pdf") : null;
return buildCommandLine(csvFile == null ? null : csvFile.toString(),
pdfFile == null ? null : pdfFile.toString(),
useBQSRFile,useBeforeFile,useAfterFile);
}
/**
* Build the AC command line given the output file names explicitly and what test input files to use.
* <p/>
*
* @param csvFileName the csv output file, <code>null</code> if none should be provided.
* @param pdfFileName the plots output file, <code>null</code> if none should be provided.
* @param useBQSRFile whether to include the -BQSR input file.
* @param useBeforeFile whether to include the -before input file.
* @param useAfterFile whether to include the -after input file.
*
* @return never <code>null</code>.
*/
private String buildCommandLine(final String csvFileName, final String pdfFileName, final boolean useBQSRFile,
final boolean useBeforeFile, final boolean useAfterFile) {
return buildCommandLine(csvFileName,pdfFileName,useBQSRFile ? BQSR_FILE : null,
useBeforeFile ? BEFORE_FILE : null,
useAfterFile ? AFTER_FILE : null);
}
/**
* Build the AC command line given the output file names and the after file name explicitly and what other
* test input files to use.
* <p/>
*
* @param csvFileName the csv output file, <code>null</code> if none should be provided.
* @param pdfFileName the plots output file, <code>null</code> if none should be provided.
* @param useBQSRFile whether to include the -BQSR input file.
* @param useBeforeFile whether to include the -before input file.
* @param afterFile the after input report file, <code>null</code> if none should be provided.
*
* @return never <code>null</code>.
*/
private String buildCommandLine(final String csvFileName, final String pdfFileName, final boolean useBQSRFile,
final boolean useBeforeFile, final File afterFile) {
return buildCommandLine(csvFileName,pdfFileName,useBQSRFile ? BQSR_FILE : null,
useBeforeFile ? BEFORE_FILE : null,
afterFile);
}
/**
* Build the AC command line given the output file names and the after file name explicitly and what other
* test input files to use.
* <p/>
*
* @param csvFileName the csv output file, <code>null</code> if none should be provided.
* @param pdfFileName the plots output file, <code>null</code> if none should be provided.
* @param bqsrFile the BQSR input report file, <code>null</code> if none should be provided.
* @param beforeFile the before input report file, <code>null</code> if none should be provided.
* @param afterFile the after input report file, <code>null</code> if none should be provided.
*
* @return never <code>null</code>.
*/
private String buildCommandLine(final String csvFileName, final String pdfFileName, final File bqsrFile,
final File beforeFile, final File afterFile) {
final List<String> args = new LinkedList<String>();
args.add("-T");
args.add(TOOL_NAME);
args.add("-R");
args.add(hg19Reference);
args.add("-ignoreLMT");
if (csvFileName != null) {
args.add("-" + AnalyzeCovariates.CSV_ARG_SHORT_NAME);
args.add("'" + csvFileName + "'");
}
if (pdfFileName != null) {
args.add("-" + AnalyzeCovariates.PDF_ARG_SHORT_NAME);
args.add("'" + pdfFileName + "'");
}
if (bqsrFile != null) {
args.add("-BQSR");
args.add("'" + bqsrFile.getAbsoluteFile().toString() + "'");
}
if (beforeFile != null) {
args.add("-" + AnalyzeCovariates.BEFORE_ARG_SHORT_NAME);
args.add("'" + beforeFile.getAbsolutePath().toString() + "'");
}
if (afterFile != null) {
args.add("-" + AnalyzeCovariates.AFTER_ARG_SHORT_NAME);
args.add("'" + afterFile.getAbsolutePath().toString() + "'");
}
return Utils.join(" ", args);
}
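/*
* Illustrative only: roughly the command line assembled by the helpers above. The short-name constants
* are shown symbolically, since their concrete values are defined in AnalyzeCovariates and are not
* visible in this file:
*
*   -T AnalyzeCovariates -R <hg19Reference> -ignoreLMT
*       -<CSV_ARG_SHORT_NAME> '<tmp>.csv' -<PDF_ARG_SHORT_NAME> '<tmp>.pdf'
*       -BQSR '<TEST_DATA_DIR>/bqsr.table'
*       -<BEFORE_ARG_SHORT_NAME> '<TEST_DATA_DIR>/before.table'
*       -<AFTER_ARG_SHORT_NAME> '<TEST_DATA_DIR>/after.table'
*/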
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/graphs/BaseGraph.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller.graphs;
import com.google.java.contract.Ensures;
import com.google.java.contract.Invariant;
import com.google.java.contract.Requires;
import org.apache.commons.lang.ArrayUtils;
import org.apache.log4j.Logger;
import org.jgrapht.EdgeFactory;
import org.jgrapht.alg.CycleDetector;
import org.jgrapht.graph.DefaultDirectedGraph;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.PrintStream;
import java.util.*;
/**
* Created with IntelliJ IDEA.
* User: rpoplin
* Date: 2/6/13
*/
@Invariant("!this.isAllowingMultipleEdges()")
public class BaseGraph<V extends BaseVertex, E extends BaseEdge> extends DefaultDirectedGraph<V, E> {
protected final static Logger logger = Logger.getLogger(BaseGraph.class);
protected final int kmerSize;
/**
* Construct a BaseGraph with the given kmer size
* @param kmerSize the kmer size used to build this graph, must be >= 1
* @param edgeFactory the factory used to create edges of type E for this graph
*/
public BaseGraph(final int kmerSize, final EdgeFactory<V,E> edgeFactory) {
super(edgeFactory);
if ( kmerSize < 1 ) throw new IllegalArgumentException("kmerSize must be >= 1 but got " + kmerSize);
this.kmerSize = kmerSize;
}
/**
* How big of a kmer did we use to create this graph?
* @return the kmer size used to create this graph
*/
public int getKmerSize() {
return kmerSize;
}
/**
* @param v the vertex to test
* @return true if this vertex is a reference node (meaning that it appears on the reference path in the graph)
*/
public boolean isReferenceNode( final V v ) {
if( v == null ) { throw new IllegalArgumentException("Attempting to test a null vertex."); }
for ( final BaseEdge e : edgesOf(v) ) {
if ( e.isRef() ) { return true; }
}
// edge case: if the graph only has one node then it's a ref node, otherwise it's not
return (vertexSet().size() == 1);
}
/**
* @param v the vertex to test
* @return true if this vertex is a source node (in degree == 0)
*/
public boolean isSource( final V v ) {
if( v == null ) { throw new IllegalArgumentException("Attempting to test a null vertex."); }
return inDegreeOf(v) == 0;
}
/**
* @param v the vertex to test
* @return true if this vertex is a sink node (out degree == 0)
*/
public boolean isSink( final V v ) {
if( v == null ) { throw new IllegalArgumentException("Attempting to test a null vertex."); }
return outDegreeOf(v) == 0;
}
/**
* Get the set of source vertices of this graph
* @return a non-null set
*/
public Set<V> getSources() {
final Set<V> set = new LinkedHashSet<V>();
for ( final V v : vertexSet() )
if ( isSource(v) )
set.add(v);
return set;
}
/**
* Get the set of sink vertices of this graph
* @return a non-null set
*/
public Set<V> getSinks() {
final Set<V> set = new LinkedHashSet<V>();
for ( final V v : vertexSet() )
if ( isSink(v) )
set.add(v);
return set;
}
/**
* Convert this kmer graph to a simple sequence graph.
*
* Each kmer suffix shows up as a distinct SeqVertex, attached in the same structure as in the kmer
* graph. Nodes that are sources are mapped to SeqVertex nodes that contain all of their sequence
*
* @return a newly allocated SequenceGraph
*/
public SeqGraph convertToSequenceGraph() {
final SeqGraph seqGraph = new SeqGraph(kmerSize);
final Map<V, SeqVertex> vertexMap = new HashMap<>();
// create all of the equivalent seq graph vertices
for ( final V dv : vertexSet() ) {
final SeqVertex sv = new SeqVertex(dv.getAdditionalSequence(isSource(dv)));
sv.setAdditionalInfo(dv.additionalInfo());
vertexMap.put(dv, sv);
seqGraph.addVertex(sv);
}
// walk through the nodes and connect them to their equivalent seq vertices
for( final E e : edgeSet() ) {
final SeqVertex seqInV = vertexMap.get(getEdgeSource(e));
final SeqVertex seqOutV = vertexMap.get(getEdgeTarget(e));
//logger.info("Adding edge " + seqInV + " -> " + seqOutV);
seqGraph.addEdge(seqInV, seqOutV, new BaseEdge(e.isRef(), e.getMultiplicity()));
}
return seqGraph;
}
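/*
* Usage sketch, illustrative only; the concrete kmer-graph subclass building `kmerGraph` is assumed,
* while SeqGraph, SeqVertex and BaseEdge are the types used in the method body above.
*
*   BaseGraph<MyKmerVertex, MyKmerEdge> kmerGraph = ...; // assembled elsewhere with some kmer size
*   SeqGraph seqGraph = kmerGraph.convertToSequenceGraph();
*   // each kmer vertex becomes a SeqVertex holding only its additional (suffix) bases, except
*   // source vertices, which keep their full sequence; edge ref flags and multiplicities are preserved
*/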
/**
* Pull out the additional sequence implied by traversing this node in the graph
* @param v the vertex from which to pull out the additional base sequence
* @return non-null byte array
*/
@Ensures({"result != null"})
public byte[] getAdditionalSequence( final V v ) {
if( v == null ) { throw new IllegalArgumentException("Attempting to pull sequence from a null vertex."); }
return v.getAdditionalSequence(isSource(v));
}
/**
* @param v the vertex to test
* @return true if this vertex is a reference source
*/
public boolean isRefSource( final V v ) {
if( v == null ) { throw new IllegalArgumentException("Attempting to test a null vertex."); }
// confirm that no incoming edges are reference edges
for ( final E edgeToTest : incomingEdgesOf(v) ) {
if ( edgeToTest.isRef() ) { return false; }
}
// confirm that there is an outgoing reference edge
for ( final E edgeToTest : outgoingEdgesOf(v) ) {
if ( edgeToTest.isRef() ) { return true; }
}
// edge case: if the graph only has one node then it's a ref source, otherwise it's not
return (vertexSet().size() == 1);
}
/**
* @param v the vertex to test
* @return true if this vertex is a reference sink
*/
public boolean isRefSink( final V v ) {
if( v == null ) { throw new IllegalArgumentException("Attempting to test a null vertex."); }
// confirm that no outgoing edges are reference edges
for ( final E edgeToTest : outgoingEdgesOf(v) ) {
if ( edgeToTest.isRef() ) { return false; }
}
// confirm that there is an incoming reference edge
for ( final E edgeToTest : incomingEdgesOf(v) ) {
if ( edgeToTest.isRef() ) { return true; }
}
// edge case: if the graph only has one node then it's a ref sink, otherwise it's not
return (vertexSet().size() == 1);
}
/**
* @return the reference source vertex pulled from the graph, can be null if it doesn't exist in the graph
*/
public V getReferenceSourceVertex( ) {
for( final V v : vertexSet() ) {
if( isRefSource(v) ) {
return v;
}
}
return null;
}
/**
* @return the reference sink vertex pulled from the graph, can be null if it doesn't exist in the graph
*/
public V getReferenceSinkVertex( ) {
for( final V v : vertexSet() ) {
if( isRefSink(v) ) {
return v;
}
}
return null;
}
/**
* Traverse the graph and get the next reference vertex if it exists
* @param v the current vertex, can be null
* @return the next reference vertex if it exists, otherwise null
*/
public V getNextReferenceVertex( final V v ) {
return getNextReferenceVertex(v, false, Collections.<MultiSampleEdge>emptyList());
}
/**
* Traverse the graph and get the next reference vertex if it exists
* @param v the current vertex, can be null
* @param allowNonRefPaths if true, allow sub-paths that are non-reference if there is only a single outgoing edge
* @param blacklistedEdges edges to ignore in the traversal down; useful to exclude the non-reference dangling paths
* @return the next vertex (but not necessarily on the reference path if allowNonRefPaths is true) if it exists, otherwise null
*/
public V getNextReferenceVertex( final V v, final boolean allowNonRefPaths, final Collection<MultiSampleEdge> blacklistedEdges ) {
if( v == null ) { return null; }
// variable must be mutable because outgoingEdgesOf is an immutable collection
Set<E> edges = outgoingEdgesOf(v);
for( final E edgeToTest : edges ) {
if( edgeToTest.isRef() ) {
return getEdgeTarget(edgeToTest);
}
}
// if we got here, then we aren't on a reference path
if ( allowNonRefPaths ) {
edges = new HashSet<>(edges); // edges was immutable
edges.removeAll(blacklistedEdges);
if ( edges.size() == 1 )
return getEdgeTarget(edges.iterator().next());
}
return null;
}
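/*
* Usage sketch, illustrative only; `graph` stands for any BaseGraph instance. The usual way to walk
* the reference path with these accessors:
*
*   V cur = graph.getReferenceSourceVertex();
*   while ( cur != null ) {
*       // ... inspect or collect cur ...
*       cur = graph.getNextReferenceVertex(cur); // returns null once we run off the reference path
*   }
*/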
/**
* Traverse the graph and get the previous reference vertex if it exists
* @param v the current vertex, can be null
* @return the previous reference vertex if it exists, otherwise null
*/
public V getPrevReferenceVertex( final V v ) {
if( v == null ) { return null; }
for( final E edgeToTest : incomingEdgesOf(v) ) {
if( isReferenceNode(getEdgeSource(edgeToTest)) ) {
return getEdgeSource(edgeToTest);
}
}
return null;
}
/**
* Does a reference path exist between the two vertices?
* @param fromVertex from this vertex, can be null
* @param toVertex to this vertex, can be null
* @return true if a reference path exists in the graph between the two vertices
*/
public boolean referencePathExists(final V fromVertex, final V toVertex) {
V v = fromVertex;
if( v == null ) {
return false;
}
v = getNextReferenceVertex(v);
if( v == null ) {
return false;
}
while( !v.equals(toVertex) ) {
v = getNextReferenceVertex(v);
if( v == null ) {
return false;
}
}
return true;
}
/**
* Walk along the reference path in the graph and pull out the corresponding bases
* @param fromVertex starting vertex
* @param toVertex ending vertex
* @param includeStart should the starting vertex be included in the path
* @param includeStop should the ending vertex be included in the path
* @return byte[] array holding the reference bases; this can be null if there are no nodes between the starting and ending vertex (insertions, for example)
*/
public byte[] getReferenceBytes( final V fromVertex, final V toVertex, final boolean includeStart, final boolean includeStop ) {
if( fromVertex == null ) { throw new IllegalArgumentException("Starting vertex in requested path cannot be null."); }
if( toVertex == null ) { throw new IllegalArgumentException("Ending vertex in requested path cannot be null."); }
byte[] bytes = null;
V v = fromVertex;
if( includeStart ) {
bytes = ArrayUtils.addAll(bytes, getAdditionalSequence(v));
}
v = getNextReferenceVertex(v); // advance along the reference path
while( v != null && !v.equals(toVertex) ) {
bytes = ArrayUtils.addAll(bytes, getAdditionalSequence(v));
v = getNextReferenceVertex(v); // advance along the reference path
}
if( includeStop && v != null && v.equals(toVertex)) {
bytes = ArrayUtils.addAll(bytes, getAdditionalSequence(v));
}
return bytes;
}
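/*
* Usage sketch, illustrative only; `graph` stands for any BaseGraph instance that contains a
* reference path.
*
*   V refSource = graph.getReferenceSourceVertex();
*   V refSink   = graph.getReferenceSinkVertex();
*   byte[] refBases = graph.getReferenceBytes(refSource, refSink, true, true);
*   // refBases now spells out the reference haplotype encoded by the graph, including both end vertices
*/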
/**
* Convenience function to add multiple vertices to the graph at once
* @param vertices one or more vertices to add
*/
public void addVertices(final V... vertices) {
for ( final V v : vertices )
addVertex(v);
}
/**
* Convenience function to add multiple vertices to the graph at once
* @param vertices one or more vertices to add
*/
public void addVertices(final Collection<V> vertices) {
for ( final V v : vertices )
addVertex(v);
}
/**
* Convenience function to add multiple edges to the graph
* @param start the first vertex to connect
* @param remaining all additional vertices to connect
*/
public void addEdges(final V start, final V... remaining) {
V prev = start;
for ( final V next : remaining ) {
addEdge(prev, next);
prev = next;
}
}
/**
* Convenience function to add multiple edges to the graph
* @param start the first vertex to connect
* @param remaining all additional vertices to connect
*/
public void addEdges(final E template, final V start, final V... remaining) {
V prev = start;
for ( final V next : remaining ) {
addEdge(prev, next, (E)(template.copy())); // TODO -- is there a better way to do this?
prev = next;
}
}
/**
* Get the set of vertices connected by outgoing edges of V
* @param v a non-null vertex
* @return a set of vertices connected by outgoing edges from v
*/
public Set<V> outgoingVerticesOf(final V v) {
final Set<V> s = new LinkedHashSet<V>();
for ( final E e : outgoingEdgesOf(v) ) {
s.add(getEdgeTarget(e));
}
return s;
}
/**
* Get the set of vertices connected to v by incoming edges
* @param v a non-null vertex
* @return a set of vertices {X} connected X -> v
*/
public Set<V> incomingVerticesOf(final V v) {
final Set<V> s = new LinkedHashSet<V>();
for ( final E e : incomingEdgesOf(v) ) {
s.add(getEdgeSource(e));
}
return s;
}
/**
* Get the set of vertices connected to v by incoming or outgoing edges
* @param v a non-null vertex
* @return a set of vertices {X} connected X -> v or v -> Y
*/
public Set<V> neighboringVerticesOf(final V v) {
final Set<V> s = incomingVerticesOf(v);
s.addAll(outgoingVerticesOf(v));
return s;
}
/**
* Print out the graph in the dot language for visualization
* @param destination File to write to
*/
public void printGraph(final File destination, final int pruneFactor) {
PrintStream stream = null;
try {
stream = new PrintStream(new FileOutputStream(destination));
printGraph(stream, true, pruneFactor);
} catch ( FileNotFoundException e ) {
throw new RuntimeException(e);
} finally {
if ( stream != null ) stream.close();
}
}
public void printGraph(final PrintStream graphWriter, final boolean writeHeader, final int pruneFactor) {
if ( writeHeader )
graphWriter.println("digraph assemblyGraphs {");
for( final E edge : edgeSet() ) {
graphWriter.println("\t" + getEdgeSource(edge).toString() + " -> " + getEdgeTarget(edge).toString() + " [" + (edge.getMultiplicity() > 0 && edge.getMultiplicity() <= pruneFactor ? "style=dotted,color=grey," : "") + "label=\"" + edge.getDotLabel() + "\"];");
if( edge.isRef() ) {
graphWriter.println("\t" + getEdgeSource(edge).toString() + " -> " + getEdgeTarget(edge).toString() + " [color=red];");
}
}
for( final V v : vertexSet() ) {
// graphWriter.println("\t" + v.toString() + " [label=\"" + v + "\",shape=box]");
graphWriter.println("\t" + v.toString() + " [label=\"" + new String(getAdditionalSequence(v)) + v.additionalInfo() + "\",shape=box]");
}
if ( writeHeader )
graphWriter.println("}");
}
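// Dot-output sketch (hedged; "assembly.dot" and pruneFactor 2 are arbitrary example values):
//   graph.printGraph(new File("assembly.dot"), 2);
// Edges with 0 < multiplicity <= 2 are drawn dotted/grey and reference edges get an extra red edge;
// the file can then be rendered with Graphviz, e.g. "dot -Tpng assembly.dot -o assembly.png".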
/**
* Remove edges that are connected before the reference source and after the reference sink
*
* Also removes all vertices that are orphaned by this process
*/
public void cleanNonRefPaths() {
if( getReferenceSourceVertex() == null || getReferenceSinkVertex() == null ) {
return;
}
// Remove non-ref edges connected before and after the reference path
final Set<E> edgesToCheck = new HashSet<E>();
edgesToCheck.addAll(incomingEdgesOf(getReferenceSourceVertex()));
while( !edgesToCheck.isEmpty() ) {
final E e = edgesToCheck.iterator().next();
if( !e.isRef() ) {
edgesToCheck.addAll( incomingEdgesOf(getEdgeSource(e)) );
removeEdge(e);
}
edgesToCheck.remove(e);
}
edgesToCheck.addAll(outgoingEdgesOf(getReferenceSinkVertex()));
while( !edgesToCheck.isEmpty() ) {
final E e = edgesToCheck.iterator().next();
if( !e.isRef() ) {
edgesToCheck.addAll( outgoingEdgesOf(getEdgeTarget(e)) );
removeEdge(e);
}
edgesToCheck.remove(e);
}
removeSingletonOrphanVertices();
}
/**
* Prune all chains from this graph where any edge in the path has multiplicity < pruneFactor
*
* @see LowWeightChainPruner for more information
*
* @param pruneFactor all edges with multiplicity < this factor that aren't ref edges will be removed
*/
public void pruneLowWeightChains( final int pruneFactor ) {
final LowWeightChainPruner<V,E> pruner = new LowWeightChainPruner<>(pruneFactor);
pruner.pruneLowWeightChains(this);
}
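// Typical cleanup sketch using only methods defined in this class (the order shown is illustrative,
// not something this class prescribes):
//   graph.pruneLowWeightChains(2);          // prune low-multiplicity, non-ref chains (see LowWeightChainPruner)
//   graph.removeSingletonOrphanVertices();  // remove vertices left with no incoming or outgoing edges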
/**
* Remove all vertices in the graph that have in and out degree of 0
*/
public void removeSingletonOrphanVertices() {
// Run through the graph and clean up singular orphaned nodes
final List<V> verticesToRemove = new LinkedList<>();
for( final V v : vertexSet() ) {
if( inDegreeOf(v) == 0 && outDegreeOf(v) == 0 && !isRefSource(v) ) {
verticesToRemove.add(v);
}
}
removeAllVertices(verticesToRemove);
}
/**
* Remove all vertices on the graph that cannot be accessed by following any edge,
* regardless of its direction, from the reference source vertex
*/
public void removeVerticesNotConnectedToRefRegardlessOfEdgeDirection() {
final HashSet<V> toRemove = new HashSet<>(vertexSet());
final V refV = getReferenceSourceVertex();
if ( refV != null ) {
for ( final V v : new BaseGraphIterator<>(this, refV, true, true) ) {
toRemove.remove(v);
}
}
removeAllVertices(toRemove);
}
/**
* Remove all vertices in the graph that aren't on a path from the reference source vertex to the reference sink vertex
*
* More aggressive reference pruning algorithm than removeVerticesNotConnectedToRefRegardlessOfEdgeDirection:
* it not only requires vertices to be connected by a series of directed edges from the reference source,
* but also prunes away paths that do not eventually reach the reference sink vertex
*/
public void removePathsNotConnectedToRef() {
if ( getReferenceSourceVertex() == null || getReferenceSinkVertex() == null ) {
throw new IllegalStateException("Graph must have ref source and sink vertices");
}
// get the set of vertices we can reach by going forward from the ref source
final Set<V> onPathFromRefSource = new HashSet<>(vertexSet().size());
for ( final V v : new BaseGraphIterator<>(this, getReferenceSourceVertex(), false, true) ) {
onPathFromRefSource.add(v);
}
// get the set of vertices we can reach by going backward from the ref sink
final Set<V> onPathFromRefSink = new HashSet<>(vertexSet().size());
for ( final V v : new BaseGraphIterator<>(this, getReferenceSinkVertex(), true, false) ) {
onPathFromRefSink.add(v);
}
// we want to remove anything that's not in both the sink and source sets
final Set<V> verticesToRemove = new HashSet<>(vertexSet());
onPathFromRefSource.retainAll(onPathFromRefSink);
verticesToRemove.removeAll(onPathFromRefSource);
removeAllVertices(verticesToRemove);
// simple sanity checks that this algorithm is working.
if ( getSinks().size() > 1 ) {
throw new IllegalStateException("Should have eliminated all but the reference sink, but found " + getSinks());
}
if ( getSources().size() > 1 ) {
throw new IllegalStateException("Should have eliminated all but the reference source, but found " + getSources());
}
}
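// Sketch contrasting the two reference-connectivity cleanups above (illustrative; the stricter call
// throws if the graph lacks a reference source or sink vertex):
//   graph.removeVerticesNotConnectedToRefRegardlessOfEdgeDirection(); // keep anything reachable from the ref source, ignoring direction
//   graph.removePathsNotConnectedToRef();                             // keep only vertices on ref source -> ref sink paths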
/**
* Semi-lenient comparison of two graphs, returning true if g1 and g2 have similar structure
*
* By similar this means that both graphs have the same number of vertices, where each vertex can find
* a vertex in the other graph that's seqEqual to it. A similar constraint applies to the edges,
* where all edges in g1 must have a corresponding edge in g2 where both source and target vertices are
* seqEqual
*
* @param g1 the first graph to compare
* @param g2 the second graph to compare
* @param <T> the type of the nodes in those graphs
* @return true if g1 and g2 are equal
*/
public static <T extends BaseVertex, E extends BaseEdge> boolean graphEquals(final BaseGraph<T,E> g1, BaseGraph<T,E> g2) {
final Set<T> vertices1 = g1.vertexSet();
final Set<T> vertices2 = g2.vertexSet();
final Set<E> edges1 = g1.edgeSet();
final Set<E> edges2 = g2.edgeSet();
if ( vertices1.size() != vertices2.size() || edges1.size() != edges2.size() )
return false;
for ( final T v1 : vertices1 ) {
boolean found = false;
for ( final T v2 : vertices2 )
found = found || v1.getSequenceString().equals(v2.getSequenceString());
if ( ! found ) return false;
}
for( final E e1 : g1.edgeSet() ) {
boolean found = false;
for( E e2 : g2.edgeSet() ) {
if( g1.seqEquals(e1, e2, g2) ) { found = true; break; }
}
if( !found ) { return false; }
}
for( final E e2 : g2.edgeSet() ) {
boolean found = false;
for( E e1 : g1.edgeSet() ) {
if( g2.seqEquals(e2, e1, g1) ) { found = true; break; }
}
if( !found ) { return false; }
}
return true;
}
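// Example of the structural comparison above, e.g. in a TestNG test (hedged sketch; g1 and g2 are
// assumed to be two independently built graphs over the same vertex/edge types):
//   Assert.assertTrue(BaseGraph.graphEquals(g1, g2));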
// For use when comparing edges across graphs!
private boolean seqEquals( final E edge1, final E edge2, final BaseGraph<V,E> graph2 ) {
return (this.getEdgeSource(edge1).seqEquals(graph2.getEdgeSource(edge2))) && (this.getEdgeTarget(edge1).seqEquals(graph2.getEdgeTarget(edge2)));
}
/**
* Get the incoming edge of v. Requires that there be only one such edge or throws an error
* @param v our vertex
* @return the single incoming edge to v, or null if none exists
*/
public E incomingEdgeOf(final V v) {
return getSingletonEdge(incomingEdgesOf(v));
}
/**
* Get the outgoing edge of v. Requires that there be only one such edge or throws an error
* @param v our vertex
* @return the single outgoing edge from v, or null if none exists
*/
public E outgoingEdgeOf(final V v) {
return getSingletonEdge(outgoingEdgesOf(v));
}
/**
* Helper function that gets the single edge from edges, returning null if edges is empty, or
* throwing an error if edges has more than 1 element
* @param edges a set of edges
* @return an edge
*/
@Requires("edges != null")
private E getSingletonEdge(final Collection<E> edges) {
if ( edges.size() > 1 ) throw new IllegalArgumentException("Cannot get a single incoming edge for a vertex with multiple incoming edges " + edges);
return edges.isEmpty() ? null : edges.iterator().next();
}
/**
* Add edge between source -> target if none exists, or add e to an already existing one if present
*
* @param source source vertex
* @param target the target vertex
* @param e edge to add
*/
public void addOrUpdateEdge(final V source, final V target, final E e) {
final E prev = getEdge(source, target);
if ( prev != null ) {
prev.add(e);
} else {
addEdge(source, target, e);
}
}
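// Sketch of the merge behaviour (a, b, e1 and e2 are placeholder vertices/edges, not names from this file):
//   graph.addOrUpdateEdge(a, b, e1); // no a -> b edge yet, so e1 becomes that edge
//   graph.addOrUpdateEdge(a, b, e2); // edge exists, so e2 is folded into it via BaseEdge.add() rather than creating a parallel edge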
@Override
public String toString() {
return "BaseGraph{" +
"kmerSize=" + kmerSize +
'}';
}
/**
* Get the set of vertices within distance edges of source, regardless of edge direction
*
* @param source the source vertex to consider
* @param distance the distance
* @return a set of vertices within distance of source
*/
protected Set<V> verticesWithinDistance(final V source, final int distance) {
if ( distance == 0 )
return Collections.singleton(source);
final Set<V> found = new HashSet<>();
found.add(source);
for ( final V v : neighboringVerticesOf(source) ) {
found.addAll(verticesWithinDistance(v, distance - 1));
}
return found;
}
/**
* Get a graph containing only the vertices within distance edges of target
* @param target a vertex in graph
* @param distance the max distance
* @return a non-null graph
*/
public BaseGraph<V,E> subsetToNeighbors(final V target, final int distance) {
if ( target == null ) throw new IllegalArgumentException("Target cannot be null");
if ( ! containsVertex(target) ) throw new IllegalArgumentException("Graph doesn't contain vertex " + target);
if ( distance < 0 ) throw new IllegalArgumentException("Distance must be >= 0 but got " + distance);
final Set<V> toKeep = verticesWithinDistance(target, distance);
final Set<V> toRemove = new HashSet<>(vertexSet());
toRemove.removeAll(toKeep);
final BaseGraph<V,E> result = (BaseGraph<V,E>)clone();
result.removeAllVertices(toRemove);
return result;
}
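// Usage sketch (illustrative; v is assumed to be a vertex already present in this graph):
//   final BaseGraph<V,E> local = graph.subsetToNeighbors(v, 5); // cloned graph restricted to vertices within 5 edges of v
//   local.printGraph(new File("neighborhood.dot"), 0);          // convenient for inspecting a small region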
/**
* Get a subgraph of graph that contains only vertices within 10 edges of the ref source vertex
* @return a non-null subgraph of this graph
*/
public BaseGraph<V,E> subsetToRefSource() {
return subsetToNeighbors(getReferenceSourceVertex(), 10);
}
/**
* Checks whether the graph contains all the vertices in a collection.
*
* @param vertices the vertices to check.
*
* @throws IllegalArgumentException if {@code vertices} is {@code null}.
*
* @return {@code true} if all the vertices in the input collection are present in this graph.
* Also if the input collection is empty. Otherwise it returns {@code false}.
*/
public boolean containsAllVertices(final Collection<? extends V> vertices) {
if (vertices == null) throw new IllegalArgumentException("the input vertices collection cannot be null");
for (final V vertex : vertices)
if (!containsVertex(vertex)) return false;
return true;
}
/**
* Checks for the presence of directed cycles in the graph.
*
* @return {@code true} if the graph has cycles, {@code false} otherwise.
*/
public boolean hasCycles() {
return new CycleDetector<>(this).detectCycles();
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/FamilyLikelihoodsUtils.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.variantutils;
import htsjdk.variant.vcf.VCFConstants;
import org.apache.log4j.Logger;
import org.broadinstitute.gatk.engine.samples.Sample;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.QualityUtils;
import org.broadinstitute.gatk.utils.Utils;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
import htsjdk.variant.utils.GeneralUtils;
import htsjdk.variant.variantcontext.*;
import java.util.Arrays;
import java.util.*;
/**
* FamilyLikelihoodsUtils code is based on PhaseByTransmission with added posterior probability calculations
*/
public class FamilyLikelihoodsUtils {
private static Logger logger = Logger.getLogger(FamilyLikelihoodsUtils.class);
//Matrix of Mendelian violation counts for all genotype combinations (filled by buildMatrices())
final private EnumMap<GenotypeType,EnumMap<GenotypeType,EnumMap<GenotypeType,Integer>>> mvCountMatrix =
new EnumMap<>(GenotypeType.class);
final int NUM_CALLED_GENOTYPETYPES = 3; //HOM_REF, HET, and HOM_VAR
double[] configurationLikelihoodsMatrix = new double[NUM_CALLED_GENOTYPETYPES*NUM_CALLED_GENOTYPETYPES*NUM_CALLED_GENOTYPETYPES];
ArrayList<Sample> trios = new ArrayList<>();
public final double NO_JOINT_VALUE = -1.0;
private double deNovoPrior = 1e-8;
private final double ONE_THIRD = 0.333333333333333333;
private final double LOG10_OF_ONE_THIRD = -0.4771213;
private enum FamilyMember {
MOTHER,
FATHER,
CHILD
}
/**
* Applies the trio genotype combination to the given trio.
* @param vc: The variant context whose alleles are used when building the updated genotypes
* @param motherGenotype: Original genotype of the mother
* @param fatherGenotype: Original genotype of the father
* @param childGenotype: Original genotype of the child
* @param updatedGenotypes: An ArrayList<Genotype> to which the newly updated genotypes are added in the following order: Mother, Father, Child
*/
public void getUpdatedGenotypes(final VariantContext vc, final Genotype motherGenotype, final Genotype fatherGenotype, final Genotype childGenotype, final ArrayList<Genotype> updatedGenotypes){
//genotypes here can be no call
boolean fatherIsCalled = fatherGenotype != null && hasCalledGT(fatherGenotype.getType()) && fatherGenotype.hasLikelihoods();
boolean motherIsCalled = motherGenotype != null && hasCalledGT(motherGenotype.getType()) && motherGenotype.hasLikelihoods();
boolean childIsCalled = childGenotype != null && hasCalledGT(childGenotype.getType()) && childGenotype.hasLikelihoods();
//default to posteriors equal to likelihoods (flat priors) in case input genotypes are not called
double[] uninformativeLikelihoods = {ONE_THIRD, ONE_THIRD, ONE_THIRD};
double[] motherLikelihoods = motherIsCalled? GeneralUtils.normalizeFromLog10(motherGenotype.getLikelihoods().getAsVector()) : uninformativeLikelihoods;
double[] fatherLikelihoods = fatherIsCalled? GeneralUtils.normalizeFromLog10(fatherGenotype.getLikelihoods().getAsVector()) : uninformativeLikelihoods;
double[] childLikelihoods = childIsCalled? GeneralUtils.normalizeFromLog10(childGenotype.getLikelihoods().getAsVector()) : uninformativeLikelihoods;
//these are also in log10 space
double[] motherLog10Posteriors = getPosteriors(FamilyMember.MOTHER);
double[] fatherLog10Posteriors = getPosteriors(FamilyMember.FATHER);
double[] childLog10Posteriors = getPosteriors(FamilyMember.CHILD);
double[] motherPosteriors = GeneralUtils.normalizeFromLog10(motherLog10Posteriors);
double[] fatherPosteriors = GeneralUtils.normalizeFromLog10(fatherLog10Posteriors);
double[] childPosteriors = GeneralUtils.normalizeFromLog10(childLog10Posteriors);
double jointPosteriorProbability = -1;
//jointTrioLikelihood is combined likelihoods (before prior) of best configuration after applying prior
double jointTrioLikelihood = -1;
if(childIsCalled && motherIsCalled && fatherIsCalled) {
jointTrioLikelihood = motherLikelihoods[MathUtils.maxElementIndex(motherPosteriors)]*fatherLikelihoods[MathUtils.maxElementIndex(fatherPosteriors)]*childLikelihoods[MathUtils.maxElementIndex(childPosteriors)];
jointPosteriorProbability = MathUtils.arrayMax(motherPosteriors)*MathUtils.arrayMax(fatherPosteriors)*MathUtils.arrayMax(childPosteriors);
}
updatedGenotypes.add(getUpdatedGenotype(vc, motherGenotype, jointTrioLikelihood, jointPosteriorProbability, motherLog10Posteriors));
updatedGenotypes.add(getUpdatedGenotype(vc, fatherGenotype, jointTrioLikelihood, jointPosteriorProbability, fatherLog10Posteriors));
updatedGenotypes.add(getUpdatedGenotype(vc, childGenotype, jointTrioLikelihood, jointPosteriorProbability, childLog10Posteriors));
}
private Genotype getUpdatedGenotype(final VariantContext vc, final Genotype genotype, final double jointLikelihood, final double jointPosteriorProb, final double[] log10Posteriors){
//Don't update null, missing or unavailable genotypes
if(genotype == null || !hasCalledGT(genotype.getType()))
return genotype;
int phredScaledJL = -1;
int phredScaledJP = -1;
if(jointLikelihood != NO_JOINT_VALUE){
double dphredScaledJL = QualityUtils.phredScaleLog10ErrorRate(Math.log10(1-jointLikelihood));
phredScaledJL = dphredScaledJL < Byte.MAX_VALUE ? (byte)dphredScaledJL : Byte.MAX_VALUE;
}
if(jointPosteriorProb != NO_JOINT_VALUE){
double dphredScaledJP = QualityUtils.phredScaleLog10ErrorRate(Math.log10(1-jointPosteriorProb));
phredScaledJP = dphredScaledJP < Byte.MAX_VALUE ? (byte)dphredScaledJP : Byte.MAX_VALUE;
}
//Add the joint trio calculations
final Map<String, Object> genotypeAttributes = new HashMap<>();
genotypeAttributes.putAll(genotype.getExtendedAttributes());
genotypeAttributes.put(GATKVCFConstants.JOINT_LIKELIHOOD_TAG_NAME, phredScaledJL);
genotypeAttributes.put(GATKVCFConstants.JOINT_POSTERIOR_TAG_NAME, phredScaledJP);
final GenotypeBuilder builder = new GenotypeBuilder(genotype);
//final double[] log10Posteriors = MathUtils.toLog10(normalizedPosteriors);
//update genotype types based on posteriors
GATKVariantContextUtils.updateGenotypeAfterSubsetting(vc.getAlleles(), genotype.getPloidy(), builder,
GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, log10Posteriors, vc.getAlleles());
builder.attribute(GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY,
Utils.listFromPrimitives(GenotypeLikelihoods.fromLog10Likelihoods(log10Posteriors).getAsPLs()));
builder.attributes(genotypeAttributes);
return builder.make();
}
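// Worked example of the phred-scaled JL/JP annotations above (arithmetic sketch, assuming the usual
// phred definition of -10 * log10(error)): a joint posterior probability of 0.999 gives
//   log10(1 - 0.999) = -3  ->  phred 30,
// and values are capped at Byte.MAX_VALUE (127) before being written to the genotype.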
//marginalize over the configurationLikelihoodsMatrix and normalize to get the posteriors
private double[] getPosteriors(final FamilyMember recalcInd) {
double[] marginalOverChangedHR = new double[NUM_CALLED_GENOTYPETYPES*NUM_CALLED_GENOTYPETYPES];
double[] marginalOverChangedHET = new double[NUM_CALLED_GENOTYPETYPES*NUM_CALLED_GENOTYPETYPES];
double[] marginalOverChangedHV = new double[NUM_CALLED_GENOTYPETYPES*NUM_CALLED_GENOTYPETYPES];
final double[] recalcPosteriors = new double[NUM_CALLED_GENOTYPETYPES];
final GenotypeType[] calledTypes = {GenotypeType.HOM_REF, GenotypeType.HET, GenotypeType.HOM_VAR};
int counter = 0;
switch (recalcInd) {
case MOTHER:
for(final GenotypeType father : calledTypes) {
for(final GenotypeType child : calledTypes) {
GenotypeType mother;
mother = GenotypeType.HOM_REF;
marginalOverChangedHR[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
mother = GenotypeType.HET;
marginalOverChangedHET[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
mother = GenotypeType.HOM_VAR;
marginalOverChangedHV[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
counter++;
}
}
break;
case FATHER:
for(final GenotypeType mother : calledTypes){
for (final GenotypeType child : calledTypes){
GenotypeType father;
father = GenotypeType.HOM_REF;
marginalOverChangedHR[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
father = GenotypeType.HET;
marginalOverChangedHET[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
father = GenotypeType.HOM_VAR;
marginalOverChangedHV[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
counter++;
}
}
break;
case CHILD:
for(final GenotypeType mother : calledTypes){
for (final GenotypeType father: calledTypes){
GenotypeType child;
child = GenotypeType.HOM_REF;
marginalOverChangedHR[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
child = GenotypeType.HET;
marginalOverChangedHET[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
child = GenotypeType.HOM_VAR;
marginalOverChangedHV[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
counter++;
}
}
break;
default:
throw new UserException(String.format("%d does not indicate a valid trio FamilyMember -- use 0 for mother, 1 for father, 2 for child",recalcInd.ordinal()));
}
recalcPosteriors[0] = MathUtils.log10sumLog10(marginalOverChangedHR,0);
recalcPosteriors[1] = MathUtils.log10sumLog10(marginalOverChangedHET,0);
recalcPosteriors[2] = MathUtils.log10sumLog10(marginalOverChangedHV,0);
return MathUtils.normalizeFromLog10(recalcPosteriors,true,true);
}
public void initialize(final double DNprior, final Set<String> vcfSamples, final Map<String,Set<Sample>> families){
this.deNovoPrior = DNprior;
Arrays.fill(configurationLikelihoodsMatrix,0);
buildMatrices();
trios = setTrios(vcfSamples, families);
}
public GenotypesContext calculatePosteriorGLs(final VariantContext vc){
final GenotypesContext genotypesContext = GenotypesContext.copy(vc.getGenotypes());
for (final Sample sample : trios) {
Genotype mother = vc.getGenotype(sample.getMaternalID());
Genotype father = vc.getGenotype(sample.getPaternalID());
Genotype child = vc.getGenotype(sample.getID());
//Keep only trios and parent/child pairs
if(mother == null && father == null || child == null) {
logger.warn("Null genotypes in variant: "+vc.toStringDecodeGenotypes());
continue;
}
final ArrayList<Genotype> trioGenotypes = new ArrayList<>(3);
updateFamilyGenotypes(vc, mother, father, child, trioGenotypes);
//replace uses sample names to match genotypes, so order doesn't matter
if (!trioGenotypes.isEmpty()) {
genotypesContext.replace(trioGenotypes.get(0));
genotypesContext.replace(trioGenotypes.get(1));
genotypesContext.replace(trioGenotypes.get(2));
}
}
return genotypesContext;
}
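// Typical call-sequence sketch (hedged; the sample-name set and families map are placeholders that
// would normally come from the pedigree/sample metadata, which is not shown in this file):
//   final FamilyLikelihoodsUtils utils = new FamilyLikelihoodsUtils();
//   utils.initialize(1e-8, vcfSampleNames, familiesBySampleId);
//   final GenotypesContext updated = utils.calculatePosteriorGLs(vc);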
/**
* Select trios and parent/child pairs only
*/
private ArrayList<Sample> setTrios(Set<String> vcfSamples, Map<String,Set<Sample>> families){
Set<Sample> family;
ArrayList<Sample> parents;
final ArrayList<Sample> trios = new ArrayList<>();
for(final Map.Entry<String,Set<Sample>> familyEntry : families.entrySet()){
family = familyEntry.getValue();
// Since getFamilies(vcfSamples) above still returns parents of samples in the VCF even if those parents are not in the VCF, need to subset down here:
final Set<Sample> familyMembersInVCF = new TreeSet<>();
for(final Sample familyMember : family){
if (vcfSamples.contains(familyMember.getID())) {
familyMembersInVCF.add(familyMember);
}
}
family = familyMembersInVCF;
if(family.size() == 3){
for(final Sample familyMember : family){
parents = familyMember.getParents();
if(parents.size()==2){
if(family.containsAll(parents))
trios.add(familyMember);
}
}
}
}
return trios;
}
//Create a lookup matrix to find the number of MVs for each family genotype combination
private void buildMatrices(){
for(final GenotypeType mother : GenotypeType.values()){
mvCountMatrix.put(mother,new EnumMap<GenotypeType,EnumMap<GenotypeType,Integer>>(GenotypeType.class));
for(final GenotypeType father : GenotypeType.values()){
mvCountMatrix.get(mother).put(father,new EnumMap<GenotypeType, Integer>(GenotypeType.class));
for(final GenotypeType child : GenotypeType.values()){
mvCountMatrix.get(mother).get(father).put(child, getCombinationMVCount(mother, father, child));
}
}
}
}
//Returns the number of Mendelian Violations for a given genotype combination.
//If one of the parents' genotypes is missing, it will consider it as a parent/child pair
//If the child genotype or both parents' genotypes are missing, 0 is returned.
private int getCombinationMVCount(GenotypeType mother, GenotypeType father, GenotypeType child){
//Child is no call => No MV
if(child == GenotypeType.NO_CALL || child == GenotypeType.UNAVAILABLE)
return 0;
//Add parents with genotypes for the evaluation
final ArrayList<GenotypeType> parents = new ArrayList<>();
if (!(mother == GenotypeType.NO_CALL || mother == GenotypeType.UNAVAILABLE))
parents.add(mother);
if (!(father == GenotypeType.NO_CALL || father == GenotypeType.UNAVAILABLE))
parents.add(father);
//Both parents no calls => No MV
if (parents.isEmpty())
return 0;
//If at least one parent had a genotype, then count the number of ref and alt alleles that can be passed
int parentsNumRefAlleles = 0;
int parentsNumAltAlleles = 0;
for(final GenotypeType parent : parents){
if(parent == GenotypeType.HOM_REF){
parentsNumRefAlleles++;
}
else if(parent == GenotypeType.HET){
parentsNumRefAlleles++;
parentsNumAltAlleles++;
}
else if(parent == GenotypeType.HOM_VAR){
parentsNumAltAlleles++;
}
}
//Case Child is HomRef
if(child == GenotypeType.HOM_REF){
if(parentsNumRefAlleles == parents.size())
return 0;
else return (parents.size()-parentsNumRefAlleles);
}
//Case child is HomVar
if(child == GenotypeType.HOM_VAR){
if(parentsNumAltAlleles == parents.size())
return 0;
else return parents.size()-parentsNumAltAlleles;
}
//Case child is Het
if(child == GenotypeType.HET && ((parentsNumRefAlleles > 0 && parentsNumAltAlleles > 0) || parents.size()<2))
return 0;
//MV
return 1;
}
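// A few concrete counts produced by the logic above (each value follows directly from the code):
//   mother HOM_REF, father HOM_REF, child HET     -> 1  (no parent carries the alt allele)
//   mother HOM_REF, father HOM_REF, child HOM_VAR -> 2  (neither of the child's alt alleles is explained)
//   mother HOM_REF, father HOM_VAR, child HET     -> 0  (a ref and an alt allele are both available)
//   mother NO_CALL, father HOM_REF, child HOM_VAR -> 1  (evaluated as a parent/child pair)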
/**
* Updates the genotypes of the given trio. If one of the parents is null, it is considered a parent/child pair.
* @param vc: Input variant context
* @param mother: Mother's genotype from vc input
* @param father: Father's genotype from vc input
* @param child: Child's genotype from vc input
* @param finalGenotypes: An ArrayList<Genotype> containing the updated genotypes
*/
private void updateFamilyGenotypes(VariantContext vc, Genotype mother, Genotype father, Genotype child, ArrayList<Genotype> finalGenotypes) {
//If one of the parents is not called, fill in with uninformative likelihoods
Map<GenotypeType,Double> motherLikelihoods = getLikelihoodsAsMapSafeNull(mother);
Map<GenotypeType,Double> fatherLikelihoods = getLikelihoodsAsMapSafeNull(father);
Map<GenotypeType,Double> childLikelihoods = getLikelihoodsAsMapSafeNull(child);
//if the child isn't called or neither parent is called, there's no extra inheritance information in that trio so return
if (!hasCalledGT(child.getType()) || (!hasCalledGT(mother.getType()) && !hasCalledGT(father.getType())))
return;
//Fill the configurationLikelihoodsMatrix for each genotype combination
int matInd;
int mvCount;
double jointLikelihood;
double mvCoeff;
double configurationLikelihood;
for(final Map.Entry<GenotypeType,Double> childGenotype :
childLikelihoods.entrySet()){
for(final Map.Entry<GenotypeType,Double> motherGenotype :
motherLikelihoods.entrySet()){
for(final Map.Entry<GenotypeType,Double> fatherGenotype :
fatherLikelihoods.entrySet()){
mvCount = mvCountMatrix.get(motherGenotype.getKey()).get(fatherGenotype.getKey()).get(childGenotype.getKey());
jointLikelihood = motherGenotype.getValue()+fatherGenotype.getValue()+childGenotype.getValue();
mvCoeff = mvCount>0 ? Math.pow(deNovoPrior,mvCount) : (1.0-10*deNovoPrior-deNovoPrior*deNovoPrior);
configurationLikelihood = Math.log10(mvCoeff) + jointLikelihood;
matInd = getLikelihoodMatrixIndex(motherGenotype.getKey(), fatherGenotype.getKey(), childGenotype.getKey());
configurationLikelihoodsMatrix[matInd] = configurationLikelihood;
}
}
}
getUpdatedGenotypes(vc, mother, father, child, finalGenotypes);
}
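// Numeric sketch of the prior weighting above, using the default deNovoPrior = 1e-8:
//   0 Mendelian violations -> mvCoeff = 1 - 10*1e-8 - (1e-8)^2, essentially 1 (log10 ~ -4.3e-8)
//   1 Mendelian violation  -> mvCoeff = 1e-8   (log10 = -8)
//   2 Mendelian violations -> mvCoeff = 1e-16  (log10 = -16)
// so each configuration's log10 likelihood is penalized by roughly 8 per violation before the
// per-member posteriors are extracted by marginalization in getPosteriors().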
//Get a Map of genotype (log10)likelihoods
private EnumMap<GenotypeType,Double> getLikelihoodsAsMapSafeNull(Genotype genotype){
final EnumMap<GenotypeType,Double> likelihoodsMap = new EnumMap<>(GenotypeType.class);
double[] likelihoods;
if (genotype != null && hasCalledGT(genotype.getType()) && genotype.hasExtendedAttribute(GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY)) {
Object GPfromVCF = genotype.getExtendedAttribute(GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY);
//parse the GPs into a vector of probabilities
final String[] likelihoodsAsStringVector = ((String)GPfromVCF).split(",");
final double[] likelihoodsAsVector = new double[likelihoodsAsStringVector.length];
for ( int i = 0; i < likelihoodsAsStringVector.length; i++ ) {
likelihoodsAsVector[i] = Double.parseDouble(likelihoodsAsStringVector[i]) / -10.0;
}
//keep in log10 space for large GQs
likelihoods = GeneralUtils.normalizeFromLog10(likelihoodsAsVector, true, true);
}
//In case of null, unavailable or no call, all likelihoods are log10(1/3)
else if(genotype == null || !hasCalledGT(genotype.getType()) || genotype.getLikelihoods() == null){
likelihoods = new double[NUM_CALLED_GENOTYPETYPES];
likelihoods[0] = LOG10_OF_ONE_THIRD;
likelihoods[1] = LOG10_OF_ONE_THIRD;
likelihoods[2] = LOG10_OF_ONE_THIRD;
}
//No posteriors in VC, use PLs
else
likelihoods = GeneralUtils.normalizeFromLog10(genotype.getLikelihoods().getAsVector(),true,true);
if (likelihoods.length != NUM_CALLED_GENOTYPETYPES) {
final String key = genotype.hasExtendedAttribute(GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY) ?
GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY : VCFConstants.GENOTYPE_PL_KEY;
throw new UserException(genotype + " has " + likelihoods.length + " " + key + " values, should be " + NUM_CALLED_GENOTYPETYPES +
" since only the diploid case is supported when applying family priors.");
}
likelihoodsMap.put(GenotypeType.HOM_REF,likelihoods[genotypeTypeToValue(GenotypeType.HOM_REF)]);
likelihoodsMap.put(GenotypeType.HET,likelihoods[genotypeTypeToValue(GenotypeType.HET)]);
likelihoodsMap.put(GenotypeType.HOM_VAR, likelihoods[genotypeTypeToValue(GenotypeType.HOM_VAR)]);
return likelihoodsMap;
}
private int getLikelihoodMatrixIndex(GenotypeType mother, GenotypeType father, GenotypeType child){
int childInd = genotypeTypeToValue(child);
int motherInd;
int fatherInd;
final int INVALID = -1;
motherInd = genotypeTypeToValue(mother);
fatherInd = genotypeTypeToValue(father);
if (childInd == INVALID || motherInd == INVALID || fatherInd == INVALID) //any of the genotypes are NO_CALL, UNAVAILABLE or MIXED
return INVALID;
//index into array playing the part of a 3x3x3 matrix (where 3=NUM_CALLED_GENOTYPETYPES)
return motherInd*NUM_CALLED_GENOTYPETYPES*NUM_CALLED_GENOTYPETYPES + fatherInd*NUM_CALLED_GENOTYPETYPES + childInd;
}
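// Index arithmetic example for the flattened 3x3x3 matrix above:
//   mother HET (1), father HOM_REF (0), child HOM_VAR (2) -> 1*9 + 0*3 + 2 = 11
// i.e. the mother's genotype picks the 9-entry block, the father's the 3-entry row and the child's the column.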
private int genotypeTypeToValue(GenotypeType input){
if (input == GenotypeType.HOM_REF) return 0;
if (input == GenotypeType.HET) return 1;
if (input == GenotypeType.HOM_VAR) return 2;
return -1;
}
//this excludes mixed genotypes, whereas the htsjdk Genotype.isCalled() will return true if the GenotypeType is mixed
private boolean hasCalledGT(GenotypeType genotype){
return genotype == GenotypeType.HOM_REF || genotype == GenotypeType.HET || genotype == GenotypeType.HOM_VAR;
}
}<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/genotyper/afcalc/AFCalculationUnitTest.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper.afcalc;
import htsjdk.variant.variantcontext.*;
import org.apache.commons.lang.ArrayUtils;
import org.broadinstitute.gatk.tools.walkers.genotyper.AFPriorProvider;
import org.broadinstitute.gatk.tools.walkers.genotyper.UnifiedGenotypingEngine;
import org.broadinstitute.gatk.utils.BaseTest;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.QualityUtils;
import org.broadinstitute.gatk.utils.Utils;
import org.broadinstitute.gatk.utils.variant.HomoSapiensConstants;
import org.testng.Assert;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.*;
public class AFCalculationUnitTest extends BaseTest {
static Allele A = Allele.create("A", true);
static Allele C = Allele.create("C");
static Allele G = Allele.create("G");
static int sampleNameCounter = 0;
static Genotype AA1, AB1, BB1, NON_INFORMATIVE1;
static Genotype AA2, AB2, AC2, BB2, BC2, CC2, NON_INFORMATIVE2;
final double[] FLAT_3SAMPLE_PRIORS = MathUtils.normalizeFromLog10(new double[2*3+1], true); // flat priors
final private static boolean INCLUDE_BIALLELIC = true;
final private static boolean INCLUDE_TRIALLELIC = true;
final private static boolean Guillermo_FIXME = false; // TODO -- can only be enabled when GdA fixes bug
final private static boolean DEBUG_ONLY = false;
protected static List<AFCalculator> createAFCalculators(final List<AFCalculatorImplementation> calcs, final int maxAltAlleles, final int ploidy) {
final List<AFCalculator> AFCalculators = new LinkedList<>();
for ( final AFCalculatorImplementation calc : calcs ) {
if (calc.usableForParams(ploidy,maxAltAlleles))
AFCalculators.add(calc.newInstance());
else
throw new IllegalStateException("cannot use " + calc + " calculator instance with combination " + maxAltAlleles + " " + ploidy);
}
return AFCalculators;
}
@BeforeSuite
public void before() {
AA1 = makePL(Arrays.asList(A, A), 0, 20, 20);
AB1 = makePL(Arrays.asList(A, C), 20, 0, 20);
BB1 = makePL(Arrays.asList(C, C), 20, 20, 0);
NON_INFORMATIVE1 = makePL(Arrays.asList(Allele.NO_CALL, Allele.NO_CALL), 0, 0, 0);
AA2 = makePL(Arrays.asList(A, A), 0, 20, 20, 20, 20, 20);
AB2 = makePL(Arrays.asList(A, C), 20, 0, 20, 20, 20, 20);
BB2 = makePL(Arrays.asList(C, C), 20, 20, 0, 20, 20, 20);
AC2 = makePL(Arrays.asList(A, G), 20, 20, 20, 0, 20, 20);
BC2 = makePL(Arrays.asList(C, G), 20, 20, 20, 20, 0, 20);
CC2 = makePL(Arrays.asList(G, G), 20, 20, 20, 20, 20, 0);
NON_INFORMATIVE2 = makePL(Arrays.asList(Allele.NO_CALL, Allele.NO_CALL), 0, 0, 0, 0, 0, 0);
}
protected static Genotype makePL(final List<Allele> expectedGT, int ... pls) {
GenotypeBuilder gb = new GenotypeBuilder("sample" + sampleNameCounter++);
gb.alleles(expectedGT);
gb.PL(pls);
return gb.make();
}
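// Example of what the helper builds (values as used in before()): makePL(Arrays.asList(A, A), 0, 20, 20)
// yields a diploid genotype whose PL vector {0, 20, 20} favours hom-ref AA by 20 phred, i.e. the AA
// configuration is ~100x more likely than AB or BB under these likelihoods.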
private class GetGLsTest extends TestDataProvider {
GenotypesContext GLs;
int numAltAlleles;
final AFCalculator calc;
final int[] expectedACs;
final double[] priors;
final String priorName;
private GetGLsTest(final AFCalculator calc, int numAltAlleles, List<Genotype> arg, final double[] priors, final String priorName) {
super(GetGLsTest.class);
GLs = GenotypesContext.create(new ArrayList<>(arg));
this.numAltAlleles = numAltAlleles;
this.calc = calc;
this.priors = priors;
this.priorName = priorName;
expectedACs = new int[numAltAlleles+1];
for ( int alleleI = 0; alleleI < expectedACs.length; alleleI++ ) {
expectedACs[alleleI] = 0;
final Allele allele = getAlleles().get(alleleI);
for ( Genotype g : arg ) {
expectedACs[alleleI] += Collections.frequency(g.getAlleles(), allele);
}
}
}
public AFCalculationResult execute() {
return getCalc().getLog10PNonRef(getVC(), HomoSapiensConstants.DEFAULT_PLOIDY, numAltAlleles, getPriors());
}
public AFCalculationResult executeRef() {
final AFCalculator ref = AFCalculatorImplementation.EXACT_REFERENCE.newInstance();
return ref.getLog10PNonRef(getVC(), HomoSapiensConstants.DEFAULT_PLOIDY, numAltAlleles, getPriors());
}
public double[] getPriors() {
return priors;
}
public AFCalculator getCalc() {
return calc;
}
public VariantContext getVC() {
VariantContextBuilder builder = new VariantContextBuilder("test", "1", 1, 1, getAlleles());
builder.genotypes(GLs);
return builder.make();
}
public List<Allele> getAlleles() {
return Arrays.asList(Allele.create("A", true),
Allele.create("C"),
Allele.create("G"),
Allele.create("T")).subList(0, numAltAlleles+1);
}
public int getExpectedAltAC(final int alleleI) {
return expectedACs[alleleI+1];
}
public String toString() {
return String.format("%s model=%s prior=%s input=%s", super.toString(), calc.getClass().getSimpleName(),
priorName, GLs.size() > 5 ? String.format("%d samples", GLs.size()) : GLs);
}
}
private static final int MAX_ALT_ALLELES = 2;
private static final int PLOIDY = 2;
@DataProvider(name = "wellFormedGLs")
public Object[][] createSimpleGLsData() {
final List<Genotype> biAllelicSamples = Arrays.asList(AA1, AB1, BB1);
final List<Genotype> triAllelicSamples = Arrays.asList(AA2, AB2, BB2, AC2, BC2, CC2);
for ( final int nSamples : Arrays.asList(1, 2, 3, 4) ) {
List<AFCalculator> calcs = createAFCalculators(Arrays.asList(AFCalculatorImplementation.values()), MAX_ALT_ALLELES, PLOIDY);
//number of entries in the priors array, one for each AC in [0, 2*nSamples]
final int nPriorValues = 2*nSamples+1;
//total number of chromosomes in our samples -- here we're assuming diploid
final int totalPloidy = 2*nSamples;
final double theta = 0.001;
final double[] flatPriors = MathUtils.normalizeFromLog10(new double[nPriorValues], true); // flat priors
final AFPriorProvider log10priorProvider = UnifiedGenotypingEngine.composeAlleleFrequencyPriorProvider(totalPloidy, theta, new ArrayList<Double>());
final double[] humanPriors = log10priorProvider.forTotalPloidy(totalPloidy);
for ( final double[] priors : Arrays.asList(flatPriors, humanPriors) ) { // , humanPriors) ) {
for ( AFCalculator model : calcs ) {
final String priorName = priors == humanPriors ? "human" : "flat";
// bi-allelic
if ( INCLUDE_BIALLELIC && nSamples <= biAllelicSamples.size() )
for ( List<Genotype> genotypes : Utils.makePermutations(biAllelicSamples, nSamples, true) )
new GetGLsTest(model, 1, genotypes, priors, priorName);
// tri-allelic
if ( INCLUDE_TRIALLELIC && ( ! priorName.equals("human") || Guillermo_FIXME ) && ! ( model instanceof OriginalDiploidExactAFCalculator) ) // || model != generalCalc ) )
for ( List<Genotype> genotypes : Utils.makePermutations(triAllelicSamples, nSamples, true) )
new GetGLsTest(model, 2, genotypes, priors, priorName);
}
}
}
return GetGLsTest.getTests(GetGLsTest.class);
}
// @DataProvider(name = "badGLs")
// public Object[][] createBadGLs() {
// final List<Genotype> genotypes = Arrays.asList(AB2, BB2, CC2, CC2);
// final int nSamples = genotypes.size();
//
// final AFCalc indCalc = AFCalcFactory.createAFCalc(AFCalcFactory.Calculation.EXACT_INDEPENDENT, nSamples, 4);
//
// final int nPriorValues = 2*nSamples+1;
// final double[] priors = MathUtils.normalizeFromLog10(new double[nPriorValues], true); // flat priors
// for ( AFCalc model : Arrays.asList(indCalc) ) {
// final String priorName = "flat";
// new GetGLsTest(model, 2, genotypes, priors, priorName);
// }
//
// return GetGLsTest.getTests(GetGLsTest.class);
// }
//
// @Test(enabled = true && !DEBUG_ONLY, dataProvider = "badGLs")
// public void testBadGLs(GetGLsTest cfg) {
// testResultSimple(cfg);
// }
@Test(enabled = true && ! DEBUG_ONLY, dataProvider = "wellFormedGLs")
public void testBiallelicGLs(GetGLsTest cfg) {
if ( cfg.getAlleles().size() == 2 )
testResultSimple(cfg);
}
@Test(enabled = true && ! DEBUG_ONLY, dataProvider = "wellFormedGLs")
public void testTriallelicGLs(GetGLsTest cfg) {
if ( cfg.getAlleles().size() > 2 )
testResultSimple(cfg);
}
private static class NonInformativeData {
final Genotype nonInformative;
final List<Genotype> called;
final int nAltAlleles;
private NonInformativeData(List<Genotype> called, Genotype nonInformative, int nAltAlleles) {
this.called = called;
this.nonInformative = nonInformative;
this.nAltAlleles = nAltAlleles;
}
}
@DataProvider(name = "GLsWithNonInformative")
public Object[][] makeGLsWithNonInformative() {
List<Object[]> tests = new ArrayList<Object[]>();
final List<NonInformativeData> nonInformativeTests = new LinkedList<NonInformativeData>();
nonInformativeTests.add(new NonInformativeData(Arrays.asList(AB1), NON_INFORMATIVE1, 1));
nonInformativeTests.add(new NonInformativeData(Arrays.asList(AB2), NON_INFORMATIVE2, 2));
nonInformativeTests.add(new NonInformativeData(Arrays.asList(AB2, BC2), NON_INFORMATIVE2, 2));
for ( final int nNonInformative : Arrays.asList(1, 10, 100) ) {
for ( final NonInformativeData testData : nonInformativeTests ) {
final List<Genotype> samples = new ArrayList<Genotype>();
samples.addAll(testData.called);
samples.addAll(Collections.nCopies(nNonInformative, testData.nonInformative));
final int nSamples = samples.size();
List<AFCalculator> calcs = createAFCalculators(Arrays.asList(AFCalculatorImplementation.values()), MAX_ALT_ALLELES, PLOIDY);
final double[] priors = MathUtils.normalizeFromLog10(new double[2*nSamples+1], true); // flat priors
for ( AFCalculator model : calcs ) {
if ( testData.nAltAlleles > 1 && model instanceof OriginalDiploidExactAFCalculator)
continue;
final GetGLsTest onlyInformative = new GetGLsTest(model, testData.nAltAlleles, testData.called, priors, "flat");
for ( int rotation = 0; rotation < nSamples; rotation++ ) {
Collections.rotate(samples, 1);
final GetGLsTest withNonInformative = new GetGLsTest(model, testData.nAltAlleles, samples, priors, "flat");
tests.add(new Object[]{onlyInformative, withNonInformative});
}
}
}
}
return tests.toArray(new Object[][]{});
}
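    // Padding the sample list with non-informative genotypes (in any rotation) should leave the AF
    // calculation unchanged relative to running on the informative genotypes alone.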
@Test(enabled = true && ! DEBUG_ONLY, dataProvider = "GLsWithNonInformative", dependsOnMethods = {"testBiallelicGLs", "testTriallelicGLs"})
public void testGLsWithNonInformative(GetGLsTest onlyInformative, GetGLsTest withNonInformative) {
final AFCalculationResult expected = onlyInformative.execute();
final AFCalculationResult actual = withNonInformative.execute();
testResultSimple(withNonInformative);
compareAFCalcResults(actual, expected, onlyInformative.getCalc(), onlyInformative.numAltAlleles, true);
}
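    // Runs the configuration through both the reference exact calculator (executeRef) and the model
    // under test (execute), compares the two results, and then checks the MLE allele counts against
    // the ACs expected from the input genotypes.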
private void testResultSimple(final GetGLsTest cfg) {
final AFCalculationResult refResultTracker = cfg.executeRef();
final AFCalculationResult resultTracker = cfg.execute();
try {
compareAFCalcResults(resultTracker, refResultTracker, cfg.getCalc(), cfg.numAltAlleles, true);
} catch (Throwable t) {
cfg.execute();
throw new RuntimeException(t);
}
Assert.assertNotNull(resultTracker.getAllelesUsedInGenotyping());
Assert.assertTrue(cfg.getAlleles().containsAll(resultTracker.getAllelesUsedInGenotyping()), "Result object has alleles not in our initial allele list");
for ( int altAlleleI = 0; altAlleleI < cfg.numAltAlleles; altAlleleI++ ) {
int expectedAlleleCount = cfg.getExpectedAltAC(altAlleleI);
int calcAC_MLE = resultTracker.getAlleleCountsOfMLE()[altAlleleI];
final Allele allele = cfg.getAlleles().get(altAlleleI+1);
Assert.assertEquals(calcAC_MLE, expectedAlleleCount, "MLE AC not equal to expected AC for allele " + allele);
}
}
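    // Field-by-field comparison of two AFCalculationResult objects.  When onlyPosteriorsShouldBeEqual
    // is true, the priors and likelihoods are skipped and only the posteriors, MLE ACs and the alleles
    // used in genotyping must agree.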
private void compareAFCalcResults(final AFCalculationResult actual, final AFCalculationResult expected, final AFCalculator calc, final int maxAltAlleles, final boolean onlyPosteriorsShouldBeEqual) {
        // note that we cannot strictly test the multi-allelic case because the models meaningfully differ from one another there
final double TOLERANCE = maxAltAlleles > 1 ? 1000 : 0.1; // much tighter constraints on bi-allelic results
if ( ! onlyPosteriorsShouldBeEqual ) {
Assert.assertEquals(actual.getLog10PriorOfAFEq0(), expected.getLog10PriorOfAFEq0(), TOLERANCE, "Priors AF == 0");
Assert.assertEquals(actual.getLog10PriorOfAFGT0(), expected.getLog10PriorOfAFGT0(), TOLERANCE, "Priors AF > 0");
Assert.assertEquals(actual.getLog10LikelihoodOfAFEq0(), expected.getLog10LikelihoodOfAFEq0(), TOLERANCE, "Likelihoods AF == 0");
Assert.assertEquals(actual.getLog10LikelihoodOfAFGT0(), expected.getLog10LikelihoodOfAFGT0(), TOLERANCE, "Likelihoods AF > 0");
}
Assert.assertEquals(actual.getLog10PosteriorOfAFEq0(), expected.getLog10PosteriorOfAFEq0(), TOLERANCE, "Posteriors AF == 0");
Assert.assertEquals(actual.getLog10PosteriorOfAFGT0(), expected.getLog10PosteriorOfAFGT0(), TOLERANCE, "Posteriors AF > 0");
Assert.assertTrue(Arrays.equals(actual.getAlleleCountsOfMLE(), expected.getAlleleCountsOfMLE()), "MLE ACs ");
Assert.assertEquals(actual.getAllelesUsedInGenotyping(), expected.getAllelesUsedInGenotyping(), "Alleles used in genotyping");
for ( final Allele a : expected.getAllelesUsedInGenotyping() ) {
if ( ! a.isReference() ) {
Assert.assertEquals(actual.getAlleleCountAtMLE(a), expected.getAlleleCountAtMLE(a), "MLE AC for allele " + a);
// TODO -- enable me when IndependentAllelesDiploidExactAFCalc works properly
// if ( ! ( calc instanceof GeneralPloidyExactAFCalc ) )
// // TODO -- delete when general ploidy works properly with multi-allelics
//                Assert.assertEquals(actual.isPolymorphic(a, 0.0), expected.isPolymorphic(a, 0.0), "isPolymorphic with threshold 0.0 for allele " + a);
}
}
}
@Test(enabled = true && ! DEBUG_ONLY, dataProvider = "Models")
public void testLargeGLs(final ExactAFCalculator calc) {
final Genotype BB = makePL(Arrays.asList(C, C), 20000000, 20000000, 0);
GetGLsTest cfg = new GetGLsTest(calc, 1, Arrays.asList(BB, BB, BB), FLAT_3SAMPLE_PRIORS, "flat");
final AFCalculationResult resultTracker = cfg.execute();
int calculatedAlleleCount = resultTracker.getAlleleCountsOfMLE()[0];
Assert.assertEquals(calculatedAlleleCount, 6);
}
@Test(enabled = true && ! DEBUG_ONLY, dataProvider = "Models")
public void testMismatchedGLs(final ExactAFCalculator calc) {
final Genotype AB = makePL(Arrays.asList(A, C), 2000, 0, 2000, 2000, 2000, 2000);
final Genotype AC = makePL(Arrays.asList(A, G), 100, 100, 100, 0, 100, 100);
GetGLsTest cfg = new GetGLsTest(calc, 2, Arrays.asList(AB, AC), FLAT_3SAMPLE_PRIORS, "flat");
final AFCalculationResult resultTracker = cfg.execute();
Assert.assertEquals(resultTracker.getAlleleCountsOfMLE()[0], 1);
Assert.assertEquals(resultTracker.getAlleleCountsOfMLE()[1], 1);
}
// --------------------------------------------------------------------------------
//
// Code to test that the pNonRef value is meaningful
//
// --------------------------------------------------------------------------------
private static class PNonRefData {
final Genotype g;
final double pNonRef, tolerance;
final boolean canScale;
final List<AFCalculatorImplementation> badModels;
final VariantContext vc;
private PNonRefData(final VariantContext vc, Genotype g, double pNonRef, double tolerance, final boolean canScale) {
this(vc, g, pNonRef, tolerance, canScale, Collections.<AFCalculatorImplementation>emptyList());
}
private PNonRefData(final VariantContext vc, Genotype g, double pNonRef, double tolerance, final boolean canScale, final List<AFCalculatorImplementation> badModels) {
this.g = g;
this.pNonRef = pNonRef;
this.tolerance = tolerance;
this.canScale = canScale;
this.badModels = badModels;
this.vc = vc;
}
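        // Scales the genotype's PLs up by a factor derived from scaleFactor and adjusts the expected
        // pNonRef with a simple (admittedly naive) estimate; only applied when canScale is set.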
public PNonRefData scale(final int scaleFactor) {
if ( canScale ) {
final int[] PLs = new int[g.getPL().length];
for ( int i = 0; i < PLs.length; i++ ) PLs[i] = g.getPL()[i] * ((int)Math.log10(scaleFactor)+1);
final Genotype scaledG = new GenotypeBuilder(g).PL(PLs).make();
final double scaledPNonRef = pNonRef < 0.5 ? pNonRef / scaleFactor : 1 - ((1-pNonRef) / scaleFactor);
return new PNonRefData(vc, scaledG, scaledPNonRef, tolerance, true);
} else {
return this;
}
}
}
@DataProvider(name = "PNonRef")
public Object[][] makePNonRefTest() {
List<Object[]> tests = new ArrayList<Object[]>();
final List<Allele> AA = Arrays.asList(A, A);
final List<Allele> AC = Arrays.asList(A, C);
final List<Allele> CC = Arrays.asList(C, C);
final List<Allele> AG = Arrays.asList(A, G);
final List<Allele> GG = Arrays.asList(G, G);
final List<Allele> CG = Arrays.asList(C, G);
final VariantContext vc2 = new VariantContextBuilder("x","1", 1, 1, Arrays.asList(A, C)).make();
final VariantContext vc3 = new VariantContextBuilder("x","1", 1, 1, Arrays.asList(A, C, G)).make();
final AFCalculatorTestBuilder.PriorType priorType = AFCalculatorTestBuilder.PriorType.flat;
final double TOLERANCE = 0.5;
final List<PNonRefData> initialPNonRefData = Arrays.asList(
// bi-allelic sites
new PNonRefData(vc2, makePL(AA, 0, 10, 10), 0.1666667, TOLERANCE, true),
new PNonRefData(vc2, makePL(AA, 0, 1, 10), 0.4721084, TOLERANCE, false),
new PNonRefData(vc2, makePL(AA, 0, 1, 1), 0.6136992, TOLERANCE, false),
new PNonRefData(vc2, makePL(AA, 0, 5, 5), 0.3874259, TOLERANCE, false),
new PNonRefData(vc2, makePL(AC, 10, 0, 10), 0.9166667, TOLERANCE, true),
new PNonRefData(vc2, makePL(CC, 10, 10, 0), 0.9166667, TOLERANCE, true),
// tri-allelic sites -- cannot scale because of the naivety of our scaling estimator
new PNonRefData(vc3, makePL(AA, 0, 10, 10, 10, 10, 10), 0.3023255813953489, TOLERANCE * 2, false), // more tolerance because constrained model is a bit inaccurate
new PNonRefData(vc3, makePL(AC, 10, 0, 10, 10, 10, 10), 0.9166667, TOLERANCE, false),
new PNonRefData(vc3, makePL(CC, 10, 10, 0, 10, 10, 10), 0.9166667, TOLERANCE, false),
new PNonRefData(vc3, makePL(AG, 10, 10, 10, 0, 10, 10), 0.9166667, TOLERANCE, false),
new PNonRefData(vc3, makePL(CG, 10, 10, 10, 10, 0, 10), 0.80, TOLERANCE, false),
new PNonRefData(vc3, makePL(GG, 10, 10, 10, 10, 10, 0), 0.9166667, TOLERANCE, false)
);
for ( AFCalculatorImplementation modelType : Arrays.asList(AFCalculatorImplementation.EXACT_REFERENCE, AFCalculatorImplementation.EXACT_INDEPENDENT) ) {
for ( int nNonInformative = 0; nNonInformative < 3; nNonInformative++ ) {
for ( final PNonRefData rootData : initialPNonRefData ) {
for ( int plScale = 1; plScale <= 100000; plScale *= 10 ) {
if ( ! rootData.badModels.contains(modelType) && (plScale == 1 || rootData.canScale) ) {
final PNonRefData data = rootData.scale(plScale);
tests.add(new Object[]{data.vc, modelType, priorType, Arrays.asList(data.g), data.pNonRef, data.tolerance, nNonInformative});
}
}
}
}
}
return tests.toArray(new Object[][]{});
}
@Test(enabled = true && ! DEBUG_ONLY, dataProvider = "PNonRef")
private void testPNonRef(final VariantContext vcRoot,
AFCalculatorImplementation modelType,
AFCalculatorTestBuilder.PriorType priorType,
final List<Genotype> genotypes,
final double expectedPNonRef,
final double tolerance,
final int nNonInformative) {
final AFCalculatorTestBuilder testBuilder
= new AFCalculatorTestBuilder(1, vcRoot.getNAlleles()-1, modelType, priorType);
final VariantContextBuilder vcb = new VariantContextBuilder(vcRoot);
vcb.genotypes(genotypes);
final AFCalculationResult resultTracker = testBuilder.makeModel().getLog10PNonRef(vcb.make(), PLOIDY, MAX_ALT_ALLELES, testBuilder.makePriors());
Assert.assertEquals(resultTracker.getLog10PosteriorOfAFGT0(), Math.log10(expectedPNonRef), tolerance,
"Actual pNonRef not within tolerance " + tolerance + " of expected");
}
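    // Systematic bi-allelic check: each model (except EXACT_GENERAL_PLOIDY, which is skipped) is run
    // over an exhaustive set of single-sample PL vectors plus five-sample combinations of small PLs,
    // and every result is compared against the EXACT_REFERENCE implementation in
    // PNonRefBiallelicSystematic below.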
@DataProvider(name = "PNonRefBiallelicSystematic")
public Object[][] makePNonRefBiallelicSystematic() {
List<Object[]> tests = new ArrayList<Object[]>();
final List<Integer> bigNonRefPLs = Arrays.asList(0, 1, 2, 3, 4, 5, 10, 15, 20, 25, 50, 100, 1000);
final List<List<Integer>> bigDiploidPLs = removeBadPLs(Utils.makePermutations(bigNonRefPLs, 3, true));
for ( AFCalculatorImplementation modelType : AFCalculatorImplementation.values() ) {
if ( false ) { // for testing only
tests.add(new Object[]{modelType, toGenotypes(Arrays.asList(Arrays.asList(0,100,0)))});
} else {
if ( modelType == AFCalculatorImplementation.EXACT_GENERAL_PLOIDY ) continue; // TODO -- GENERAL_PLOIDY DOESN'T WORK
// test all combinations of PLs for 1 sample
for ( final List<List<Integer>> PLsPerSample : Utils.makePermutations(bigDiploidPLs, 1, true) ) {
tests.add(new Object[]{modelType, toGenotypes(PLsPerSample)});
}
final List<List<Integer>> smallDiploidPLs = new LinkedList<List<Integer>>();
for ( final int nonRefPL : Arrays.asList(5, 10, 20, 30) ) {
for ( int i = 0; i < 2; i++ ) {
List<Integer> pls = new ArrayList<Integer>(Collections.nCopies(3, nonRefPL));
pls.set(i, 0);
smallDiploidPLs.add(pls);
}
}
for ( final List<List<Integer>> PLsPerSample : Utils.makePermutations(smallDiploidPLs, 5, false) ) {
tests.add(new Object[]{modelType, toGenotypes(PLsPerSample)});
}
}
}
return tests.toArray(new Object[][]{});
}
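    // Filters the permuted PL vectors, keeping only those whose values never increase from one entry
    // to the next.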
final List<List<Integer>> removeBadPLs(List<List<Integer>> listOfPLs) {
List<List<Integer>> clean = new LinkedList<List<Integer>>();
for ( final List<Integer> PLs : listOfPLs ) {
int x = PLs.get(0);
boolean bad = false;
for ( int pl1 : PLs )
if ( pl1 > x )
bad = true;
else
x = pl1;
if ( ! bad ) clean.add(PLs);
}
return clean;
}
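    // Builds one no-call diploid genotype per PL vector, shifting each vector so its minimum PL is 0.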
private List<Genotype> toGenotypes(final List<List<Integer>> PLsPerSample) {
final List<Allele> nocall = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL);
final List<Genotype> genotypes = new ArrayList<Genotype>(PLsPerSample.size());
for ( final List<Integer> PLs : PLsPerSample ) {
final int[] pls = ArrayUtils.toPrimitive(PLs.toArray(new Integer[3]));
final int min = MathUtils.arrayMin(pls);
for ( int i = 0; i < pls.length; i++ ) pls[i] -= min;
genotypes.add(makePL(nocall, pls));
}
return genotypes;
}
@Test(enabled = true && ! DEBUG_ONLY, dataProvider = "PNonRefBiallelicSystematic")
private void PNonRefBiallelicSystematic(AFCalculatorImplementation modelType, final List<Genotype> genotypes) {
//logger.warn("Running " + modelType + " with " + genotypes);
final AFCalculatorTestBuilder refBuilder = new AFCalculatorTestBuilder(genotypes.size(), 1, AFCalculatorImplementation.EXACT_REFERENCE, AFCalculatorTestBuilder.PriorType.human);
final AFCalculatorTestBuilder testBuilder = new AFCalculatorTestBuilder(genotypes.size(), 1, modelType, AFCalculatorTestBuilder.PriorType.human);
final VariantContextBuilder vcb = new VariantContextBuilder("x", "1", 1, 1, Arrays.asList(A, C));
vcb.genotypes(genotypes);
final AFCalculationResult refResult = refBuilder.makeModel().getLog10PNonRef(vcb.make(), PLOIDY, MAX_ALT_ALLELES, testBuilder.makePriors());
final AFCalculationResult testResult = testBuilder.makeModel().getLog10PNonRef(vcb.make(), PLOIDY, MAX_ALT_ALLELES, testBuilder.makePriors());
final double tolerance = 1e-3;
Assert.assertEquals(testResult.getLog10PosteriorOfAFGT0(), refResult.getLog10PosteriorOfAFGT0(), tolerance,
"Actual pNonRef not within tolerance " + tolerance + " of expected");
Assert.assertEquals(testResult.getAlleleCountsOfMLE(), refResult.getAlleleCountsOfMLE(),
"Actual MLE " + Utils.join(",", testResult.getAlleleCountsOfMLE()) + " not equal to expected " + Utils.join(",", refResult.getAlleleCountsOfMLE()));
}
// --------------------------------------------------------------------------------
//
// Test priors
//
// --------------------------------------------------------------------------------
@DataProvider(name = "Models")
public Object[][] makeModels() {
List<Object[]> tests = new ArrayList<Object[]>();
for ( final AFCalculatorImplementation calc : AFCalculatorImplementation.values() ) {
if ( calc.usableForParams(2, 4) )
tests.add(new Object[]{AFCalculatorFactory.createCalculatorForDiploidAnalysis()});
}
return tests.toArray(new Object[][]{});
}
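    // Checks that an explicitly flat log10 prior and a uniform prior supplied through
    // composeAlleleFrequencyPriorProvider lead to the same posterior of AF > 0.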
@Test(enabled = true, dataProvider = "Models")
public void testNoPrior(final AFCalculator model) {
for ( int REF_PL = 10; REF_PL <= 20; REF_PL += 10 ) {
final Genotype AB = makePL(Arrays.asList(A,C), REF_PL, 0, 10000);
final double[] flatPriors = new double[]{0.0,0.0,0.0};
// test that function computeAlleleFrequency correctly operates when the flat prior option is set
// computeAlleleFrequencyPriors takes linear priors
final ArrayList<Double> inputPrior = new ArrayList<Double>();
inputPrior.add(1.0/3);
inputPrior.add(1.0/3);
final AFPriorProvider log10priorProvider = UnifiedGenotypingEngine.composeAlleleFrequencyPriorProvider(2, 0.0, inputPrior);
final double[] noPriors = log10priorProvider.forTotalPloidy(2);
GetGLsTest cfgFlatPrior = new GetGLsTest(model, 1, Arrays.asList(AB), flatPriors, "flatPrior");
GetGLsTest cfgNoPrior = new GetGLsTest(model, 1, Arrays.asList(AB), noPriors, "noPrior");
final AFCalculationResult resultTrackerFlat = cfgFlatPrior.execute();
final AFCalculationResult resultTrackerNoPrior = cfgNoPrior.execute();
final double pRefWithNoPrior = AB.getLikelihoods().getAsVector()[0];
final double pHetWithNoPrior = AB.getLikelihoods().getAsVector()[1] - Math.log10(0.5);
final double nonRefPost = Math.pow(10, pHetWithNoPrior) / (Math.pow(10, pRefWithNoPrior) + Math.pow(10, pHetWithNoPrior));
final double log10NonRefPost = Math.log10(nonRefPost);
if ( ! Double.isInfinite(log10NonRefPost) ) {
                // check that the no-prior and flat-prior constructions yield the same result
Assert.assertEquals(resultTrackerFlat.getLog10PosteriorOfAFGT0(), resultTrackerNoPrior.getLog10PosteriorOfAFGT0());
}
}
}
@Test(enabled = true && !DEBUG_ONLY, dataProvider = "Models")
public void testBiallelicPriors(final AFCalculator model) {
for ( int REF_PL = 10; REF_PL <= 20; REF_PL += 10 ) {
final Genotype AB = makePL(Arrays.asList(A,C), REF_PL, 0, 10000);
for ( int log10NonRefPrior = 1; log10NonRefPrior < 10*REF_PL; log10NonRefPrior += 1 ) {
final double refPrior = 1 - QualityUtils.qualToErrorProb(log10NonRefPrior);
final double nonRefPrior = (1-refPrior) / 2;
final double[] priors = MathUtils.normalizeFromLog10(MathUtils.toLog10(new double[]{refPrior, nonRefPrior, nonRefPrior}), true);
if ( ! Double.isInfinite(priors[1]) ) {
GetGLsTest cfg = new GetGLsTest(model, 1, Arrays.asList(AB), priors, "pNonRef" + log10NonRefPrior);
final AFCalculationResult resultTracker = cfg.execute();
final int actualAC = resultTracker.getAlleleCountsOfMLE()[0];
final double pRefWithPrior = AB.getLikelihoods().getAsVector()[0] + priors[0];
final double pHetWithPrior = AB.getLikelihoods().getAsVector()[1] + priors[1] - Math.log10(0.5);
final double nonRefPost = Math.pow(10, pHetWithPrior) / (Math.pow(10, pRefWithPrior) + Math.pow(10, pHetWithPrior));
final double log10NonRefPost = Math.log10(nonRefPost);
if ( ! Double.isInfinite(log10NonRefPost) )
Assert.assertEquals(resultTracker.getLog10PosteriorOfAFGT0(), log10NonRefPost, 1e-2);
if ( nonRefPost >= 0.9 )
Assert.assertTrue(resultTracker.isPolymorphic(C, -1));
final int expectedMLEAC = 1; // the MLE is independent of the prior
Assert.assertEquals(actualAC, expectedMLEAC,
"actual AC with priors " + log10NonRefPrior + " not expected "
+ expectedMLEAC + " priors " + Utils.join(",", priors));
}
}
}
}
// --------------------------------------------------------------------------------
//
// Test that polymorphic sites (bi and tri) are properly called
//
// --------------------------------------------------------------------------------
@DataProvider(name = "polyTestProvider")
public Object[][] makePolyTestProvider() {
List<Object[]> tests = new ArrayList<Object[]>();
// list of all high-quality models in the system
final List<AFCalculatorImplementation> models = Arrays.asList(
AFCalculatorImplementation.DEFAULT,
AFCalculatorImplementation.EXACT_REFERENCE,
AFCalculatorImplementation.EXACT_INDEPENDENT);
// note that we cannot use small PLs here or the thresholds are hard to set
for ( final int nonTypePLs : Arrays.asList(100, 1000) ) {
for ( final AFCalculatorImplementation model : models ) {
for ( final int allele1AC : Arrays.asList(0, 1, 2, 10, 100, 1000, 10000) ) {
for ( final int nSamples : Arrays.asList(1, 10, 100, 1000, 10000) ) {
// for ( final int nonTypePLs : Arrays.asList(10) ) {
// for ( final AFCalcFactory.Calculation model : models ) {
// for ( final int allele1AC : Arrays.asList(100) ) {
// for ( final int nSamples : Arrays.asList(1000) ) {
if ( nSamples < allele1AC ) continue;
final double pPerSample = Math.pow(10, nonTypePLs / -10.0);
final double errorFreq = pPerSample * nSamples;
final boolean poly1 = allele1AC > errorFreq && (nonTypePLs * allele1AC) > 30;
// bi-allelic tests
{
final AFCalculatorTestBuilder testBuilder
= new AFCalculatorTestBuilder(nSamples, 1, model, AFCalculatorTestBuilder.PriorType.human);
final List<Integer> ACs = Arrays.asList(allele1AC);
tests.add(new Object[]{testBuilder, ACs, nonTypePLs, Arrays.asList(poly1)});
}
// multi-allelic tests
for ( final int allele2AC : Arrays.asList(0, 1, 2, 10, 20, 50) ) {
if ( nSamples < allele2AC || allele1AC + allele2AC > nSamples || nSamples > 100 || nSamples == 1)
continue;
final AFCalculatorTestBuilder testBuilder
= new AFCalculatorTestBuilder(nSamples, 2, model, AFCalculatorTestBuilder.PriorType.human);
final List<Integer> ACs = Arrays.asList(allele1AC, allele2AC);
final boolean poly2 = allele2AC > errorFreq && (nonTypePLs * allele2AC) > 90;
tests.add(new Object[]{testBuilder, ACs, nonTypePLs, Arrays.asList(poly1, poly2)});
}
}
}
}
}
return tests.toArray(new Object[][]{});
}
@Test(enabled = true && ! DEBUG_ONLY, dataProvider = "polyTestProvider")
public void testCallingGeneral(final AFCalculatorTestBuilder testBuilder, final List<Integer> ACs, final int nonTypePL, final List<Boolean> expectedPoly ) {
testCalling(testBuilder, ACs, nonTypePL, expectedPoly);
}
@DataProvider(name = "polyTestProviderLotsOfAlleles")
public Object[][] makepolyTestProviderLotsOfAlleles() {
List<Object[]> tests = new ArrayList<Object[]>();
// list of all high-quality models in the system
final List<AFCalculatorImplementation> models = Arrays.asList(AFCalculatorImplementation.EXACT_INDEPENDENT);
final List<Integer> alleleCounts = Arrays.asList(0, 1, 2, 3, 4, 5, 10, 20);
final int nonTypePLs = 1000;
final int nAlleles = 4;
for ( final AFCalculatorImplementation model : models ) {
for ( final List<Integer> ACs : Utils.makePermutations(alleleCounts, nAlleles, true) ) {
final List<Boolean> isPoly = new ArrayList<Boolean>(ACs.size());
for ( final int ac : ACs ) isPoly.add(ac > 0);
final double acSum = MathUtils.sum(ACs);
for ( final int nSamples : Arrays.asList(1, 10, 100) ) {
if ( nSamples < acSum ) continue;
final AFCalculatorTestBuilder testBuilder
= new AFCalculatorTestBuilder(nSamples, nAlleles, model, AFCalculatorTestBuilder.PriorType.human);
tests.add(new Object[]{testBuilder, ACs, nonTypePLs, isPoly});
}
}
}
return tests.toArray(new Object[][]{});
}
@Test(enabled = true && ! DEBUG_ONLY, dataProvider = "polyTestProviderLotsOfAlleles")
public void testCallingLotsOfAlleles(final AFCalculatorTestBuilder testBuilder, final List<Integer> ACs, final int nonTypePL, final List<Boolean> expectedPoly ) {
testCalling(testBuilder, ACs, nonTypePL, expectedPoly);
}
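    // Shared assertion logic: builds a variant context with the requested ACs, runs the model, and
    // checks that (a) the non-ref posterior is reported as likely (log10 posterior of AF > 0 above -1)
    // whenever any allele is expected to be polymorphic, and (b) each alt allele's MLE AC matches the
    // called chromosome count in the VC and its polymorphic status matches the expectation.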
private void testCalling(final AFCalculatorTestBuilder testBuilder, final List<Integer> ACs, final int nonTypePL, final List<Boolean> expectedPoly) {
final AFCalculator calc = testBuilder.makeModel();
final double[] priors = testBuilder.makePriors();
final VariantContext vc = testBuilder.makeACTest(ACs, 0, nonTypePL);
final AFCalculationResult result = calc.getLog10PNonRef(vc, PLOIDY, testBuilder.numAltAlleles, priors);
boolean anyPoly = false;
for ( final boolean onePoly : expectedPoly ) anyPoly = anyPoly || onePoly;
if ( anyPoly )
Assert.assertTrue(result.getLog10PosteriorOfAFGT0() > -1);
for ( int altI = 1; altI < result.getAllelesUsedInGenotyping().size(); altI++ ) {
final int i = altI - 1;
final Allele alt = result.getAllelesUsedInGenotyping().get(altI);
// must be getCalledChrCount because we cannot ensure that the VC made has our desired ACs
Assert.assertEquals(result.getAlleleCountAtMLE(alt), vc.getCalledChrCount(alt));
Assert.assertEquals(result.isPolymorphic(alt, -1), (boolean)expectedPoly.get(i), "isPolymorphic for allele " + alt + " " + result.getLog10PosteriorOfAFEq0ForAllele(alt));
}
}
}<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller.graphs;
import org.broadinstitute.gatk.utils.BaseTest;
import org.broadinstitute.gatk.utils.Utils;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
public class SeqGraphUnitTest extends BaseTest {
private final static boolean DEBUG = false;
private class MergeNodesWithNoVariationTestProvider extends TestDataProvider {
public byte[] sequence;
public int KMER_LENGTH;
public MergeNodesWithNoVariationTestProvider(String seq, int kmer) {
super(MergeNodesWithNoVariationTestProvider.class, String.format("Merge nodes with no variation test. kmer = %d, seq = %s", kmer, seq));
sequence = seq.getBytes();
KMER_LENGTH = kmer;
}
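        // Builds a de Bruijn graph by adding each pair of adjacent kmers from the sequence, converts
        // it to a sequence graph and simplifies it; with no variation present, the test expects the
        // simplified graph to collapse to a single vertex spelling the original sequence.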
public SeqGraph calcGraph() {
final TestGraph deBruijnGraph = new TestGraph();
final int kmersInSequence = sequence.length - KMER_LENGTH + 1;
for (int i = 0; i < kmersInSequence - 1; i++) {
// get the kmers
final byte[] kmer1 = new byte[KMER_LENGTH];
System.arraycopy(sequence, i, kmer1, 0, KMER_LENGTH);
final byte[] kmer2 = new byte[KMER_LENGTH];
System.arraycopy(sequence, i+1, kmer2, 0, KMER_LENGTH);
deBruijnGraph.addKmersToGraph(kmer1, kmer2, false, 1);
}
final SeqGraph seqGraph = deBruijnGraph.convertToSequenceGraph();
seqGraph.simplifyGraph();
return seqGraph;
}
}
@DataProvider(name = "MergeNodesWithNoVariationTestProvider")
public Object[][] makeMergeNodesWithNoVariationTests() {
new MergeNodesWithNoVariationTestProvider("GGTTAACC", 3);
new MergeNodesWithNoVariationTestProvider("GGTTAACC", 4);
new MergeNodesWithNoVariationTestProvider("GGTTAACC", 5);
new MergeNodesWithNoVariationTestProvider("GGTTAACC", 6);
new MergeNodesWithNoVariationTestProvider("GGTTAACC", 7);
new MergeNodesWithNoVariationTestProvider("GGTTAACCATGCAGACGGGAGGCTGAGCGAGAGTTTT", 6);
new MergeNodesWithNoVariationTestProvider("AATACCATTGGAGTTTTTTTCCAGGTTAAGATGGTGCATTGAATCCACCCATCTACTTTTGCTCCTCCCAAAACTCACTAAAACTATTATAAAGGGATTTTGTTTAAAGACACAAACTCATGAGGACAGAGAGAACAGAGTAGACAATAGTGGGGGAAAAATAAGTTGGAAGATAGAAAACAGATGGGTGAGTGGTAATCGACTCAGCAGCCCCAAGAAAGCTGAAACCCAGGGAAAGTTAAGAGTAGCCCTATTTTCATGGCAAAATCCAAGGGGGGGTGGGGAAAGAAAGAAAAACAGAAAAAAAAATGGGAATTGGCAGTCCTAGATATCTCTGGTACTGGGCAAGCCAAAGAATCAGGATAACTGGGTGAAAGGTGATTGGGAAGCAGTTAAAATCTTAGTTCCCCTCTTCCACTCTCCGAGCAGCAGGTTTCTCTCTCTCATCAGGCAGAGGGCTGGAGAT", 66);
new MergeNodesWithNoVariationTestProvider("AATACCATTGGAGTTTTTTTCCAGGTTAAGATGGTGCATTGAATCCACCCATCTACTTTTGCTCCTCCCAAAACTCACTAAAACTATTATAAAGGGATTTTGTTTAAAGACACAAACTCATGAGGACAGAGAGAACAGAGTAGACAATAGTGGGGGAAAAATAAGTTGGAAGATAGAAAACAGATGGGTGAGTGGTAATCGACTCAGCAGCCCCAAGAAAGCTGAAACCCAGGGAAAGTTAAGAGTAGCCCTATTTTCATGGCAAAATCCAAGGGGGGGTGGGGAAAGAAAGAAAAACAGAAAAAAAAATGGGAATTGGCAGTCCTAGATATCTCTGGTACTGGGCAAGCCAAAGAATCAGGATAACTGGGTGAAAGGTGATTGGGAAGCAGTTAAAATCTTAGTTCCCCTCTTCCACTCTCCGAGCAGCAGGTTTCTCTCTCTCATCAGGCAGAGGGCTGGAGAT", 76);
return MergeNodesWithNoVariationTestProvider.getTests(MergeNodesWithNoVariationTestProvider.class);
}
@Test(dataProvider = "MergeNodesWithNoVariationTestProvider", enabled = !DEBUG)
public void testMergeNodesWithNoVariation(MergeNodesWithNoVariationTestProvider cfg) {
logger.warn(String.format("Test: %s", cfg.toString()));
final SeqGraph actual = cfg.calcGraph();
Assert.assertEquals(actual.vertexSet().size(), 1);
final SeqVertex actualV = actual.vertexSet().iterator().next();
Assert.assertEquals(actualV.getSequence(), cfg.sequence);
}
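    // The base graph below forms a diamond rooted at 'top' (top -> middle1/2/3 -> bottom, with
    // flanking vertices); only 'top' should be recognized as a diamond root there, and each modified
    // copy adds a vertex or edge that breaks the diamond property so that 'top' no longer qualifies.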
@DataProvider(name = "IsDiamondData")
public Object[][] makeIsDiamondData() throws Exception {
List<Object[]> tests = new ArrayList<Object[]>();
SeqGraph graph;
SeqVertex pre1, pre2, top, middle1, middle2, middle3, bottom, tail1, tail2;
graph = new SeqGraph(11);
pre1 = new SeqVertex("ACT");
pre2 = new SeqVertex("AGT");
top = new SeqVertex("A");
middle1 = new SeqVertex("CT");
middle2 = new SeqVertex("CG");
middle3 = new SeqVertex("CA");
bottom = new SeqVertex("AA");
tail1 = new SeqVertex("GC");
tail2 = new SeqVertex("GC");
graph.addVertices(pre1, pre2, top, middle1, middle2, middle3, bottom, tail1, tail2);
graph.addEdges(pre1, top, middle1, bottom, tail1);
graph.addEdges(pre2, top, middle2, bottom, tail1);
graph.addEdges(top, middle3, bottom);
graph.addEdges(bottom, tail2);
for ( final SeqVertex no : Arrays.asList(pre1, pre2, middle1, middle2, middle3, bottom, tail1, tail2)) {
tests.add(new Object[]{graph, no, false});
}
tests.add(new Object[]{graph, top, true});
final SeqGraph danglingMiddleGraph = (SeqGraph)graph.clone();
final SeqVertex danglingMiddle = new SeqVertex("A");
danglingMiddleGraph.addVertex(danglingMiddle);
danglingMiddleGraph.addEdge(top, danglingMiddle);
tests.add(new Object[]{danglingMiddleGraph, top, false});
final SeqGraph strangerToBottom = (SeqGraph)graph.clone();
final SeqVertex notAttachedToTop = new SeqVertex("A");
strangerToBottom.addVertex(notAttachedToTop);
strangerToBottom.addEdge(notAttachedToTop, bottom);
tests.add(new Object[]{strangerToBottom, top, false});
final SeqGraph strangerToMiddle = (SeqGraph)graph.clone();
final SeqVertex attachedToMiddle = new SeqVertex("A");
strangerToMiddle.addVertex(attachedToMiddle);
strangerToMiddle.addEdge(attachedToMiddle, middle1);
tests.add(new Object[]{strangerToMiddle, top, false});
// middle1 has outgoing edge to non-bottom
final SeqGraph middleExtraOut = (SeqGraph)graph.clone();
final SeqVertex fromMiddle = new SeqVertex("A");
middleExtraOut.addVertex(fromMiddle);
middleExtraOut.addEdge(middle1, fromMiddle);
tests.add(new Object[]{middleExtraOut, top, false});
// top connects to bottom directly as well
{
final SeqGraph topConnectsToBottomToo = new SeqGraph(11);
final SeqVertex top2 = new SeqVertex("A");
final SeqVertex middle4 = new SeqVertex("C");
final SeqVertex bottom2 = new SeqVertex("G");
topConnectsToBottomToo.addVertices(top2, middle4, bottom2);
topConnectsToBottomToo.addEdges(top2, middle4, bottom2);
topConnectsToBottomToo.addEdges(top2, bottom2);
tests.add(new Object[]{topConnectsToBottomToo, top2, false});
}
return tests.toArray(new Object[][]{});
}
@Test(dataProvider = "IsDiamondData", enabled = !DEBUG)
public void testIsDiamond(final SeqGraph graph, final SeqVertex v, final boolean isRootOfDiamond) {
final SeqGraph.MergeDiamonds merger = graph.new MergeDiamonds();
merger.setDontModifyGraphEvenIfPossible();
Assert.assertEquals(merger.tryToTransform(v), isRootOfDiamond);
}
@DataProvider(name = "MergingData")
public Object[][] makeMergingData() throws Exception {
List<Object[]> tests = new ArrayList<Object[]>();
final SeqGraph graph = new SeqGraph(11);
SeqVertex pre1 = new SeqVertex(Utils.dupString("A", SeqGraph.MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES) + "CT");
SeqVertex pre2 = new SeqVertex(Utils.dupString("A", SeqGraph.MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES) + "GT");
SeqVertex top = new SeqVertex("A");
SeqVertex middle1 = new SeqVertex("GC");
SeqVertex middle2 = new SeqVertex("TC");
SeqVertex middle3 = new SeqVertex("AC");
SeqVertex middle4 = new SeqVertex("GCAC");
SeqVertex bottom = new SeqVertex("AA");
SeqVertex tail1 = new SeqVertex("GC");
SeqVertex tail2 = new SeqVertex("GC");
// just a single vertex
graph.addVertices(pre1);
tests.add(new Object[]{graph.clone(), graph.clone()});
// pre1 -> top = pre1 + top
{
graph.addVertices(top);
graph.addEdges(pre1, top);
final SeqVertex pre1_top = new SeqVertex(pre1.getSequenceString() + top.getSequenceString());
final SeqGraph expected = new SeqGraph(11);
expected.addVertex(pre1_top);
tests.add(new Object[]{graph.clone(), expected.clone()});
}
// pre1 -> top -> middle1 = pre1 + top + middle1
{
graph.addVertices(middle1);
graph.addEdges(top, middle1);
final SeqGraph expected = new SeqGraph(11);
final SeqVertex pre1_top_middle1 = new SeqVertex(pre1.getSequenceString() + top.getSequenceString() + middle1.getSequenceString());
expected.addVertex(pre1_top_middle1);
tests.add(new Object[]{graph.clone(), expected});
}
// pre1 -> top -> middle1 & top -> middle2 = pre1 + top -> middle1 & -> middle2
{
graph.addVertices(middle2);
graph.addEdges(top, middle2);
final SeqGraph expected = new SeqGraph(11);
final SeqVertex pre1_top = new SeqVertex(pre1.getSequenceString() + top.getSequenceString());
expected.addVertices(pre1_top, middle1, middle2);
expected.addEdges(pre1_top, middle1);
expected.addEdges(pre1_top, middle2);
tests.add(new Object[]{graph.clone(), expected});
}
// An actual diamond event to merge!
{
graph.addVertices(bottom);
graph.addEdges(middle1, bottom);
graph.addEdges(middle2, bottom);
final SeqGraph expected = new SeqGraph(11);
final SeqVertex pre1_top = new SeqVertex(pre1.getSequenceString() + top.getSequenceString());
final SeqVertex newMiddle1 = new SeqVertex("G");
final SeqVertex newMiddle2 = new SeqVertex("T");
final SeqVertex newBottom = new SeqVertex("C" + bottom.getSequenceString());
expected.addVertices(pre1_top, newMiddle1, newMiddle2, newBottom);
expected.addEdges(pre1_top, newMiddle1, newBottom);
expected.addEdges(pre1_top, newMiddle2, newBottom);
tests.add(new Object[]{graph.clone(), expected.clone()});
graph.addVertices(middle3);
graph.addEdges(top, middle3, bottom);
final SeqVertex newMiddle3 = new SeqVertex("A");
expected.addVertices(newMiddle3);
expected.addEdges(pre1_top, newMiddle3, newBottom);
tests.add(new Object[]{graph.clone(), expected.clone()});
graph.addVertices(middle4);
graph.addEdges(top, middle4, bottom);
final SeqVertex newMiddle4 = new SeqVertex("GCA");
expected.addVertices(newMiddle4);
expected.addEdges(pre1_top, newMiddle4, newBottom);
tests.add(new Object[]{graph.clone(), expected.clone()});
}
{ // all the nodes -> lots of merging and motion of nodes
final SeqGraph all = new SeqGraph(11);
all.addVertices(pre1, pre2, top, middle1, middle2, bottom, tail1, tail2);
all.addEdges(pre1, top, middle1, bottom, tail1);
all.addEdges(pre2, top, middle2, bottom, tail2);
final SeqGraph expected = new SeqGraph(11);
SeqVertex newPre1 = new SeqVertex(Utils.dupString("A", SeqGraph.MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES) + "C");
SeqVertex newPre2 = new SeqVertex(Utils.dupString("A", SeqGraph.MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES) + "G");
final SeqVertex newTop = new SeqVertex("TA");
final SeqVertex newMiddle1 = new SeqVertex("G");
final SeqVertex newMiddle2 = new SeqVertex("T");
final SeqVertex newBottom = new SeqVertex("C" + bottom.getSequenceString());
expected.addVertices(newPre1, newPre2, newTop, newMiddle1, newMiddle2, newBottom, tail1, tail2);
expected.addEdges(newPre1, newTop, newMiddle1, newBottom, tail1);
expected.addEdges(newPre2, newTop, newMiddle2, newBottom, tail2);
tests.add(new Object[]{all.clone(), expected.clone()});
}
        // test the case where a middle node is removed entirely because the shared common sequence covers all of its bases
{
final SeqGraph graph2 = new SeqGraph(11);
final SeqVertex mytop = new SeqVertex("A");
final SeqVertex mid1 = new SeqVertex("AC");
final SeqVertex mid2 = new SeqVertex("C");
final SeqVertex bot = new SeqVertex("G");
graph2.addVertices(mytop, mid1, mid2, bot);
graph2.addEdges(mytop, mid1, bot);
graph2.addEdges(mytop, mid2, bot);
final SeqGraph expected = new SeqGraph(11);
final SeqVertex newMid1 = new SeqVertex("A");
final SeqVertex newBottom = new SeqVertex("CG");
expected.addVertices(mytop, newMid1, newBottom);
expected.addEdges(mytop, newMid1, newBottom);
expected.addEdges(mytop, newBottom);
tests.add(new Object[]{graph2, expected});
}
return tests.toArray(new Object[][]{});
}
@Test(dataProvider = "MergingData", enabled = !DEBUG)
public void testMerging(final SeqGraph graph, final SeqGraph expected) {
final SeqGraph merged = (SeqGraph)graph.clone();
merged.simplifyGraph(1);
try {
Assert.assertTrue(SeqGraph.graphEquals(merged, expected));
} catch (AssertionError e) {
// if ( ! SeqGraph.graphEquals(merged, expected) ) {
// graph.printGraph(new File("graph.dot"), 0);
// merged.printGraph(new File("merged.dot"), 0);
// expected.printGraph(new File("expected.dot"), 0);
// }
throw e;
}
}
// A -> ACT -> C [non-ref]
// A -> ACT -> C [non-ref]
// A -> ACT -> C [ref]
//
// Should become A -> ACT -> C [ref and non-ref edges]
//
@Test(enabled = !DEBUG)
public void testBubbleSameBasesWithRef() {
final SeqGraph graph = new SeqGraph(11);
final SeqVertex top = new SeqVertex("A");
final SeqVertex mid1 = new SeqVertex("ACT");
final SeqVertex mid2 = new SeqVertex("ACT");
final SeqVertex bot = new SeqVertex("C");
graph.addVertices(top, mid1, mid2, bot);
graph.addEdges(top, mid2, bot);
graph.addEdge(top, mid1, new BaseEdge(true, 1));
graph.addEdge(mid1, bot, new BaseEdge(true, 1));
final SeqGraph expected = new SeqGraph(11);
expected.addVertex(new SeqVertex("AACTC"));
final SeqGraph actual = ((SeqGraph)graph.clone());
actual.simplifyGraph();
Assert.assertTrue(BaseGraph.graphEquals(actual, expected), "Wrong merging result after complete merging");
}
@DataProvider(name = "LinearZipData")
public Object[][] makeLinearZipData() throws Exception {
List<Object[]> tests = new ArrayList<Object[]>();
SeqGraph graph = new SeqGraph(11);
SeqGraph expected = new SeqGraph(11);
// empty graph => empty graph
tests.add(new Object[]{graph.clone(), expected.clone()});
SeqVertex a1 = new SeqVertex("A");
SeqVertex c1 = new SeqVertex("C");
SeqVertex ac1 = new SeqVertex("AC");
        // two disconnected vertices are left as-is
graph.addVertices(a1, c1);
expected.addVertices(a1, c1);
tests.add(new Object[]{graph.clone(), expected.clone()});
graph.addEdges(a1, c1);
expected = new SeqGraph(11);
expected.addVertices(ac1);
tests.add(new Object[]{graph.clone(), expected.clone()});
        // a three-vertex chain is merged correctly
SeqVertex g1 = new SeqVertex("G");
graph.addVertices(g1);
graph.addEdges(c1, g1);
expected = new SeqGraph(11);
expected.addVertex(new SeqVertex("ACG"));
tests.add(new Object[]{graph.clone(), expected.clone()});
// adding something that isn't connected isn't a problem
SeqVertex t1 = new SeqVertex("T");
graph.addVertices(t1);
expected = new SeqGraph(11);
expected.addVertices(new SeqVertex("ACG"), new SeqVertex("T"));
tests.add(new Object[]{graph.clone(), expected.clone()});
// splitting chain with branch produces the correct zipped subgraphs
final SeqVertex a2 = new SeqVertex("A");
final SeqVertex c2 = new SeqVertex("C");
graph = new SeqGraph(11);
graph.addVertices(a1, c1, g1, t1, a2, c2);
graph.addEdges(a1, c1, g1, t1, a2);
graph.addEdges(g1, c2);
expected = new SeqGraph(11);
SeqVertex acg = new SeqVertex("ACG");
SeqVertex ta = new SeqVertex("TA");
expected.addVertices(acg, ta, c2);
expected.addEdges(acg, ta);
expected.addEdges(acg, c2);
tests.add(new Object[]{graph.clone(), expected.clone()});
// Can merge chains with loops in them
{
graph = new SeqGraph(11);
graph.addVertices(a1, c1, g1);
graph.addEdges(a1, c1, g1);
graph.addEdges(a1, a1);
expected = new SeqGraph(11);
SeqVertex ac = new SeqVertex("AC");
SeqVertex cg = new SeqVertex("CG");
expected.addVertices(a1, cg);
expected.addEdges(a1, cg);
expected.addEdges(a1, a1);
tests.add(new Object[]{graph.clone(), expected.clone()});
graph.removeEdge(a1, a1);
graph.addEdges(c1, c1);
tests.add(new Object[]{graph.clone(), graph.clone()});
graph.removeEdge(c1, c1);
graph.addEdges(g1, g1);
expected = new SeqGraph(11);
expected.addVertices(ac, g1);
expected.addEdges(ac, g1, g1);
tests.add(new Object[]{graph.clone(), expected.clone()});
}
// check building n element long chains
{
final List<String> bases = Arrays.asList("A", "C", "G", "T", "TT", "GG", "CC", "AA");
for ( final int len : Arrays.asList(1, 2, 10, 100, 1000)) {
graph = new SeqGraph(11);
expected = new SeqGraph(11);
SeqVertex last = null;
String expectedBases = "";
for ( int i = 0; i < len; i++ ) {
final String seq = bases.get(i % bases.size());
expectedBases += seq;
SeqVertex a = new SeqVertex(seq);
graph.addVertex(a);
if ( last != null ) graph.addEdge(last, a);
last = a;
}
expected.addVertex(new SeqVertex(expectedBases));
tests.add(new Object[]{graph.clone(), expected.clone()});
}
}
// check that edge connections are properly maintained
{
int edgeWeight = 1;
for ( final int nIncoming : Arrays.asList(0, 2, 5, 10) ) {
for ( final int nOutgoing : Arrays.asList(0, 2, 5, 10) ) {
graph = new SeqGraph(11);
expected = new SeqGraph(11);
graph.addVertices(a1, c1, g1);
graph.addEdges(a1, c1, g1);
expected.addVertex(acg);
for ( final SeqVertex v : makeVertices(nIncoming) ) {
final BaseEdge e = new BaseEdge(false, edgeWeight++);
graph.addVertices(v);
graph.addEdge(v, a1, e);
expected.addVertex(v);
expected.addEdge(v, acg, e);
}
for ( final SeqVertex v : makeVertices(nOutgoing) ) {
final BaseEdge e = new BaseEdge(false, edgeWeight++);
graph.addVertices(v);
graph.addEdge(g1, v, e);
expected.addVertex(v);
expected.addEdge(acg, v, e);
}
tests.add(new Object[]{graph, expected});
}
}
}
return tests.toArray(new Object[][]{});
}
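    // Produces n vertices with short repeating base strings; used above to attach arbitrary numbers
    // of incoming and outgoing edges around the chain being zipped.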
private List<SeqVertex> makeVertices(final int n) {
final List<SeqVertex> vs = new LinkedList<SeqVertex>();
final List<String> bases = Arrays.asList("A", "C", "G", "T", "TT", "GG", "CC", "AA");
for ( int i = 0; i < n; i++ )
vs.add(new SeqVertex(bases.get(i % bases.size())));
return vs;
}
@Test(dataProvider = "LinearZipData", enabled = true)
public void testLinearZip(final SeqGraph graph, final SeqGraph expected) {
final SeqGraph merged = (SeqGraph)graph.clone();
merged.zipLinearChains();
try {
Assert.assertTrue(SeqGraph.graphEquals(merged, expected));
} catch (AssertionError e) {
if ( ! SeqGraph.graphEquals(merged, expected) ) {
graph.printGraph(new File("graph.dot"), 0);
merged.printGraph(new File("merged.dot"), 0);
expected.printGraph(new File("expected.dot"), 0);
}
throw e;
}
}
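    // Regression test for the infinite simplification cycle observed in empirical runs (hence the
    // name); the test passes as long as simplifyGraph terminates within the 10 second timeout.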
@Test(timeOut = 10000)
public void testInfiniteCycleFromEmpiricalRuns() {
final SeqVertex v1 = new SeqVertex("CCCT");
final SeqVertex v2 = new SeqVertex("CATCCTCCCTTCTAGACTTCTCCTCCTCCTCCACCATCCTCCCCTCTAGACTTCTCCTCCTCCTCCACCATCCTCCCCTCTAGACTTCTCCTCCTCCTCC");
final SeqVertex v3 = new SeqVertex("CTAGACTTCTCCTCCTCCTCC");
final SeqVertex v4 = new SeqVertex("ACCATC");
final SeqVertex v5 = new SeqVertex("CCTCCACCATCCTCCCCTCTAGGCTTCTCCTCCTCCTCCACCATCCTCCCCTCTAGACTTCTCCTCCTCCTCCACCATCCTCCCCTCTAGACTTCTCCTCCTCCTCCACCATC");
final SeqVertex v6 = new SeqVertex("CTCCCCT");
final SeqGraph graph = new SeqGraph(11);
graph.addVertices(v1, v2, v3, v4, v5, v6);
graph.addEdges(v1, v3, v4, v6, v3);
graph.addEdges(v2, v4);
graph.addEdges(v5, v6);
graph.simplifyGraph();
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/afcalc/AFCalculatorImplementation.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper.afcalc;
import java.lang.reflect.Constructor;
import java.lang.reflect.Modifier;
import java.util.HashMap;
import java.util.Map;
/**
* Enumeration of the usable AF (allele frequency) calculation implementations and their constraints (e.g. ploidy).
*
* Note that the order in which the values are declared is the order of preference: when more than one
* implementation satisfies the request (e.g. its ploidy and alternative allele count), the one declared
* first is chosen.
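*
* <p>A minimal usage sketch (illustrative only, not taken from the original documentation; the argument
* values below are arbitrary examples):</p>
* <pre>{@code
* // pick the fastest implementation that can handle diploid samples with up to 6 alternative alleles
* final AFCalculatorImplementation impl = AFCalculatorImplementation.bestValue(2, 6, null);
* final AFCalculator calculator = impl.newInstance();
* }</pre>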
*/
public enum AFCalculatorImplementation {
/** Fast implementation for multi-allelics (equivalent to {@link #EXACT_REFERENCE} for biallelic sites) */
EXACT_INDEPENDENT(IndependentAllelesDiploidExactAFCalculator.class, 2),
/** reference implementation of multi-allelic EXACT model. Extremely slow for many alternate alleles */
EXACT_REFERENCE(ReferenceDiploidExactAFCalculator.class, 2),
/** original biallelic exact model, for testing only */
EXACT_ORIGINAL(OriginalDiploidExactAFCalculator.class, 2, 2),
/** implementation that supports any sample ploidy. Currently not available for the HaplotypeCaller */
EXACT_GENERAL_PLOIDY(GeneralPloidyExactAFCalculator.class),
/**
* Implementation of the {@link #EXACT_INDEPENDENT} model that supports any sample ploidy.
*/
EXACT_GENERAL_INDEPENDENT(IndependentAllelesExactAFCalculator.class);
/**
* Special max alt allele count indicating that this maximum is in fact unbound (can be anything).
*/
public final static int UNBOUND_ALTERNATIVE_ALLELE_COUNT = -1;
/**
* Special ploidy constant that indicates that in fact the ploidy is unbound (can be anything).
*/
public final static int UNBOUND_PLOIDY = -1;
private static Map<Class<? extends AFCalculator>,AFCalculatorImplementation> calculatorClassToValue = buildCalculatorClassToValueMap();
/**
* Reference to the calculator class.
*/
public final Class<? extends AFCalculator> calculatorClass;
/**
* Maximum number of supported alternative alleles.
*/
public final int maxAltAlleles;
/**
* Reference to the constructor to instantiate a calculator for this implementation.
*/
protected final Constructor<? extends AFCalculator> constructor;
/**
* Supported ploidy.
*
* This is equal to {@link #UNBOUND_PLOIDY} if the class can handle any ploidy.
*/
public final int requiredPloidy;
/**
* Reference to the default implementation.
*/
public final static AFCalculatorImplementation DEFAULT = EXACT_INDEPENDENT;
/**
* Constructs a new instance given all its properties
* @param clazz the calculator class that realizes this implementation.
* @param requiredPloidy the required ploidy; zero or greater or {@link #UNBOUND_PLOIDY} to indicate that any ploidy is supported.
* @param maxAltAlleles the maximum alternative alleles; zero or greater or {@link #UNBOUND_ALTERNATIVE_ALLELE_COUNT} to indicate that any maximum number of alternative alleles is supported.
*/
AFCalculatorImplementation(final Class<? extends AFCalculator> clazz, final int requiredPloidy, final int maxAltAlleles) {
calculatorClass = clazz;
this.requiredPloidy = requiredPloidy;
this.maxAltAlleles = maxAltAlleles;
this.constructor = findInstantiationConstructor(calculatorClass);
}
/**
* Constructs a new instance leaving ploidy and max-allele count unbound.
* @param clazz the calculator class that realizes this implementation.
*/
AFCalculatorImplementation(final Class<? extends AFCalculator> clazz) {
this(clazz,UNBOUND_PLOIDY, UNBOUND_ALTERNATIVE_ALLELE_COUNT);
}
/** Constructs a new instance leaving max-allele count unbound.
* @param clazz the calculator class that realizes this implementation.
* @param requiredPloidy the required ploidy; zero or greater or {@link #UNBOUND_PLOIDY} to indicate that any ploidy is supported.
*/
AFCalculatorImplementation(final Class<? extends AFCalculator> clazz, final int requiredPloidy) {
this(clazz, requiredPloidy, UNBOUND_ALTERNATIVE_ALLELE_COUNT);
}
/**
* Checks whether a given ploidy and max alternative alleles combination is supported or not.
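* <p>For instance (illustrative values only): {@code EXACT_ORIGINAL.usableForParams(2, 2)} is {@code true},
* while {@code EXACT_ORIGINAL.usableForParams(3, 2)} is {@code false}, since that implementation is restricted
* to diploid samples.</p>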
* @param requestedPloidy the targeted ploidy.
* @param requestedMaxAltAlleles the targeted max alternative alleles.
* @return {@code true} iff this calculator implementation satisfies both requirements.
*/
public boolean usableForParams(final int requestedPloidy, final int requestedMaxAltAlleles) {
return (requiredPloidy == UNBOUND_PLOIDY || requiredPloidy == requestedPloidy)
&& (maxAltAlleles == UNBOUND_ALTERNATIVE_ALLELE_COUNT || maxAltAlleles >= requestedMaxAltAlleles);
}
/**
* Resolve the constructor to use to instantiate calculators.
*
* @param clazz target class; assumed not to be {@code null}.
* @return the no-argument constructor used to instantiate calculators of this implementation; never {@code null}.
*/
private Constructor<? extends AFCalculator> findInstantiationConstructor(final Class<? extends AFCalculator> clazz) {
if (Modifier.isAbstract(clazz.getModifiers()))
throw new IllegalStateException("AF calculator implementation class cannot be abstract");
final Constructor<? extends AFCalculator> result;
try {
result = clazz.getDeclaredConstructor();
} catch (final NoSuchMethodException e) {
throw new IllegalStateException("cannot find a suitable (int,int) constructor for the AFCalculator implementation " + this + " class " + clazz.getName());
}
// Check whether there would be an issue calling the constructor purely due to access protection:
if (Modifier.isPrivate(result.getModifiers()) || (!Modifier.isPublic(result.getModifiers()) && !clazz.getPackage().equals(getClass().getPackage())))
throw new IllegalStateException("triple int constructor for AFCalculator implementation " + this + " class " + clazz.getName() + " is not public ");
return result;
}
/**
* Creates new instance.
*
* @throws IllegalStateException if the instance could not be created due to some exception. The {@link Exception#getCause() cause} will hold a reference to the actual exception.
* @return never {@code null}.
*/
public AFCalculator newInstance() {
try {
return constructor.newInstance();
} catch (final Throwable e) {
throw new IllegalStateException("could not instantiate AFCalculator for implementation " + this + " class " + calculatorClass.getName());
}
}
/**
* Returns the best (fastest) model given the required ploidy and alternative allele count.
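*
* <p>Example of the fall-back order (illustrative values): {@code bestValue(2, 4, EXACT_ORIGINAL)} cannot honor the
* preference, because {@link #EXACT_ORIGINAL} supports at most 2 alternative alleles, so it falls back to
* {@link #EXACT_INDEPENDENT}; {@code bestValue(4, 4, null)} skips every diploid-only implementation and returns
* {@link #EXACT_GENERAL_INDEPENDENT}.</p>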
*
* @param requiredPloidy required ploidy
* @param requiredAlternativeAlleleCount required alternative allele count.
* @param preferred a preferred mode, if any. A {@code null} indicates that the default should be tried instead.
* @return never {@code null}
*/
public static AFCalculatorImplementation bestValue(final int requiredPloidy, final int requiredAlternativeAlleleCount, final AFCalculatorImplementation preferred) {
final AFCalculatorImplementation preferredValue = preferred == null ? DEFAULT : preferred;
if (preferredValue.usableForParams(requiredPloidy,requiredAlternativeAlleleCount))
return preferredValue;
else if (EXACT_INDEPENDENT.usableForParams(requiredPloidy,requiredAlternativeAlleleCount))
return EXACT_INDEPENDENT;
else if (EXACT_REFERENCE.usableForParams(requiredPloidy,requiredAlternativeAlleleCount))
return EXACT_REFERENCE;
else if (EXACT_GENERAL_INDEPENDENT.usableForParams(requiredPloidy,requiredAlternativeAlleleCount))
return EXACT_GENERAL_INDEPENDENT;
else
return EXACT_GENERAL_PLOIDY;
}
/**
* Returns the value that corresponds to a given implementation calculator class.
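*
* <p>For example, {@code fromCalculatorClass(ReferenceDiploidExactAFCalculator.class)} returns {@link #EXACT_REFERENCE}.</p>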
*
* @param clazz the target class.
*
* @throws IllegalArgumentException if {@code clazz} is {@code null}.
* @throws IllegalStateException if {@code clazz} is not the calculator class of any registered implementation.
*
* @return never {@code null}.
*/
public static AFCalculatorImplementation fromCalculatorClass(final Class<? extends AFCalculator> clazz) {
if (clazz == null)
throw new IllegalArgumentException("input class cannot be null");
final AFCalculatorImplementation result = calculatorClassToValue.get(clazz);
if (result == null)
throw new IllegalStateException("Attempt to retrieve AFCalculatorImplementation instance from a non-registered calculator class " + clazz.getName());
return result;
}
// Initializes the content of the class to value map.
private static Map<Class<? extends AFCalculator>, AFCalculatorImplementation> buildCalculatorClassToValueMap() {
final Map<Class<? extends AFCalculator>,AFCalculatorImplementation> result = new HashMap<>(values().length);
for (final AFCalculatorImplementation value : values())
if (result.put(value.calculatorClass,value) != null)
throw new IllegalStateException("more than one value associated with class " + value.calculatorClass.getName());
return result;
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/HaplotypeScore.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.annotator;
import org.apache.log4j.Logger;
import org.broadinstitute.gatk.tools.walkers.genotyper.UnifiedGenotyper;
import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
import org.broadinstitute.gatk.utils.contexts.AlignmentContextUtils;
import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.ActiveRegionBasedAnnotation;
import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.AnnotatorCompatible;
import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.InfoFieldAnnotation;
import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.StandardAnnotation;
import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
import org.broadinstitute.gatk.utils.BaseUtils;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.QualityUtils;
import htsjdk.variant.vcf.VCFInfoHeaderLine;
import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
import org.broadinstitute.gatk.utils.pileup.PileupElement;
import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.VariantContext;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import org.broadinstitute.gatk.utils.variant.GATKVCFHeaderLines;
import java.io.Serializable;
import java.util.*;
/**
* Consistency of the site with strictly two segregating haplotypes
*
* <p>For diploid organisms, barring chromosomal abnormalities, we expect that any given sample has no more than 2 segregating haplotypes at a given site. If there is evidence for more
* than 2 segregating haplotypes, the read data should be considered suspect and the evidence artifactual. Higher scores are indicative of regions with bad alignments, typically leading to artifactual SNP and indel calls.</p>
*
* <h3>Caveats</h3>
* <p>HaplotypeCaller does not output this annotation because it already evaluates haplotype segregation internally. This annotation is only informative (and available) for variants called by Unified Genotyper.</p>
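*
* <p>The value is emitted as a single floating-point INFO field under the key returned by {@link #getKeyNames()},
* formatted to four decimal places (an illustrative entry would look like {@code HaplotypeScore=1.8430}).</p>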
*/
public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnotation, ActiveRegionBasedAnnotation {
private final static Logger logger = Logger.getLogger(HaplotypeScore.class);
private boolean walkerIdentityCheckWarningLogged = false;
private final static boolean DEBUG = false;
private final static int MIN_CONTEXT_WING_SIZE = 10;
private final static int MAX_CONSENSUS_HAPLOTYPES_TO_CONSIDER = 50;
private final static char REGEXP_WILDCARD = '.';
@Override
public Map<String, Object> annotate(final RefMetaDataTracker tracker,
final AnnotatorCompatible walker,
final ReferenceContext ref,
final Map<String, AlignmentContext> stratifiedContexts,
final VariantContext vc,
final Map<String, PerReadAlleleLikelihoodMap> stratifiedPerReadAlleleLikelihoodMap) {
// Can only call from UnifiedGenotyper
if ( !(walker instanceof UnifiedGenotyper) ) {
synchronized (this) {
if (!walkerIdentityCheckWarningLogged) {
if (walker != null)
logger.warn("Annotation will not be calculated, must be called from UnifiedGenotyper, not " + walker.getClass().getSimpleName());
else
logger.warn("Annotation will not be calculated, must be called from UnifiedGenotyper");
walkerIdentityCheckWarningLogged = true;
}
}
return null;
}
if (vc.isSNP() && stratifiedContexts != null)
return annotatePileup(ref, stratifiedContexts, vc);
else
return null;
}
private Map<String, Object> annotatePileup(final ReferenceContext ref,
final Map<String, AlignmentContext> stratifiedContexts,
final VariantContext vc) {
if (stratifiedContexts.isEmpty()) // empty means that call was made by someone else and we have no data here
return null;
final AlignmentContext context = AlignmentContextUtils.joinContexts(stratifiedContexts.values());
final int contextWingSize = Math.min((ref.getWindow().size() - 1) / 2, MIN_CONTEXT_WING_SIZE);
final int contextSize = contextWingSize * 2 + 1;
final int locus = ref.getLocus().getStart() + (ref.getLocus().getStop() - ref.getLocus().getStart()) / 2;
final ReadBackedPileup pileup = context.getBasePileup();
// Compute all haplotypes consistent with the current read pileup
final List<Haplotype> haplotypes = computeHaplotypes(pileup, contextSize, locus, vc);
final MathUtils.RunningAverage scoreRA = new MathUtils.RunningAverage();
if (haplotypes != null) {
for (final Genotype genotype : vc.getGenotypes()) {
final AlignmentContext thisContext = stratifiedContexts.get(genotype.getSampleName());
if (thisContext != null) {
final ReadBackedPileup thisPileup = thisContext.getBasePileup();
scoreRA.add(scoreReadsAgainstHaplotypes(haplotypes, thisPileup, contextSize, locus)); // Take the simple average of all samples' scores, since the scores can be negative and an RMS wouldn't make sense
}
}
}
// annotate the score in the info field
final Map<String, Object> map = new HashMap<>();
map.put(getKeyNames().get(0), String.format("%.4f", scoreRA.mean()));
return map;
}
private static class HaplotypeComparator implements Comparator<Haplotype>, Serializable {
public int compare(Haplotype a, Haplotype b) {
if (a.getQualitySum() < b.getQualitySum())
return 1;
if (a.getQualitySum() > b.getQualitySum()) {
return -1;
}
return 0;
}
}
private List<Haplotype> computeHaplotypes(final ReadBackedPileup pileup, final int contextSize, final int locus, final VariantContext vc) {
// Compute all possible haplotypes consistent with current pileup
int haplotypesToCompute = vc.getAlternateAlleles().size() + 1;
final PriorityQueue<Haplotype> candidateHaplotypeQueue = new PriorityQueue<>(100, new HaplotypeComparator());
final PriorityQueue<Haplotype> consensusHaplotypeQueue = new PriorityQueue<>(MAX_CONSENSUS_HAPLOTYPES_TO_CONSIDER, new HaplotypeComparator());
for (final PileupElement p : pileup) {
final Haplotype haplotypeFromRead = getHaplotypeFromRead(p, contextSize, locus);
if ( haplotypeFromRead != null )
candidateHaplotypeQueue.add(haplotypeFromRead);
}
// Now that the priority queue has been built from all reads at this context, merge the candidates to find the possible segregating haplotypes
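// For each candidate (polled in decreasing order of quality sum) we either: (a) fold it into the first
// compatible consensus haplotype, replacing that entry when the merged consensus has a higher quality sum;
// (b) add it as a new consensus haplotype while there is still room; or (c) replace the last consensus
// haplotype it was compared against if the candidate outscores it.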
Haplotype elem;
while ((elem = candidateHaplotypeQueue.poll()) != null) {
boolean foundHaplotypeMatch = false;
Haplotype lastCheckedHaplotype = null;
for (final Haplotype haplotypeFromList : consensusHaplotypeQueue) {
final Haplotype consensusHaplotype = getConsensusHaplotype(elem, haplotypeFromList);
if (consensusHaplotype != null) {
foundHaplotypeMatch = true;
if (consensusHaplotype.getQualitySum() > haplotypeFromList.getQualitySum()) {
consensusHaplotypeQueue.remove(haplotypeFromList);
consensusHaplotypeQueue.add(consensusHaplotype);
}
break;
} else {
lastCheckedHaplotype = haplotypeFromList;
}
}
if (!foundHaplotypeMatch && consensusHaplotypeQueue.size() < MAX_CONSENSUS_HAPLOTYPES_TO_CONSIDER) {
consensusHaplotypeQueue.add(elem);
} else if (!foundHaplotypeMatch && lastCheckedHaplotype != null && elem.getQualitySum() > lastCheckedHaplotype.getQualitySum()) {
consensusHaplotypeQueue.remove(lastCheckedHaplotype);
consensusHaplotypeQueue.add(elem);
}
}
// Now retrieve the N most popular haplotypes
if (consensusHaplotypeQueue.size() > 0) {
// The consensus haplotypes are in a quality-ordered priority queue, so the best haplotypes are just the ones at the front of the queue
final Haplotype haplotype1 = consensusHaplotypeQueue.poll();
List<Haplotype> hlist = new ArrayList<>();
hlist.add(new Haplotype(haplotype1.getBases(), 60));
for (int k = 1; k < haplotypesToCompute; k++) {
Haplotype haplotype2 = consensusHaplotypeQueue.poll();
if (haplotype2 == null) {
haplotype2 = haplotype1;
} // Sometimes only the reference haplotype can be found
hlist.add(new Haplotype(haplotype2.getBases(), 20));
}
return hlist;
} else
return null;
}
/**
* Return a haplotype object constructed from the read, or null if the read's cigar is null
*
* @param p pileup element representing the read
* @param contextSize the context size to use
* @param locus the position
* @return possibly null Haplotype object constructed from the read
*/
private Haplotype getHaplotypeFromRead(final PileupElement p, final int contextSize, final int locus) {
final GATKSAMRecord read = p.getRead();
if ( read.getCigar() == null )
return null;
final byte[] haplotypeBases = new byte[contextSize];
Arrays.fill(haplotypeBases, (byte) REGEXP_WILDCARD);
final byte[] baseQualities = new byte[contextSize];
Arrays.fill(baseQualities, (byte)0);
byte[] readBases = read.getReadBases();
readBases = AlignmentUtils.readToAlignmentByteArray(read.getCigar(), readBases); // Adjust the read bases based on the Cigar string
byte[] readQuals = read.getBaseQualities();
readQuals = AlignmentUtils.readToAlignmentByteArray(read.getCigar(), readQuals); // Shift the location of the qual scores based on the Cigar string
final int readOffsetFromPileup = AlignmentUtils.calcAlignmentByteArrayOffset(read.getCigar(), p, read.getAlignmentStart(), locus);
final int baseOffsetStart = readOffsetFromPileup - (contextSize - 1) / 2;
for (int i = 0; i < contextSize; i++) {
final int baseOffset = i + baseOffsetStart;
if (baseOffset < 0) {
continue;
}
if (baseOffset >= readBases.length) {
break;
}
if (readQuals[baseOffset] == PileupElement.DELETION_BASE) {
readQuals[baseOffset] = PileupElement.DELETION_QUAL;
}
if (!BaseUtils.isRegularBase(readBases[baseOffset])) {
readBases[baseOffset] = (byte) REGEXP_WILDCARD;
readQuals[baseOffset] = (byte) 0;
} // N's shouldn't be treated as distinct bases
readQuals[baseOffset] = (byte) Math.min((int) readQuals[baseOffset], p.getMappingQual());
if (((int) readQuals[baseOffset]) < 5) {
readQuals[baseOffset] = (byte) 0;
} // quals less than 5 are used as codes and don't have actual probabilistic meaning behind them
haplotypeBases[i] = readBases[baseOffset];
baseQualities[i] = readQuals[baseOffset];
}
return new Haplotype(haplotypeBases, baseQualities);
}
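// Two haplotypes can be merged into a consensus only if they agree at every position where neither has a
// wildcard; at a position where exactly one has a wildcard the consensus takes the known base and its quality,
// and where both bases are known (and equal) the qualities are summed.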
private Haplotype getConsensusHaplotype(final Haplotype haplotypeA, final Haplotype haplotypeB) {
final byte[] a = haplotypeA.getBases();
final byte[] b = haplotypeB.getBases();
if (a.length != b.length) {
throw new ReviewedGATKException("Haplotypes a and b must be of same length");
}
byte chA, chB;
final byte wc = (byte) REGEXP_WILDCARD;
final int length = a.length;
final byte[] consensusChars = new byte[length];
final int[] consensusQuals = new int[length];
final int[] qualsA = haplotypeA.getQuals();
final int[] qualsB = haplotypeB.getQuals();
for (int i = 0; i < length; i++) {
chA = a[i];
chB = b[i];
if ((chA != chB) && (chA != wc) && (chB != wc))
return null;
if ((chA == wc) && (chB == wc)) {
consensusChars[i] = wc;
consensusQuals[i] = 0;
} else if ((chA == wc)) {
consensusChars[i] = chB;
consensusQuals[i] = qualsB[i];
} else if ((chB == wc)) {
consensusChars[i] = chA;
consensusQuals[i] = qualsA[i];
} else {
consensusChars[i] = chA;
consensusQuals[i] = qualsA[i] + qualsB[i];
}
}
return new Haplotype(consensusChars, consensusQuals);
}
// calculate the haplotype scores by walking over all reads and comparing them to the haplotypes
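// Each read contributes the score of the haplotype it matches best (the minimum over the per-haplotype scores),
// so pileups that are consistent with the retained haplotypes accumulate a small overall score.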
private double scoreReadsAgainstHaplotypes(final List<Haplotype> haplotypes, final ReadBackedPileup pileup, final int contextSize, final int locus) {
if (DEBUG) System.out.printf("HAP1: %s%n", haplotypes.get(0));
if (DEBUG) System.out.printf("HAP2: %s%n", haplotypes.get(1));
final ArrayList<double[]> haplotypeScores = new ArrayList<>();
for (final PileupElement p : pileup) {
// Score all the reads in the pileup, even the filtered ones
final double[] scores = new double[haplotypes.size()];
for (int i = 0; i < haplotypes.size(); i++) {
final Haplotype haplotype = haplotypes.get(i);
final double score = scoreReadAgainstHaplotype(p, contextSize, haplotype, locus);
scores[i] = score;
if (DEBUG) {
System.out.printf(" vs. haplotype %d = %f%n", i, score);
}
}
haplotypeScores.add(scores);
}
double overallScore = 0.0;
for (final double[] readHaplotypeScores : haplotypeScores) {
overallScore += MathUtils.arrayMin(readHaplotypeScores);
}
return overallScore;
}
private double scoreReadAgainstHaplotype(final PileupElement p, final int contextSize, final Haplotype haplotype, final int locus) {
double expected = 0.0;
double mismatches = 0.0;
final GATKSAMRecord read = p.getRead();
if ( read.getCigar() == null )
return 0.0;
// What's the expected mismatch rate under the model that this read was actually sampled from
// this haplotype? If each observed base comes from the consensus base c with a per-base error rate e,
// then e is the rate at which we'd see a miscalled c, so the expected number of mismatches
// is just sum_i e_i for i from 1..n over the n sites.
//
// Now, what's the probability-weighted count of observed mismatches? Suppose the observed base b equals c.
// The site could still be a true mismatch that happened to be miscalled back to c; each of the three other
// true bases would be miscalled as c at a rate of e / 3, so the weighted contribution is roughly e.
// If b != c, the only way the site is not a true mismatch is if a true c was miscalled as this particular b,
// which happens at a rate of e / 3, so the weighted contribution is 1 - e / 3. The probability-weighted
// mismatch count is therefore sum_i ( matched ? e_i : 1 - e_i / 3 ) for i = 1 ... n, as accumulated below.
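// A quick sanity check with e = 0.01 (a Q20 base, value chosen purely for illustration): a base that matches
// the haplotype adds 0.01 to both the expected and the weighted-observed counts (net contribution 0), while a
// mismatching base adds 0.01 expected but 1 - 0.01/3 = 0.9967 observed, a net penalty of roughly 0.99.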
final byte[] haplotypeBases = haplotype.getBases();
byte[] readBases = read.getReadBases();
readBases = AlignmentUtils.readToAlignmentByteArray(p.getRead().getCigar(), readBases); // Adjust the read bases based on the Cigar string
byte[] readQuals = read.getBaseQualities();
readQuals = AlignmentUtils.readToAlignmentByteArray(p.getRead().getCigar(), readQuals); // Shift the location of the qual scores based on the Cigar string
int readOffsetFromPileup = AlignmentUtils.calcAlignmentByteArrayOffset(p.getRead().getCigar(), p, read.getAlignmentStart(), locus);
final int baseOffsetStart = readOffsetFromPileup - (contextSize - 1) / 2;
for (int i = 0; i < contextSize; i++) {
final int baseOffset = i + baseOffsetStart;
if (baseOffset < 0) {
continue;
}
if (baseOffset >= readBases.length) {
break;
}
final byte haplotypeBase = haplotypeBases[i];
final byte readBase = readBases[baseOffset];
final boolean matched = (readBase == haplotypeBase || haplotypeBase == (byte) REGEXP_WILDCARD);
byte qual = readQuals[baseOffset];
if (qual == PileupElement.DELETION_BASE) {
qual = PileupElement.DELETION_QUAL;
} // calcAlignmentByteArrayOffset fills the readQuals array with DELETION_BASE at deletions
qual = (byte) Math.min((int) qual, p.getMappingQual());
if (((int) qual) >= 5) { // quals less than 5 are used as codes and don't have actual probabilistic meaning behind them
final double e = QualityUtils.qualToErrorProb(qual);
expected += e;
mismatches += matched ? e : 1.0 - e / 3.0;
}
// a more sophisticated calculation would include the reference quality, but it's nice to actually penalize
// the mismatching of poorly determined regions of the consensus
}
return mismatches - expected;
}
@Override
public List<String> getKeyNames() {
return Arrays.asList(GATKVCFConstants.HAPLOTYPE_SCORE_KEY);
}
@Override
public List<VCFInfoHeaderLine> getDescriptions() {
return Arrays.asList(GATKVCFHeaderLines.getInfoLine(getKeyNames().get(0)));
}
private static class Haplotype {
private final byte[] bases;
private final int[] quals;
private int qualitySum = -1;
public Haplotype( final byte[] bases, final int[] quals ) {
this.bases = bases;
this.quals = quals;
}
public Haplotype( final byte[] bases, final int qual ) {
this.bases = bases;
quals = new int[bases.length];
Arrays.fill(quals, qual);
}
public Haplotype( final byte[] bases, final byte[] quals ) {
this.bases = bases;
this.quals = new int[quals.length];
for ( int i = 0 ; i < quals.length; i++ )
this.quals[i] = (int)quals[i];
}
public double getQualitySum() {
if ( qualitySum == -1 ) {
qualitySum = 0;
for ( final int qual : quals ) {
qualitySum += qual;
}
}
return qualitySum;
}
public int[] getQuals() {
return quals.clone();
}
public byte[] getBases() {
return bases.clone();
}
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/cancer/m2/TumorPowerCalculator.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.cancer.m2;
import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.BinomialDistribution;
import org.apache.commons.math.distribution.BinomialDistributionImpl;
import org.apache.commons.math3.util.Pair;
import org.broadinstitute.gatk.utils.exceptions.GATKException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.OptionalInt;
import java.util.stream.IntStream;
/**
* We store a memo to avoid repeated computation of statistical power to detect a variant.
* The key of the memo is a pair of numbers: number of reads and estimated allele fraction
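*
* <p>A minimal usage sketch (the constructor arguments below are illustrative values, not recommended defaults):</p>
* <pre>{@code
* final TumorPowerCalculator powerCalc = new TumorPowerCalculator(0.001, 6.3, 0.0);
* final double power = powerCalc.cachedPowerCalculation(30, 0.1); // power at 30 reads and 10% allele fraction
* }</pre>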
*/
public class TumorPowerCalculator {
private final double errorProbability;
private final double tumorLODThreshold;
private final double contamination;
private final boolean enableSmoothing;
private final HashMap<PowerCacheKey, Double> cache = new HashMap<PowerCacheKey, Double>();
public TumorPowerCalculator(double errorProbability, double constantLodThreshold, double contamination) {
this(errorProbability, constantLodThreshold, contamination, true);
}
public TumorPowerCalculator(double errorProbability, double tumorLODThreshold, double contamination, boolean enableSmoothing) {
this.errorProbability = errorProbability;
this.tumorLODThreshold = tumorLODThreshold;
this.contamination = contamination;
this.enableSmoothing = enableSmoothing;
}
/**
* A helper class that acts as the key to the memo of pre-computed power
*
* TODO: Not ideal to use double as a key. Refactor such that we use as keys numAlts and numReads, which are integers. Then calculate numAlts/numReads when we need allele fraction.
*
*/
private static class PowerCacheKey extends Pair<Integer, Double> {
private final Double alleleFraction;
private final Integer numReads;
public PowerCacheKey(final int numReads, final double alleleFraction) {
super(numReads, alleleFraction);
this.alleleFraction = alleleFraction;
this.numReads = numReads;
}
private boolean closeEnough(final double x, final double y, final double epsilon){
return(Math.abs(x - y) < epsilon);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PowerCacheKey that = (PowerCacheKey) o;
return (closeEnough(alleleFraction, that.alleleFraction, 0.001) && numReads.equals(that.numReads));
}
@Override
public int hashCode() {
int result;
long temp;
result = numReads;
temp = alleleFraction != +0.0d ? Double.doubleToLongBits(alleleFraction) : 0L;
result = 31 * result + (int) (temp ^ (temp >>> 32));
return result;
}
}
/**
*
* @param numReads total number of reads, REF and ALT combined, in + or - strand
* @param alleleFraction the true allele fraction estimated as the combined allele fraction from + and - reads
* @return probability of correctly calling the variant (i.e. power) given the estimated allele fraction and
*         number of reads; power is computed separately for each strand (+ and -)
*
*/
public double cachedPowerCalculation(final int numReads, final double alleleFraction) {
PowerCacheKey key = new PowerCacheKey(numReads, alleleFraction);
// we first look up whether the power for the given number of reads and allele fraction has already been computed and stored in the cache;
// if not, we compute it and store it in the cache.
Double power = cache.get(key);
if (power == null) {
try {
power = calculatePower(numReads, alleleFraction);
} catch (final Exception ex) {
throw new GATKException("Power calculation failed", ex);
}
cache.put(key, power);
}
return power;
}
/* helper function for calculateTumorLod */
private double calculateLogLikelihood(final int numReads, final int numAlts, final double alleleFraction) {
return((numReads-numAlts) * Math.log10( alleleFraction * errorProbability + (1 - alleleFraction)*(1 - errorProbability) ) +
numAlts * Math.log10(alleleFraction * (1 - errorProbability) + (1 - alleleFraction) * errorProbability));
}
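// The tumor LOD is the log10 likelihood of the observed ALT count under its maximum-likelihood allele fraction
// minus the log10 likelihood under the null model in which the apparent allele fraction is just the contamination.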
private double calculateTumorLod(final int numReads, final int numAlts) {
final double alleleFraction = (double) numAlts / (double) numReads;
final double altLikelihood = calculateLogLikelihood(numReads, numAlts, alleleFraction);
final double refLikelihood = calculateLogLikelihood(numReads, numAlts, contamination);
return (altLikelihood - refLikelihood);
}
private double calculatePower(final int numReads, final double alleleFraction) throws MathException {
if (numReads==0) return 0;
// a sequencing error is equally likely to produce any of the three non-reference bases, hence the factor of 1.0 / 3.0
final double probAltRead = alleleFraction * (1 - errorProbability) + (1.0 / 3.0) * (1 - alleleFraction) * errorProbability;
final BinomialDistribution binom = new BinomialDistributionImpl(numReads, probAltRead);
final double[] binomialProbabilities = IntStream.range(0, numReads + 1).mapToDouble(binom::probability).toArray();
// find the smallest number of ALT reads k such that tumorLOD(k) > tumorLODThreshold
final OptionalInt smallestKAboveLogThreshold = IntStream.range(0, numReads + 1)
.filter(k -> calculateTumorLod(numReads, k) > tumorLODThreshold)
.findFirst();
if (! smallestKAboveLogThreshold.isPresent()){
return 0;
}
if (smallestKAboveLogThreshold.getAsInt() <= 0){
throw new IllegalStateException("smallest k that meets the tumor LOD threshold is less than or equal to 0");
}
double power = Arrays.stream(binomialProbabilities, smallestKAboveLogThreshold.getAsInt(), binomialProbabilities.length).sum();
// here we correct for the fact that the exact LOD threshold likely falls somewhere between
// the k-1 and k bins, so we prorate the power contribution from the k-1 bin
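// e.g. (illustrative numbers) if the threshold sits 70% of the way from tumorLOD(k-1) to tumorLOD(k),
// then weight = 0.3 and 30% of the k-1 bin's binomial probability is credited towards the power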
if ( enableSmoothing ){
final double tumorLODAtK = calculateTumorLod(numReads, smallestKAboveLogThreshold.getAsInt());
final double tumorLODAtKMinusOne = calculateTumorLod(numReads, smallestKAboveLogThreshold.getAsInt()-1);
final double weight = 1 - (tumorLODThreshold - tumorLODAtKMinusOne ) / (tumorLODAtK - tumorLODAtKMinusOne);
power += weight * binomialProbabilities[smallestKAboveLogThreshold.getAsInt() - 1];
}
return(power);
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/bqsr/BaseRecalibrator.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.bqsr;
import htsjdk.samtools.CigarElement;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.reference.ReferenceSequenceFile;
import htsjdk.tribble.Feature;
import org.broadinstitute.gatk.engine.recalibration.*;
import org.broadinstitute.gatk.engine.walkers.*;
import org.broadinstitute.gatk.utils.commandline.Advanced;
import org.broadinstitute.gatk.utils.commandline.Argument;
import org.broadinstitute.gatk.utils.commandline.ArgumentCollection;
import org.broadinstitute.gatk.engine.CommandLineGATK;
import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
import org.broadinstitute.gatk.engine.filters.*;
import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.BaseUtils;
import org.broadinstitute.gatk.utils.baq.BAQ;
import org.broadinstitute.gatk.utils.clipping.ReadClipper;
import org.broadinstitute.gatk.utils.collections.Pair;
import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
import org.broadinstitute.gatk.utils.help.HelpConstants;
import org.broadinstitute.gatk.utils.recalibration.*;
import org.broadinstitute.gatk.engine.recalibration.covariates.Covariate;
import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
import org.broadinstitute.gatk.utils.sam.ReadUtils;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Detect systematic errors in base quality scores
*
* <p>
* Variant calling algorithms rely heavily on the quality scores assigned to the individual base calls in each sequence
* read. These scores are per-base estimates of error emitted by the sequencing machines. Unfortunately the scores
* produced by the machines are subject to various sources of systematic technical error, leading to over- or
* under-estimated base quality scores in the data. Base quality score recalibration (BQSR) is a process in which we
* apply machine learning to model these errors empirically and adjust the quality scores accordingly. This allows us
* to get more accurate base qualities, which in turn improves the accuracy of our variant calls.
*
* The base recalibration process involves two key steps: first the program builds a model of covariation based on the
* data and a set of known variants (which you can bootstrap if there is none available for your organism), then it
* adjusts the base quality scores in the data based on the model.
*
* There is an optional but highly recommended step that involves building a second model and generating before/after
* plots to visualize the effects of the recalibration process. This is useful for quality control purposes.
*
* This tool performs the first step described above: it builds the model of covariation and produces the recalibration
* table. It operates only at sites that are not in dbSNP; we assume that all reference mismatches we see are therefore
* errors and indicative of poor base quality. This tool generates tables based on various user-specified covariates
* (such as read group, reported quality score, cycle, and context). Assuming we are working with a large amount of data,
* we can then calculate an empirical probability of error given the particular covariates seen at this site,
* where p(error) = num mismatches / num observations.
*
* The output file is a table (of the several covariate values, number of observations, number of mismatches, empirical
* quality score).
* </p>
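 *
 * <p>
 * As a rough illustration (the numbers here are hypothetical, not taken from the tool's documentation): if a given
 * combination of covariates covers 100,000 observed bases of which 100 mismatch the reference at non-dbSNP sites,
 * then p(error) = 100 / 100,000 = 0.001, which on the Phred scale corresponds to an empirical quality of
 * -10 * log10(0.001) = Q30, regardless of the quality scores the machine originally reported.
 * </p>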
*
* <h3>Inputs</h3>
 * <p>
 * A BAM file containing data that needs to be recalibrated.
 * </p>
 * <p>
 * A database of known polymorphic sites to mask out.
 * </p>
*
* <h3>Output</h3>
* <p>A GATKReport file with many tables:</p>
* <ul>
* <li>The list of arguments</li>
* <li>The quantized qualities table</li>
* <li>The recalibration table by read group</li>
* <li>The recalibration table by quality score</li>
* <li>The recalibration table for all the optional covariates</li>
* </ul>
 * <p>
* The GATKReport table format is intended to be easy to read by both humans and computer languages (especially R).
* Check out the documentation of the GATKReport (in the FAQs) to learn how to manipulate this table.
* </p>
*
* <h3>Usage example</h3>
* <pre>
* java -jar GenomeAnalysisTK.jar \
* -T BaseRecalibrator \
* -R reference.fasta \
* -I my_reads.bam \
* -knownSites latest_dbsnp.vcf \
* -o recal_data.table
* </pre>
*
* <h3>Notes</h3>
 * <ul><li>This *base* recalibration process should not be confused with *variant* recalibration, which is a
 * sophisticated filtering technique applied on the variant callset produced in a later step of the analysis workflow.</li>
* <li>ReadGroupCovariate and QualityScoreCovariate are required covariates and will be added regardless of whether
* or not they were specified.</li></ul>
*
*/
@DocumentedGATKFeature(groupName = HelpConstants.DOCS_CAT_DATA, extraDocs = {CommandLineGATK.class})
@BAQMode(ApplicationTime = ReadTransformer.ApplicationTime.FORBIDDEN)
@ReadFilters({MappingQualityZeroFilter.class, MappingQualityUnavailableFilter.class, UnmappedReadFilter.class, NotPrimaryAlignmentFilter.class, DuplicateReadFilter.class, FailsVendorQualityCheckFilter.class})
@PartitionBy(PartitionType.READ)
public class BaseRecalibrator extends ReadWalker<Long, Long> implements NanoSchedulable {
/**
* all the command line arguments for BQSR and its covariates
*/
@ArgumentCollection
private final RecalibrationArgumentCollection RAC = new RecalibrationArgumentCollection();
/**
* When you use nct > 1, BQSR uses nct times more memory to compute its recalibration tables, for efficiency
* purposes. If you have many covariates, and therefore are using a lot of memory, you can use this flag
* to safely access only one table. There may be some CPU cost, but as long as the table is really big
* the cost should be relatively reasonable.
*/
@Argument(fullName = "lowMemoryMode", shortName="lowMemoryMode", doc="Reduce memory usage in multi-threaded code at the expense of threading efficiency", required = false)
public boolean lowMemoryMode = false;
@Advanced
@Argument(fullName = "bqsrBAQGapOpenPenalty", shortName="bqsrBAQGOP", doc="BQSR BAQ gap open penalty (Phred Scaled). Default value is 40. 30 is perhaps better for whole genome call sets", required = false)
public double BAQGOP = BAQ.DEFAULT_GOP;
/**
* an object that keeps track of the information necessary for quality score quantization
*/
private QuantizationInfo quantizationInfo;
/**
     * list to hold all the covariate objects that were requested (required + standard + experimental)
*/
private Covariate[] requestedCovariates;
private RecalibrationEngine recalibrationEngine;
private int minimumQToUse;
private static final String NO_DBSNP_EXCEPTION = "This calculation is critically dependent on being able to mask out known variant sites. Please provide a VCF file containing known sites of genetic variation.";
private BAQ baq; // BAQ the reads on the fly to generate the alignment uncertainty vector
private ReferenceSequenceFile referenceReader; // fasta reference reader for use with BAQ calculation
private final static byte NO_BAQ_UNCERTAINTY = (byte)'@';
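    // Note (added for clarity): '@' is ASCII 64; under the SAM BQ-tag convention stored values carry a +64 offset,
    // so a stored 64 denotes a BAQ offset of zero, i.e. no alignment uncertainty at that base.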
/**
* Parse the -cov arguments and create a list of covariates to be used here
     * Based on the covariates' estimates for initial capacity, allocate the data hashmap.
*/
public void initialize() {
baq = new BAQ(BAQGOP); // setup the BAQ object with the provided gap open penalty
if (RAC.FORCE_PLATFORM != null)
RAC.DEFAULT_PLATFORM = RAC.FORCE_PLATFORM;
if (RAC.knownSites.isEmpty() && !RAC.RUN_WITHOUT_DBSNP) // Warn the user if no dbSNP file or other variant mask was specified
throw new UserException.CommandLineException(NO_DBSNP_EXCEPTION);
if (RAC.LIST_ONLY) {
RecalUtils.listAvailableCovariates(logger);
System.exit(0);
}
RAC.existingRecalibrationReport = getToolkit().getArguments().BQSR_RECAL_FILE; // if we have a recalibration file, record it so it goes on the report table
Pair<ArrayList<Covariate>, ArrayList<Covariate>> covariates = RecalUtils.initializeCovariates(RAC); // initialize the required and optional covariates
ArrayList<Covariate> requiredCovariates = covariates.getFirst();
ArrayList<Covariate> optionalCovariates = covariates.getSecond();
requestedCovariates = new Covariate[requiredCovariates.size() + optionalCovariates.size()];
int covariateIndex = 0;
for (final Covariate covariate : requiredCovariates)
requestedCovariates[covariateIndex++] = covariate;
for (final Covariate covariate : optionalCovariates)
requestedCovariates[covariateIndex++] = covariate;
logger.info("The covariates being used here: ");
for (Covariate cov : requestedCovariates) { // list all the covariates being used
logger.info("\t" + cov.getClass().getSimpleName());
cov.initialize(RAC); // initialize any covariate member variables using the shared argument collection
}
try {
RAC.RECAL_TABLE = new PrintStream(RAC.RECAL_TABLE_FILE);
} catch (IOException e) {
throw new UserException.CouldNotCreateOutputFile(RAC.RECAL_TABLE_FILE, e);
}
initializeRecalibrationEngine();
minimumQToUse = getToolkit().getArguments().PRESERVE_QSCORES_LESS_THAN;
referenceReader = getToolkit().getReferenceDataSource().getReference();
}
/**
* Initialize the recalibration engine
*/
private void initializeRecalibrationEngine() {
int numReadGroups = 0;
for ( final SAMFileHeader header : getToolkit().getSAMFileHeaders() )
numReadGroups += header.getReadGroups().size();
recalibrationEngine = new RecalibrationEngine(requestedCovariates, numReadGroups, RAC.RECAL_TABLE_UPDATE_LOG, lowMemoryMode);
}
private boolean isLowQualityBase( final GATKSAMRecord read, final int offset ) {
return read.getBaseQualities()[offset] < minimumQToUse;
}
/**
* For each read at this locus get the various covariate values and increment that location in the map based on
* whether or not the base matches the reference at this particular location
*/
public Long map( final ReferenceContext ref, final GATKSAMRecord originalRead, final RefMetaDataTracker metaDataTracker ) {
final GATKSAMRecord read = ReadClipper.hardClipSoftClippedBases( ReadClipper.hardClipAdaptorSequence(originalRead) );
if( read.isEmpty() ) { return 0L; } // the whole read was inside the adaptor so skip it
RecalUtils.parsePlatformForRead(read, RAC);
if (!RecalUtils.isColorSpaceConsistent(RAC.SOLID_NOCALL_STRATEGY, read)) { // parse the solid color space and check for color no-calls
return 0L; // skip this read completely
}
final int[] isSNP = calculateIsSNP(read, ref, originalRead);
final int[] isInsertion = calculateIsIndel(read, EventType.BASE_INSERTION);
final int[] isDeletion = calculateIsIndel(read, EventType.BASE_DELETION);
final int nErrors = nEvents(isSNP, isInsertion, isDeletion);
        // note: for efficiency reasons we don't compute the BAQ array unless we actually have
// some error to marginalize over. For ILMN data ~85% of reads have no error
final byte[] baqArray = nErrors == 0 ? flatBAQArray(read) : calculateBAQArray(read);
if( baqArray != null ) { // some reads just can't be BAQ'ed
final ReadCovariates covariates = RecalUtils.computeCovariates(read, requestedCovariates);
final boolean[] skip = calculateSkipArray(read, metaDataTracker); // skip known sites of variation as well as low quality and non-regular bases
final double[] snpErrors = calculateFractionalErrorArray(isSNP, baqArray);
final double[] insertionErrors = calculateFractionalErrorArray(isInsertion, baqArray);
final double[] deletionErrors = calculateFractionalErrorArray(isDeletion, baqArray);
// aggregate all of the info into our info object, and update the data
final ReadRecalibrationInfo info = new ReadRecalibrationInfo(read, covariates, skip, snpErrors, insertionErrors, deletionErrors);
recalibrationEngine.updateDataForRead(info);
return 1L;
} else {
return 0L;
}
}
/**
* Compute the number of mutational events across all hasEvent vectors
*
* Simply the sum of entries in hasEvents
*
     * @param hasEvents a vector of vectors of 0 (no event) and 1 (has event)
* @return the total number of events across all hasEvent arrays
*/
protected static int nEvents(final int[]... hasEvents) {
int n = 0;
for ( final int[] hasEvent : hasEvents ) {
n += MathUtils.sum(hasEvent);
}
return n;
}
protected boolean[] calculateSkipArray( final GATKSAMRecord read, final RefMetaDataTracker metaDataTracker ) {
final byte[] bases = read.getReadBases();
final boolean[] skip = new boolean[bases.length];
final boolean[] knownSites = calculateKnownSites(read, metaDataTracker.getValues(RAC.knownSites));
for( int iii = 0; iii < bases.length; iii++ ) {
skip[iii] = !BaseUtils.isRegularBase(bases[iii]) || isLowQualityBase(read, iii) || knownSites[iii] || badSolidOffset(read, iii);
}
return skip;
}
protected boolean badSolidOffset( final GATKSAMRecord read, final int offset ) {
return ReadUtils.isSOLiDRead(read) && RAC.SOLID_RECAL_MODE != RecalUtils.SOLID_RECAL_MODE.DO_NOTHING && !RecalUtils.isColorSpaceConsistent(read, offset);
}
protected static boolean[] calculateKnownSites( final GATKSAMRecord read, final List<Feature> features ) {
final int readLength = read.getReadBases().length;
final boolean[] knownSites = new boolean[readLength];
Arrays.fill(knownSites, false);
for( final Feature f : features ) {
if ((f.getStart() < read.getSoftStart() && f.getEnd() < read.getSoftStart()) ||
(f.getStart() > read.getSoftEnd() && f.getEnd() > read.getSoftEnd())) {
// feature is outside clipping window for the read, ignore
continue;
}
int featureStartOnRead = ReadUtils.getReadCoordinateForReferenceCoordinate(read.getSoftStart(), read.getCigar(), f.getStart(), ReadUtils.ClippingTail.LEFT_TAIL, true); // BUGBUG: should I use LEFT_TAIL here?
if( featureStartOnRead == ReadUtils.CLIPPING_GOAL_NOT_REACHED ) {
featureStartOnRead = 0;
}
int featureEndOnRead = ReadUtils.getReadCoordinateForReferenceCoordinate(read.getSoftStart(), read.getCigar(), f.getEnd(), ReadUtils.ClippingTail.LEFT_TAIL, true);
if( featureEndOnRead == ReadUtils.CLIPPING_GOAL_NOT_REACHED ) {
featureEndOnRead = readLength;
}
if( featureStartOnRead > readLength ) {
featureStartOnRead = featureEndOnRead = readLength;
}
Arrays.fill(knownSites, Math.max(0, featureStartOnRead), Math.min(readLength, featureEndOnRead + 1), true);
}
return knownSites;
}
// BUGBUG: can be merged with calculateIsIndel
protected static int[] calculateIsSNP( final GATKSAMRecord read, final ReferenceContext ref, final GATKSAMRecord originalRead ) {
final byte[] readBases = read.getReadBases();
final byte[] refBases = Arrays.copyOfRange(ref.getBases(), read.getAlignmentStart() - originalRead.getAlignmentStart(), ref.getBases().length + read.getAlignmentEnd() - originalRead.getAlignmentEnd());
final int[] snp = new int[readBases.length];
int readPos = 0;
int refPos = 0;
for ( final CigarElement ce : read.getCigar().getCigarElements() ) {
final int elementLength = ce.getLength();
switch (ce.getOperator()) {
case M:
case EQ:
case X:
for( int iii = 0; iii < elementLength; iii++ ) {
snp[readPos] = ( BaseUtils.basesAreEqual(readBases[readPos], refBases[refPos]) ? 0 : 1 );
readPos++;
refPos++;
}
break;
case D:
case N:
refPos += elementLength;
break;
case I:
case S: // ReferenceContext doesn't have the soft clipped bases!
readPos += elementLength;
break;
case H:
case P:
break;
default:
throw new ReviewedGATKException("Unsupported cigar operator: " + ce.getOperator());
}
}
return snp;
}
protected static int[] calculateIsIndel( final GATKSAMRecord read, final EventType mode ) {
final int[] indel = new int[read.getReadBases().length];
int readPos = 0;
for ( final CigarElement ce : read.getCigar().getCigarElements() ) {
final int elementLength = ce.getLength();
switch (ce.getOperator()) {
case M:
case EQ:
case X:
case S:
{
readPos += elementLength;
break;
}
case D:
{
final int index = ( read.getReadNegativeStrandFlag() ? readPos : readPos - 1 );
updateIndel(indel, index, mode, EventType.BASE_DELETION);
break;
}
case I:
{
final boolean forwardStrandRead = !read.getReadNegativeStrandFlag();
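                    // Added note: the insertion event is attributed to the base that precedes the inserted bases
                    // in sequencing-cycle order -- readPos - 1 for a forward-strand read, or the first base after
                    // the insertion (readPos once advanced past the inserted bases) for a reverse-strand read.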
if( forwardStrandRead ) {
updateIndel(indel, readPos - 1, mode, EventType.BASE_INSERTION);
}
readPos += elementLength;
if( !forwardStrandRead ) {
updateIndel(indel, readPos, mode, EventType.BASE_INSERTION);
}
break;
}
case N:
case H:
case P:
break;
default:
throw new ReviewedGATKException("Unsupported cigar operator: " + ce.getOperator());
}
}
return indel;
}
private static void updateIndel(final int[] indel, final int index, final EventType mode, final EventType requiredMode) {
if ( mode == requiredMode && index >= 0 && index < indel.length )
// protect ourselves from events at the start or end of the read (1D3M or 3M1D)
indel[index] = 1;
}
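    /**
     * (Javadoc added for clarity; this sketches the existing behavior, it is not a change.) Each error count is
     * spread over the BAQ-uncertain block that contains it, together with the base immediately before the block and
     * the certain base at which the block ends. For example, errorArray = {0, 1, 0, 0} with a BAQ-uncertain block at
     * positions 1-2 yields fractionalErrors = {0.25, 0.25, 0.25, 0.25}; bases outside any block keep their own
     * error count.
     */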
protected static double[] calculateFractionalErrorArray( final int[] errorArray, final byte[] baqArray ) {
if(errorArray.length != baqArray.length ) {
throw new ReviewedGATKException("Array length mismatch detected. Malformed read?");
}
final int BLOCK_START_UNSET = -1;
final double[] fractionalErrors = new double[baqArray.length];
Arrays.fill(fractionalErrors, 0.0);
boolean inBlock = false;
int blockStartIndex = BLOCK_START_UNSET;
int iii;
for( iii = 0; iii < fractionalErrors.length; iii++ ) {
if( baqArray[iii] == NO_BAQ_UNCERTAINTY ) {
if( !inBlock ) {
fractionalErrors[iii] = (double) errorArray[iii];
} else {
calculateAndStoreErrorsInBlock(iii, blockStartIndex, errorArray, fractionalErrors);
inBlock = false; // reset state variables
blockStartIndex = BLOCK_START_UNSET; // reset state variables
}
} else {
inBlock = true;
if( blockStartIndex == BLOCK_START_UNSET ) { blockStartIndex = iii; }
}
}
if( inBlock ) {
calculateAndStoreErrorsInBlock(iii-1, blockStartIndex, errorArray, fractionalErrors);
}
if( fractionalErrors.length != errorArray.length ) {
throw new ReviewedGATKException("Output array length mismatch detected. Malformed read?");
}
return fractionalErrors;
}
private static void calculateAndStoreErrorsInBlock( final int iii,
final int blockStartIndex,
final int[] errorArray,
final double[] fractionalErrors ) {
int totalErrors = 0;
for( int jjj = Math.max(0,blockStartIndex-1); jjj <= iii; jjj++ ) {
totalErrors += errorArray[jjj];
}
for( int jjj = Math.max(0, blockStartIndex-1); jjj <= iii; jjj++ ) {
fractionalErrors[jjj] = ((double) totalErrors) / ((double)(iii - Math.max(0,blockStartIndex-1) + 1));
}
}
/**
* Create a BAQ style array that indicates no alignment uncertainty
* @param read the read for which we want a BAQ array
     * @return a BAQ-style non-null byte[] containing NO_BAQ_UNCERTAINTY values
     * // TODO -- could be optimized by avoiding this function entirely and using this inline in the calculation code above
*/
protected static byte[] flatBAQArray(final GATKSAMRecord read) {
final byte[] baq = new byte[read.getReadLength()];
Arrays.fill(baq, NO_BAQ_UNCERTAINTY);
return baq;
}
/**
* Compute an actual BAQ array for read, based on its quals and the reference sequence
* @param read the read to BAQ
     * @return the BAQ tag array for the read, or null if the read could not be BAQ'ed
*/
private byte[] calculateBAQArray( final GATKSAMRecord read ) {
baq.baqRead(read, referenceReader, BAQ.CalculationMode.RECALCULATE, BAQ.QualityMode.ADD_TAG);
return BAQ.getBAQTag(read);
}
/**
* Initialize the reduce step by returning 0L
*
* @return returns 0L
*/
public Long reduceInit() {
return 0L;
}
    /**
     * Accumulate the per-read map results into a running total of reads used for recalibration.
     *
     * @param mapped result of the map step: 1 if the read contributed to the recalibration tables, 0 otherwise
     * @param sum    running count of reads used so far
     * @return the updated running count
     */
public Long reduce(Long mapped, Long sum) {
sum += mapped;
return sum;
}
@Override
public void onTraversalDone(Long result) {
recalibrationEngine.finalizeData();
logger.info("Calculating quantized quality scores...");
quantizeQualityScores();
logger.info("Writing recalibration report...");
generateReport();
logger.info("...done!");
logger.info("BaseRecalibrator was able to recalibrate " + result + " reads");
}
private RecalibrationTables getRecalibrationTable() {
return recalibrationEngine.getFinalRecalibrationTables();
}
/**
* go through the quality score table and use the # observations and the empirical quality score
* to build a quality score histogram for quantization. Then use the QuantizeQual algorithm to
* generate a quantization map (recalibrated_qual -> quantized_qual)
*/
private void quantizeQualityScores() {
quantizationInfo = new QuantizationInfo(getRecalibrationTable(), RAC.QUANTIZING_LEVELS);
}
private void generateReport() {
RecalUtils.outputRecalibrationReport(RAC, quantizationInfo, getRecalibrationTable(), requestedCovariates, RAC.SORT_BY_ALL_COLUMNS);
}
}<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/genotyper/afcalc/GeneralPloidyAFCalculationModelUnitTest.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper.afcalc;
import org.broadinstitute.gatk.utils.BaseTest;
import org.broadinstitute.gatk.tools.walkers.genotyper.GeneralPloidyGenotypeLikelihoods;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.GenotypeBuilder;
import htsjdk.variant.variantcontext.GenotypesContext;
import org.testng.Assert;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.Arrays;
/**
 * Unit tests for the general-ploidy (pooled) exact allele frequency calculation model.
 *
 * User: delangel
 * Date: 3/28/12
 */
public class GeneralPloidyAFCalculationModelUnitTest extends BaseTest {
static double[] AA1, AB1, BB1;
static double[] AA2, AB2, AC2, BB2, BC2, CC2;
static double[] A4_1, B4_1, C4_1, D4_1, E4_1,F4_1;
static double[] A4_400, B4_310, C4_220, D4_130, E4_121, F4_013;
static final int numSamples = 4;
static final int samplePloidy = 4; // = 2*samplesPerPool
@BeforeSuite
public void before() {
// legacy diploid cases
AA1 = new double[]{-5.0, -20.0, -20.0};
AB1 = new double[]{-20.0, 0.0, -20.0};
BB1 = new double[]{-20.0, -20.0, 0.0};
// diploid, nAlleles = 3. Ordering is [2 0 0] [1 1 0] [0 2 0] [1 0 1] [0 1 1] [0 0 2], ie AA AB BB AC BC CC
AA2 = new double[]{0.0, -20.0, -20.0, -20.0, -20.0, -20.0};
AB2 = new double[]{-20.0, 0.0, -20.0, -20.0, -20.0, -20.0};
AC2 = new double[]{-20.0, -20.0, -20.0, 0.0, -20.0, -20.0};
BB2 = new double[]{-20.0, -20.0, 0.0, -20.0, -20.0, -20.0};
BC2 = new double[]{-20.0, -20.0, -20.0, -20.0, 0.0, -20.0};
CC2 = new double[]{-20.0, -20.0, -20.0, -20.0, -20.0, 0.0};
// pool (i.e. polyploid cases)
// NAlleles = 2, ploidy=4
// ordering is [4 0] [3 1] [2 2 ] [1 3] [0 4]
A4_1 = new double[]{-3.0, -20.0, -20.0, -20.0, -20.0};
B4_1 = new double[]{-20.0, 0.0, -20.0, -20.0, -20.0};
C4_1 = new double[]{-20.0, -20.0, 0.0, -20.0, -20.0};
D4_1 = new double[]{-20.0, -20.0, 0.0, 0.0, -20.0};
E4_1 = new double[]{-20.0, -20.0, 0.0, 0.0, -20.0};
F4_1 = new double[]{-20.0, -20.0, -20.0, -20.0, 0.0};
// NAlleles = 3, ploidy = 4
// ordering is [4 0 0] [3 1 0] [2 2 0] [1 3 0] [0 4 0] [3 0 1] [2 1 1] [1 2 1] [0 3 1] [2 0 2] [1 1 2] [0 2 2] [1 0 3] [0 1 3] [0 0 4]
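        // Added note: for ploidy P and A alleles the likelihood vector has C(P + A - 1, A - 1) entries (the multiset
        // coefficient): C(6, 2) = 15 here, C(5, 1) = 5 for the bi-allelic ploidy-4 vectors, and C(4, 2) = 6 for the
        // tri-allelic diploid vectors above.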
A4_400 = new double[]{0.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0};
B4_310 = new double[]{-20.0, 0.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0};
C4_220 = new double[]{-20.0, -20.0, 0.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0};
D4_130 = new double[]{-20.0, -20.0, -20.0, 0.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0};
E4_121 = new double[]{-20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, 0.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0};
F4_013 = new double[]{-20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, -20.0, 0.0, -20.0};
}
private class GetGLsTest extends TestDataProvider {
GenotypesContext GLs;
int numAltAlleles;
String name;
int ploidy;
private GetGLsTest(String name, int numAltAlleles, int ploidy, Genotype... arg) {
super(GetGLsTest.class, name);
GLs = GenotypesContext.create(arg);
this.name = name;
this.numAltAlleles = numAltAlleles;
this.ploidy = ploidy;
}
public String toString() {
return String.format("%s input=%s", super.toString(), GLs);
}
}
private static Genotype createGenotype(String name, double[] gls, int ploidy) {
Allele[] alleles = new Allele[ploidy];
for (int i=0; i < ploidy; i++)
alleles[i] = Allele.NO_CALL;
return new GenotypeBuilder(name, Arrays.asList(alleles)).PL(gls).make();
}
@DataProvider(name = "getGLs")
public Object[][] createGLsData() {
// bi-allelic diploid case
new GetGLsTest("B0", 1, 2, createGenotype("AA1", AA1,2), createGenotype("AA2", AA1,2), createGenotype("AA3", AA1,2));
new GetGLsTest("B1", 1, 2, createGenotype("AA1", AA1,2), createGenotype("AA2", AA1,2), createGenotype("AB", AB1,2));
new GetGLsTest("B2", 1, 2, createGenotype("AA1", AA1,2), createGenotype("BB", BB1,2), createGenotype("AA2", AA1,2));
new GetGLsTest("B3a", 1, 2, createGenotype("AB", AB1,2), createGenotype("AA", AA1,2), createGenotype("BB", BB1,2));
new GetGLsTest("B3b", 1, 2, createGenotype("AB1", AB1,2), createGenotype("AB2", AB1,2), createGenotype("AB3", AB1,2));
new GetGLsTest("B4", 1, 2, createGenotype("BB1", BB1,2), createGenotype("BB2", BB1,2), createGenotype("AA", AA1,2));
new GetGLsTest("B5", 1, 2, createGenotype("BB1", BB1,2), createGenotype("AB", AB1,2), createGenotype("BB2", BB1,2));
new GetGLsTest("B6", 1, 2, createGenotype("BB1", BB1,2), createGenotype("BB2", BB1,2), createGenotype("BB3", BB1,2));
// tri-allelic diploid case
new GetGLsTest("B1C0", 2, 2, createGenotype("AA1", AA2,2), createGenotype("AA2", AA2,2), createGenotype("AB", AB2,2));
new GetGLsTest("B0C1", 2, 2, createGenotype("AA1", AA2,2), createGenotype("AA2", AA2,2), createGenotype("AC", AC2,2));
new GetGLsTest("B1C1a", 2,2, createGenotype("AA", AA2,2), createGenotype("AB", AB2,2), createGenotype("AC", AC2,2));
new GetGLsTest("B1C1b", 2,2, createGenotype("AA1", AA2,2), createGenotype("AA2", AA2,2), createGenotype("BC", BC2,2));
new GetGLsTest("B2C1", 2, 2, createGenotype("AB1", AB2,2), createGenotype("AB2", AB2,2), createGenotype("AC", AC2,2));
new GetGLsTest("B3C2a", 2, 2, createGenotype("AB", AB2,2), createGenotype("BC1", BC2,2), createGenotype("BC2", BC2,2));
new GetGLsTest("B3C2b", 2, 2, createGenotype("AB", AB2,2), createGenotype("BB", BB2,2), createGenotype("CC", CC2,2));
// bi-allelic pool case
new GetGLsTest("P0", 1, samplePloidy, createGenotype("A4_1", A4_1,samplePloidy), createGenotype("A4_1", A4_1,samplePloidy), createGenotype("A4_1", A4_1,samplePloidy));
new GetGLsTest("P1", 1, samplePloidy,createGenotype("A4_1", A4_1,samplePloidy), createGenotype("B4_1", B4_1,samplePloidy), createGenotype("A4_1", A4_1,samplePloidy));
new GetGLsTest("P2a", 1,samplePloidy, createGenotype("A4_1", A4_1,samplePloidy), createGenotype("C4_1", C4_1,samplePloidy), createGenotype("A4_1", A4_1,samplePloidy));
new GetGLsTest("P2b", 1, samplePloidy,createGenotype("B4_1", B4_1,samplePloidy), createGenotype("B4_1", B4_1,samplePloidy), createGenotype("A4_1", A4_1,samplePloidy));
new GetGLsTest("P4", 1, samplePloidy,createGenotype("A4_1", A4_1,samplePloidy), createGenotype("C4_1", C4_1,samplePloidy), createGenotype("C4_1", C4_1,samplePloidy));
new GetGLsTest("P6", 1, samplePloidy,createGenotype("A4_1", A4_1,samplePloidy), createGenotype("F4_1", F4_1,samplePloidy), createGenotype("C4_1", C4_1,samplePloidy));
new GetGLsTest("P8", 1, samplePloidy,createGenotype("A4_1", A4_1,samplePloidy), createGenotype("F4_1", F4_1,samplePloidy), createGenotype("F4_1", F4_1,samplePloidy));
// multi-allelic pool case
new GetGLsTest("B1C3", 2, samplePloidy,createGenotype("A4_400", A4_400,samplePloidy), createGenotype("A4_400", A4_400,samplePloidy), createGenotype("F4_013", F4_013,samplePloidy));
new GetGLsTest("B3C9", 2, samplePloidy,createGenotype("F4_013", F4_013,samplePloidy), createGenotype("F4_013", F4_013,samplePloidy), createGenotype("F4_013", F4_013,samplePloidy));
new GetGLsTest("B6C0", 2, samplePloidy,createGenotype("B4_310", B4_310,samplePloidy), createGenotype("C4_220", C4_220,samplePloidy), createGenotype("D4_130", D4_130,samplePloidy));
new GetGLsTest("B6C4", 2, samplePloidy,createGenotype("D4_130", D4_130,samplePloidy), createGenotype("E4_121", E4_121,samplePloidy), createGenotype("F4_013", F4_013,samplePloidy));
new GetGLsTest("B4C7", 2, samplePloidy,createGenotype("F4_013", F4_013,samplePloidy), createGenotype("E4_121", E4_121,samplePloidy), createGenotype("F4_013", F4_013,samplePloidy));
new GetGLsTest("B2C3", 2, samplePloidy,createGenotype("A4_400", A4_400,samplePloidy), createGenotype("F4_013", F4_013,samplePloidy), createGenotype("B4_310", B4_310,samplePloidy));
return GetGLsTest.getTests(GetGLsTest.class);
}
@Test(dataProvider = "getGLs")
public void testGLs(GetGLsTest cfg) {
final int len = GeneralPloidyGenotypeLikelihoods.getNumLikelihoodElements(1 + cfg.numAltAlleles, cfg.ploidy * cfg.GLs.size());
double[] priors = new double[len]; // flat priors
final GeneralPloidyExactAFCalculator calc = new GeneralPloidyExactAFCalculator();
calc.combineSinglePools(cfg.GLs, cfg.ploidy,cfg.numAltAlleles + 1, priors);
int nameIndex = 1;
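        // Added note: test names encode the expected MAP alt-allele counts, with the digits at name positions 1, 3, ...
        // -- e.g. "B1C3" expects a count of 1 for the first alt allele (B) and 3 for the second (C) -- which is what
        // the substring call below extracts.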
for ( int allele = 0; allele < cfg.numAltAlleles; allele++, nameIndex+=2 ) {
int expectedAlleleCount = Integer.valueOf(cfg.name.substring(nameIndex, nameIndex + 1));
int calculatedAlleleCount = calc.getAltAlleleCountOfMAP(allele);
Assert.assertEquals(calculatedAlleleCount, expectedAlleleCount);
}
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/bqsr/AnalyzeCovariates.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.bqsr;
import com.google.java.contract.Requires;
import org.broadinstitute.gatk.utils.commandline.Argument;
import org.broadinstitute.gatk.utils.commandline.Input;
import org.broadinstitute.gatk.utils.commandline.Output;
import org.broadinstitute.gatk.engine.CommandLineGATK;
import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
import org.broadinstitute.gatk.engine.walkers.RodWalker;
import org.broadinstitute.gatk.utils.Utils;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
import org.broadinstitute.gatk.utils.help.HelpConstants;
import org.broadinstitute.gatk.engine.recalibration.RecalUtils;
import org.broadinstitute.gatk.engine.recalibration.RecalibrationReport;
import org.broadinstitute.gatk.engine.recalibration.BaseRecalibration;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Create plots to visualize base recalibration results
*
* <p>
 * This tool generates plots for visualizing the quality of a recalibration run (as performed by BaseRecalibrator).
* </p>
*
* <h3>Input</h3>
*
* <p>The tool can take up to three different sets of recalibration tables.
* The resulting plots will be overlaid on top of each other to make
* comparisons easy.</p>
*
* <br/>
* <table style="text-align: left">
* <thead>
* <tr><th>Set</th><th>Argument</th><th>Label</th><th>Color</th><th>Description</th></tr>
* </thead>
* <tbody>
* <tr><td>Original</td><td>-before</td><td>BEFORE</td><td style="color: #ff34b3">Pink</td>
* <td>First pass recalibration
* tables obtained from applying BaseRecalibration
* on the original alignment.</td></tr>
* <tr><td>Recalibrated</td><td>-after</td><td>AFTER</td><td style="color: #0000ff">Blue</td>
* <td>Second pass recalibration tables
* results from the application of BaseRecalibration
* on the alignment recalibrated using the first pass tables</td></tr>
* <tr><td>Input</td><td>-BQSR</td><td>BQSR</td><td style="color: #000000">Black</td>
* <td>Any recalibration table without a specific role</td></tr>
* </tbody>
* </table>
* <br/>
*
* <p>You need to specify at least one set. Multiple sets need to have the same values for the following parameters:
 * <br/><br/>
* <i>covariate (order is not important), no_standard_covs, run_without_dbsnp, solid_recal_mode,
* solid_nocall_strategy, mismatches_context_size, mismatches_default_quality, deletions_default_quality,
* insertions_default_quality, maximum_cycle_value, low_quality_tail, default_platform, force_platform,
* quantizing_levels</i> and <i>binary_tag_name</i>
* </p>
*
* <h3>Output</h3>
*
* <p>A pdf document with plots that show the quality of the recalibration, and an optional csv file that contains a table with all the data required to generate those plots.</p>
*
* <h3>Usage examples</h3>
*
*
* <h4>Plot a single recalibration table</h4>
* <pre>
* java -jar GenomeAnalysisTK.jar \
* -T AnalyzeCovariates \
 *      -R myreference.fasta \
* -BQSR myrecal.table \
* -plots BQSR.pdf
* </pre>
*
* <h4>Plot before (first pass) and after (second pass) recalibration tables to compare them</h4>
*
* <pre>
* java -jar GenomeAnalysisTK.jar \
* -T AnalyzeCovariates \
 *      -R myreference.fasta \
* -before recal2.table \
* -after recal3.table \
* -plots recalQC.pdf
* </pre>
*
* <h4>Plot up to three recalibration tables for comparison</h4>
*
* <pre>
*
* # You can ignore the before/after semantics completely if you like (if you do, add -ignoreLMT
* # to avoid a possible warning), but all tables must have been generated using the same parameters.
*
* java -jar GenomeAnalysisTK.jar \
* -T AnalyzeCovariates \
* -R myreference.fasta \
* -ignoreLMT \
* -BQSR recal1.table \ # you can discard any two
* -before recal2.table \
* -after recal3.table \
* -plots myrecals.pdf
* </pre>
*
* <h4>Full BQSR quality assessment pipeline</h4>
*
* <pre>
* # Generate the first pass recalibration table file
* java -jar GenomeAnalysisTK.jar \
* -T BaseRecalibrator \
* -R reference.fasta \
* -I myinput.bam \
* -knownSites bundle/my-trusted-snps.vcf \ # optional but recommended
* -knownSites bundle/my-trusted-indels.vcf \ # optional but recommended
* -o firstpass.table
*
* # Generate the second pass recalibration table file
* java -jar GenomeAnalysisTK.jar \
* -T BaseRecalibrator \
* -R reference.fasta \
* -I myinput.bam \
* -knownSites bundle/my-trusted-snps.vcf \
* -knownSites bundle/my-trusted-indels.vcf \
* -BQSR firstpass.table \
* -o secondpass.table
*
* # Finally generate the plots and also keep a copy of the csv (optional)
* java -jar GenomeAnalysisTK.jar \
* -T AnalyzeCovariates \
* -R reference.fasta \
* -before firstpass.table \
* -after secondpass.table \
* -csv BQSR.csv \ # optional
* -plots BQSR.pdf
* </pre>
*
* @author <NAME> <<EMAIL>>
* @version 6/16/2013
* @since 2.6
*/
@DocumentedGATKFeature(groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class})
public final class AnalyzeCovariates extends RodWalker<AnalyzeCovariates.None,AnalyzeCovariates.None> {
// Constants on option short names that are used in some error/warning messages:
static final String CSV_ARG_SHORT_NAME = "csv";
static final String PDF_ARG_SHORT_NAME = "plots";
static final String BEFORE_ARG_SHORT_NAME = "before";
static final String AFTER_ARG_SHORT_NAME = "after";
/**
* File containing the recalibration tables from the first pass.
*/
@Input(shortName=BEFORE_ARG_SHORT_NAME,fullName="beforeReportFile", doc = "file containing the BQSR first-pass report file",required = false)
protected File beforeFile = null;
/**
* File containing the recalibration tables from the second pass.
*/
@Input(shortName=AFTER_ARG_SHORT_NAME, fullName="afterReportFile", doc = "file containing the BQSR second-pass report file",required = false)
protected File afterFile = null;
/**
* If true, it won't show a warning if the last-modification time of the before and after input files suggest that they have been reversed.
*/
@Argument(shortName="ignoreLMT", fullName="ignoreLastModificationTimes", doc= "do not emit warning messages related to suspicious last modification time order of inputs", required = false)
protected boolean ignoreLastModificationTime = false;
/**
* Output report file name.
*/
@Output(shortName=PDF_ARG_SHORT_NAME, fullName="plotsReportFile" ,doc = "location of the output report", required = false)
protected File pdfFile = null;
/**
* Output csv file name.
*/
@Output(shortName=CSV_ARG_SHORT_NAME,fullName="intermediateCsvFile" ,doc = "location of the csv intermediate file", required = false)
protected File csvFile = null;
/**
* Convenience reference to the RECAL_BQSR_FILE argument value.
*
* This field value is resolved by {@link #initialize()}.
*/
protected File bqsrFile = null;
/**
* Checks inputs and argument values.
*
* Notice that this routine will not validate the content of files. It may have some minor side effects,
* such as the output of warning messages back to the user.
*
* @throws IllegalStateException if some required argument value has not been loaded yet.
* @throws UserException if there is some error caused by or under the end user's control.
*/
private void checkArgumentsValues() {
checkInputReportFile("BQSR",bqsrFile);
checkInputReportFile("before",beforeFile);
checkInputReportFile("after",afterFile);
if (bqsrFile == null && beforeFile == null && afterFile == null) {
throw new UserException("you must provide at least one recalibration report file "
+ "(arguments -BQSR, -" + BEFORE_ARG_SHORT_NAME + " or -" + AFTER_ARG_SHORT_NAME + ")");
}
checkOutputFile(PDF_ARG_SHORT_NAME,pdfFile);
checkOutputFile(CSV_ARG_SHORT_NAME, csvFile);
checkInputReportFileLMT(beforeFile,afterFile);
checkOutputRequested();
}
/**
* Checks whether the last-modification-time of the inputs is consistent with their relative roles.
*
* This routine does not throw an exception but may output a warning message if inconsistencies are spotted.
*
* @param beforeFile the before report file.
* @param afterFile the after report file.
*/
private void checkInputReportFileLMT(final File beforeFile, final File afterFile) {
if (ignoreLastModificationTime || beforeFile == null || afterFile == null) {
return; // nothing to do here
} else if (beforeFile.lastModified() > afterFile.lastModified()) {
Utils.warnUser("Last modification timestamps for the 'Before' and 'After' "
+ "recalibration reports are in the wrong order. Perhaps they have been swapped?");
}
}
/**
* Checks that at least one output was requested.
*
* @throws UserException if no output was requested.
*/
private void checkOutputRequested() {
if (pdfFile == null && csvFile == null) {
throw new UserException("you need to request at least one output:"
+ " the intermediate csv file (-" + CSV_ARG_SHORT_NAME + " FILE)"
+ " or the final plot file (-" + PDF_ARG_SHORT_NAME + " FILE).");
}
}
/**
* Checks the value provided to input file arguments.
*
* @throws UserException if there is any problem caused by or under the end user's control.
*
* @param name command line argument short name.
* @param value the argument value.
*/
private void checkInputReportFile(final String name,final File value) {
if (value == null) {
return;
} else if (!value.exists()) {
throw new UserException.BadArgumentValue(name, "input report '" +
value + "' does not exist or is unreachable");
} else if (!value.isFile()) {
throw new UserException.BadArgumentValue(name, "input report '" +
value + "' is not a regular file");
} else if (!value.canRead()) {
throw new UserException.BadArgumentValue(name, "input report '" +
value + "' cannot be read");
}
}
/**
* Checks the value provided for output arguments.
*
* @throws UserException if there is any problem caused by or under the end user's control.
*
* @param name command line argument short name.
* @param value the argument value.
*/
private void checkOutputFile(final String name, final File value) {
if (value == null) {
return;
}
if (value.exists() && !value.isFile()) {
throw new UserException.BadArgumentValue(name, "the output file location '"
+ value + "' exists but is not a regular file");
}
final File parent = value.getParentFile();
if (parent == null) {
return;
}
if (!parent.exists()) {
throw new UserException.BadArgumentValue(name, "the output file parent directory '"
+ parent + "' does not exist or is unreachable");
} else if (!parent.isDirectory()) {
throw new UserException.BadArgumentValue(name, "the output file parent directory '"
+ parent + "' is not a directory");
} else if (!parent.canWrite()) {
throw new UserException.BadArgumentValue(name, "the output file parent directory '"
+ parent + "' is not writable");
}
}
/**
* Generates the plots using the external R script.
*
* If <code>plotsFile</code> is <code>null</code>, it does not perform any plotting.
*
* @param csvFile the intermediary csv file.
* @param reportFiles the input report files keyed by role; one of them is handed to the plotting routine.
* @param plotsFile the output plot location.
*/
private void generatePlots(final File csvFile, final Map<String,File> reportFiles, final File plotsFile) {
if (plotsFile == null) {
return;
}
logger.info("Generating plots file '" + plotsFile + "'");
final File exampleReportFile = reportFiles.values().iterator().next();
RecalUtils.generatePlots(csvFile,exampleReportFile,plotsFile);
}
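/**
* Initializes the tool: resolves the -BQSR argument value, validates all argument values,
* loads the input recalibration reports, checks that they are mutually consistent and then
* generates the requested csv and/or plot outputs.
*/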
@Override
public void initialize() {
super.initialize();
bqsrFile = getToolkit().getArguments().BQSR_RECAL_FILE;
checkArgumentsValues();
final Map<String, File> reportFiles = buildReportFileMap();
final Map<String, RecalibrationReport> reports = buildReportMap(reportFiles);
checkReportConsistency(reports);
final File csvFile = resolveCsvFile();
generateCsvFile(csvFile,reports);
final File plotFile = resolvePlotFile();
generatePlots(csvFile, reportFiles, plotFile);
}
/**
* Returns the plot output file
* @return might be <code>null</code> if the user has not indicated an output file.
*/
private File resolvePlotFile() {
return pdfFile;
}
/**
* Generates the intermediary Csv file.
*
* @param csvFile where to write the file.
* @param reports the reports to be included.
*/
private void generateCsvFile(final File csvFile, final Map<String, RecalibrationReport> reports) {
try {
logger.info("Generating csv file '" + csvFile + "'");
RecalUtils.generateCsv(csvFile, reports);
} catch (FileNotFoundException e) {
throw new UserException(
String.format("There is a problem creating the intermediary Csv file '%s': %s",
csvFile,e.getMessage()),e);
}
}
/**
* Checks whether multiple input recalibration report files argument values are consistent (equal).
*
* @param reports map with the reports to verify.
*
* @throws UserException if there is any inconsistency.
*/
private void checkReportConsistency(final Map<String, RecalibrationReport> reports) {
final Map.Entry<String,RecalibrationReport>[] reportEntries =
reports.entrySet().toArray((Map.Entry<String,RecalibrationReport>[]) new Map.Entry[reports.size()]);
final Map.Entry<String,RecalibrationReport> exampleEntry = reportEntries[0];
for (int i = 1; i < reportEntries.length; i++) {
final Map<String,? extends CharSequence> diffs = exampleEntry.getValue().getRAC().compareReportArguments(
reportEntries[i].getValue().getRAC(),exampleEntry.getKey(),reportEntries[i].getKey());
if (diffs.size() != 0) {
throw new UserException.IncompatibleRecalibrationTableParameters("There are differences in relevant arguments of"
+ " two or more input recalibration reports. Please make sure"
+ " they have been created using the same recalibration parameters."
+ " " + Utils.join("// ", reportDifferencesStringArray(diffs)));
}
}
}
/**
* Creates a map with all input recalibration files indexed by their "role".
*
* The key is the role and the value the corresponding report file.
*
* Roles: "Before" (recalibration), "After" (recalibration), "BQSR" (the tool standard argument recalibration file)
*
* @return never <code>null</code>
*/
private Map<String, File> buildReportFileMap() {
final Map<String,File> reports = new LinkedHashMap<>(3);
if (bqsrFile != null) {
reports.put("BQSR",bqsrFile);
}
if (beforeFile != null) {
reports.put("Before",beforeFile);
}
if (afterFile != null) {
reports.put("After",afterFile);
}
return reports;
}
/**
* Transforms a recalibration file map into a report object map.
*
* @param reportFileMap the file map to transform.
* @return never <code>null</code>, a new map with the same size as
* <code>reportFileMap</code> and the same key set.
*/
@Requires("reportFileMap != null")
private Map<String, RecalibrationReport> buildReportMap(final Map<String, File> reportFileMap) {
final Map<String,RecalibrationReport> reports = new LinkedHashMap<>(reportFileMap.size());
for (final Map.Entry<String,File> e : reportFileMap.entrySet()) {
reports.put(e.getKey(),new RecalibrationReport(e.getValue()));
}
return reports;
}
/**
* Generates a flat String array representation of the recalibration argument differences.
* @param diffs the differences to represent.
*
* @return never <code>null</code>, an array of the same length as the size of the input <code>diffs</code>.
*/
@Requires("diffs != null")
private String[] reportDifferencesStringArray(final Map<String, ? extends CharSequence> diffs) {
final String[] result = new String[diffs.size()];
int i = 0;
for (final Map.Entry<String, ? extends CharSequence> e : diffs.entrySet()) {
result[i++] = capitalize(e.getKey()) + ": " + e.getValue();
}
return result;
}
/**
* Returns the input string capitalizing the first letter.
*
* @param str the string to capitalize
* @return never <code>null</code>.
*/
@Requires("str != null")
private String capitalize(final String str) {
if (str.isEmpty()) {
return str;
} else {
return Character.toUpperCase(str.charAt(0)) + str.substring(1);
}
}
/**
* Returns the csv file to use.
*
* This is the one specified by the user, if any, or otherwise a temporary file
* that will be deleted as soon as the VM exits.
*
* @return never <code>null</code>.
*/
private File resolveCsvFile() {
if (csvFile != null) {
return csvFile;
} else {
try {
final File result = File.createTempFile("AnalyzeCovariates", ".csv");
result.deleteOnExit();
return result;
} catch (IOException e) {
throw new UserException("Could not create temporary Csv file",e);
}
}
}
/**
* Always returns true, forcing the immediate termination of the traversal.
* @return always {@code true}.
*/
@Override
public boolean isDone() {
return true;
}
/**
* {@inheritDoc}
*/
@Override
public None reduceInit() {
return new None();
}
/**
* This method is never supposed to be called; it always throws an exception.
*
* @throws IllegalStateException always.
*/
@Override
public None reduce(None value, None sum) {
throw new IllegalStateException("AnalyzeCovariates reduce method is not supposed to be invoked ever");
}
/**
* This method is never supposed to be called; it always throws an exception.
*
* @throws IllegalStateException always.
*/
@Override
public None map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
throw new IllegalStateException("AnalyzeCovariates map method is not supposed to be invoked ever");
}
/**
* Dummy map and reduce types for the {@link AnalyzeCovariates} tool that in fact does not do any traversal.
*/
protected static class None {
private None() {
}
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/variantrecalibration/GaussianMixtureModel.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.variantrecalibration;
import Jama.Matrix;
import org.apache.log4j.Logger;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.Utils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* Created by IntelliJ IDEA.
* User: rpoplin
* Date: Mar 4, 2011
*/
public class GaussianMixtureModel {
protected final static Logger logger = Logger.getLogger(GaussianMixtureModel.class);
private final List<MultivariateGaussian> gaussians;
private final double shrinkage;
private final double dirichletParameter;
private final double priorCounts;
private final double[] empiricalMu;
private final Matrix empiricalSigma;
public boolean isModelReadyForEvaluation;
public boolean failedToConverge = false;
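/**
* Creates a mixture model with the given number of Gaussians over the given number of annotation
* dimensions. Each Gaussian starts with a uniform mixture weight (1/numGaussians) and the supplied
* prior hyperparameters; the empirical mean is initialized to zero and the empirical covariance to
* a scaled identity matrix.
*/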
public GaussianMixtureModel( final int numGaussians, final int numAnnotations,
final double shrinkage, final double dirichletParameter, final double priorCounts ) {
gaussians = new ArrayList<>( numGaussians );
for( int iii = 0; iii < numGaussians; iii++ ) {
final MultivariateGaussian gaussian = new MultivariateGaussian( numAnnotations );
gaussian.pMixtureLog10 = Math.log10( 1.0 / ((double)numGaussians) );
gaussian.sumProb = 1.0 / ((double) numGaussians);
gaussian.hyperParameter_a = priorCounts;
gaussian.hyperParameter_b = shrinkage;
gaussian.hyperParameter_lambda = dirichletParameter;
gaussians.add( gaussian );
}
this.shrinkage = shrinkage;
this.dirichletParameter = dirichletParameter;
this.priorCounts = priorCounts;
empiricalMu = new double[numAnnotations];
empiricalSigma = new Matrix(numAnnotations, numAnnotations);
isModelReadyForEvaluation = false;
Arrays.fill(empiricalMu, 0.0);
empiricalSigma.setMatrix(0, empiricalMu.length - 1, 0, empiricalMu.length - 1, Matrix.identity(empiricalMu.length, empiricalMu.length).times(200.0).inverse());
}
//this is used for the model output unit test
protected GaussianMixtureModel(final List<MultivariateGaussian> gaussians, final double shrinkage, final double dirichletParameter, final double priorCounts ) {
this.gaussians = gaussians;
final int numAnnotations = gaussians.get(0).mu.length;
this.shrinkage = shrinkage;
this.dirichletParameter = dirichletParameter;
this.priorCounts = priorCounts;
for( final MultivariateGaussian gaussian : gaussians ) {
gaussian.hyperParameter_a = priorCounts;
gaussian.hyperParameter_b = shrinkage;
gaussian.hyperParameter_lambda = dirichletParameter;
}
empiricalMu = new double[numAnnotations];
empiricalSigma = new Matrix(numAnnotations, numAnnotations);
isModelReadyForEvaluation = false;
Arrays.fill(empiricalMu, 0.0);
empiricalSigma.setMatrix(0, empiricalMu.length - 1, 0, empiricalMu.length - 1, Matrix.identity(empiricalMu.length, empiricalMu.length).times(200.0).inverse());
}
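/**
* Initializes the model for training: Gaussian means are seeded randomly and refined with a few
* k-means iterations, after which mixture weights are set uniformly, covariance matrices are
* randomized and the prior hyperparameters are reset.
*/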
public void initializeRandomModel( final List<VariantDatum> data, final int numKMeansIterations ) {
// initialize random Gaussian means // BUGBUG: this is broken up this way to match the order of calls to rand.nextDouble() in the old code
for( final MultivariateGaussian gaussian : gaussians ) {
gaussian.initializeRandomMu( Utils.getRandomGenerator() );
}
// initialize means using K-means algorithm
logger.info( "Initializing model with " + numKMeansIterations + " k-means iterations..." );
initializeMeansUsingKMeans( data, numKMeansIterations );
// initialize uniform mixture coefficients, random covariance matrices, and initial hyperparameters
for( final MultivariateGaussian gaussian : gaussians ) {
gaussian.pMixtureLog10 = Math.log10( 1.0 / ((double) gaussians.size()) );
gaussian.sumProb = 1.0 / ((double) gaussians.size());
gaussian.initializeRandomSigma( Utils.getRandomGenerator() );
gaussian.hyperParameter_a = priorCounts;
gaussian.hyperParameter_b = shrinkage;
gaussian.hyperParameter_lambda = dirichletParameter;
}
}
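/**
* Simple k-means refinement of the Gaussian means: each iteration assigns every datum to the
* Gaussian with the closest mean and then recomputes each mean as the average of its assigned
* data, re-randomizing the mean of any Gaussian that received no data.
*/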
private void initializeMeansUsingKMeans( final List<VariantDatum> data, final int numIterations ) {
int ttt = 0;
while( ttt++ < numIterations ) {
// E step: assign each variant to the nearest cluster
for( final VariantDatum datum : data ) {
double minDistance = Double.MAX_VALUE;
MultivariateGaussian minGaussian = null;
for( final MultivariateGaussian gaussian : gaussians ) {
final double dist = gaussian.calculateDistanceFromMeanSquared( datum );
if( dist < minDistance ) {
minDistance = dist;
minGaussian = gaussian;
}
}
datum.assignment = minGaussian;
}
// M step: update gaussian means based on assigned variants
for( final MultivariateGaussian gaussian : gaussians ) {
gaussian.zeroOutMu();
int numAssigned = 0;
for( final VariantDatum datum : data ) {
if( datum.assignment.equals(gaussian) ) {
numAssigned++;
gaussian.incrementMu( datum );
}
}
if( numAssigned != 0 ) {
gaussian.divideEqualsMu( ((double) numAssigned) );
} else {
gaussian.initializeRandomMu( Utils.getRandomGenerator() );
}
}
}
}
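/**
* E-step of the variational Bayes EM training: for every datum, computes the normalized
* responsibility of each Gaussian from the per-Gaussian log10 likelihoods and hands those
* responsibilities back to the Gaussians.
*/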
public void expectationStep( final List<VariantDatum> data ) {
for( final MultivariateGaussian gaussian : gaussians ) {
gaussian.precomputeDenominatorForVariationalBayes( getSumHyperParameterLambda() );
}
for( final VariantDatum datum : data ) {
final double[] pVarInGaussianLog10 = new double[gaussians.size()];
int gaussianIndex = 0;
for( final MultivariateGaussian gaussian : gaussians ) {
final double pVarLog10 = gaussian.evaluateDatumLog10( datum );
pVarInGaussianLog10[gaussianIndex++] = pVarLog10;
}
final double[] pVarInGaussianNormalized = MathUtils.normalizeFromLog10( pVarInGaussianLog10, false );
gaussianIndex = 0;
for( final MultivariateGaussian gaussian : gaussians ) {
gaussian.assignPVarInGaussian( pVarInGaussianNormalized[gaussianIndex++] );
}
}
}
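/**
* M-step of the variational Bayes EM training: each Gaussian re-estimates its parameters from the
* data (using the responsibilities accumulated during the E-step), regularized by the empirical
* mean/covariance and the prior hyperparameters.
*/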
public void maximizationStep( final List<VariantDatum> data ) {
for( final MultivariateGaussian gaussian : gaussians ) {
gaussian.maximizeGaussian( data, empiricalMu, empiricalSigma, shrinkage, dirichletParameter, priorCounts);
}
}
private double getSumHyperParameterLambda() {
double sum = 0.0;
for( final MultivariateGaussian gaussian : gaussians ) {
sum += gaussian.hyperParameter_lambda;
}
return sum;
}
public void evaluateFinalModelParameters( final List<VariantDatum> data ) {
for( final MultivariateGaussian gaussian : gaussians ) {
gaussian.evaluateFinalModelParameters(data);
}
normalizePMixtureLog10();
}
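/**
* Re-normalizes the log10 mixture weights from the probability mass accumulated in each Gaussian.
*
* @return the sum of the absolute changes in the log10 mixture weights, which can serve as a
* convergence measure.
*/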
public double normalizePMixtureLog10() {
double sumDiff = 0.0;
double sumPK = 0.0;
for( final MultivariateGaussian gaussian : gaussians ) {
sumPK += gaussian.sumProb;
}
int gaussianIndex = 0;
double[] pGaussianLog10 = new double[gaussians.size()];
for( final MultivariateGaussian gaussian : gaussians ) {
pGaussianLog10[gaussianIndex++] = Math.log10( gaussian.sumProb / sumPK );
}
pGaussianLog10 = MathUtils.normalizeFromLog10( pGaussianLog10, true );
gaussianIndex = 0;
for( final MultivariateGaussian gaussian : gaussians ) {
sumDiff += Math.abs( pGaussianLog10[gaussianIndex] - gaussian.pMixtureLog10 );
gaussian.pMixtureLog10 = pGaussianLog10[gaussianIndex++];
}
return sumDiff;
}
public void precomputeDenominatorForEvaluation() {
for( final MultivariateGaussian gaussian : gaussians ) {
gaussian.precomputeDenominatorForEvaluation();
}
isModelReadyForEvaluation = true;
}
/**
* A version of Log10SumLog10 that tolerates NaN values in the array
*
* In the case where one or more of the values are NaN, this function returns NaN
*
* @param values a non-null vector of doubles
* @return the log10 of the sum of the probabilities represented by the input log10 values, or NaN if any value is NaN
*/
private double nanTolerantLog10SumLog10(final double[] values) {
for ( final double value : values )
if ( Double.isNaN(value) ) return Double.NaN;
return MathUtils.log10sumLog10(values);
}
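/**
* Evaluates the log10 likelihood of a datum under the full mixture, i.e. log10( Sum_k pi_k * p(v|k) ).
* If any annotation value is missing, the computation is delegated to {@link #evaluateDatumMarginalized}.
*/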
public double evaluateDatum( final VariantDatum datum ) {
for( final boolean isNull : datum.isNull ) {
if( isNull ) { return evaluateDatumMarginalized( datum ); }
}
// Fill an array with the log10 probability coming from each Gaussian and then use MathUtils to sum them up correctly
final double[] pVarInGaussianLog10 = new double[gaussians.size()];
int gaussianIndex = 0;
for( final MultivariateGaussian gaussian : gaussians ) {
pVarInGaussianLog10[gaussianIndex++] = gaussian.pMixtureLog10 + gaussian.evaluateDatumLog10( datum );
}
return nanTolerantLog10SumLog10(pVarInGaussianLog10); // Sum(pi_k * p(v|n,k))
}
// Used only to decide which covariate dimension is most divergent in order to report in the culprit info field annotation
public Double evaluateDatumInOneDimension( final VariantDatum datum, final int iii ) {
if(datum.isNull[iii]) { return null; }
final double[] pVarInGaussianLog10 = new double[gaussians.size()];
int gaussianIndex = 0;
for( final MultivariateGaussian gaussian : gaussians ) {
pVarInGaussianLog10[gaussianIndex++] = gaussian.pMixtureLog10 + MathUtils.normalDistributionLog10(gaussian.mu[iii], gaussian.sigma.get(iii, iii), datum.annotations[iii]);
}
return nanTolerantLog10SumLog10(pVarInGaussianLog10); // Sum(pi_k * p(v|n,k))
}
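/**
* Evaluates a datum with one or more missing annotations by Monte Carlo marginalization: each missing
* annotation is repeatedly drawn from the standard normal distribution, the mixture likelihood is
* evaluated for every draw, and the average probability is returned in log10 space.
*/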
public double evaluateDatumMarginalized( final VariantDatum datum ) {
int numRandomDraws = 0;
double sumPVarInGaussian = 0.0;
final int numIterPerMissingAnnotation = 20; // Trade off here between speed of computation and accuracy of the marginalization
final double[] pVarInGaussianLog10 = new double[gaussians.size()];
// for each dimension
for( int iii = 0; iii < datum.annotations.length; iii++ ) {
// if it is missing marginalize over the missing dimension by drawing X random values for the missing annotation and averaging the lod
if( datum.isNull[iii] ) {
for( int ttt = 0; ttt < numIterPerMissingAnnotation; ttt++ ) {
datum.annotations[iii] = Utils.getRandomGenerator().nextGaussian(); // draw a random sample from the standard normal distribution
// evaluate this random data point
int gaussianIndex = 0;
for( final MultivariateGaussian gaussian : gaussians ) {
pVarInGaussianLog10[gaussianIndex++] = gaussian.pMixtureLog10 + gaussian.evaluateDatumLog10( datum );
}
// add this sample's probability to the pile in order to take an average in the end
sumPVarInGaussian += Math.pow(10.0, nanTolerantLog10SumLog10(pVarInGaussianLog10)); // p = 10 ^ Sum(pi_k * p(v|n,k))
numRandomDraws++;
}
}
}
return Math.log10( sumPVarInGaussian / ((double) numRandomDraws) );
}
protected List<MultivariateGaussian> getModelGaussians() {return Collections.unmodifiableList(gaussians);}
protected int getNumAnnotations() {return empiricalMu.length;}
}<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/graphs/Path.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller.graphs;
import com.google.java.contract.Ensures;
import htsjdk.samtools.Cigar;
import org.apache.commons.lang.ArrayUtils;
import org.broadinstitute.gatk.utils.sam.CigarUtils;
import java.util.*;
/**
* A path through a BaseGraph
*
* Class to keep track of paths
*
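* A path is built incrementally: construct it from a starting vertex and then extend it one edge at
* a time with the extending constructor. A minimal sketch (the concrete vertex/edge types and the
* variable names used here are placeholders, not part of this class):
* {@code Path<MyVertex,MyEdge> path = new Path<>(start, graph);}
* {@code path = new Path<>(path, graph.getEdge(path.getLastVertex(), next));}
*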
* User: depristo
* Date: 3/19/13
* Time: 2:34 PM
*
*/
public class Path<T extends BaseVertex, E extends BaseEdge> {
// the last vertex seen in the path
protected final T lastVertex;
protected final List<E> edgesInOrder;
// the total score for the path
protected final int totalScore;
// the graph from which this path originated
protected final BaseGraph<T, E> graph;
/**
* Create a new Path containing no edges and starting at initialVertex
* @param initialVertex the starting vertex of the path
* @param graph the graph this path will follow through
*/
public Path(final T initialVertex, final BaseGraph<T, E> graph) {
if ( initialVertex == null ) throw new IllegalArgumentException("initialVertex cannot be null");
if ( graph == null ) throw new IllegalArgumentException("graph cannot be null");
if ( ! graph.containsVertex(initialVertex) ) throw new IllegalArgumentException("Vertex " + initialVertex + " must be part of graph " + graph);
lastVertex = initialVertex;
edgesInOrder = Collections.emptyList();
totalScore = 0;
this.graph = graph;
}
protected Path(final BaseGraph<T, E> graph, final List<E> edges, final T lastVertex, final int totalScore) {
this.graph = graph;
edgesInOrder = edges;
this.lastVertex = lastVertex;
this.totalScore = totalScore;
}
/**
* Convenience constructor for testing that creates a path through vertices in graph
*
* @deprecated use {@link PathBuilder}.
*/
protected static <T extends BaseVertex, E extends BaseEdge> Path<T,E> makePath(final List<T> vertices, final BaseGraph<T, E> graph) {
Path<T,E> path = new Path<T,E>(vertices.get(0), graph);
for ( int i = 1; i < vertices.size(); i++ )
path = new Path<T,E>(path, graph.getEdge(path.lastVertex, vertices.get(i)));
return path;
}
/**
* Create a new path with the same field values.
*
* @param p the template path.
*
* @throws NullPointerException if {@code p} is {@code null}.
*/
protected Path(final Path<T,E> p) {
this.edgesInOrder = p.edgesInOrder;
this.lastVertex = p.lastVertex;
this.totalScore = p.totalScore;
this.graph = p.graph;
}
/**
* Create a new Path extending p with edge
*
* @param p the path to extend.
* @param edge the edge to extend path with.
*
* @throws IllegalArgumentException if {@code p} or {@code edge} are {@code null}, or {@code edge} is
* not part of {@code p}'s graph.
* @throws IllegalStateException if {@code edge} does not have as a source the last vertex in {@code p}.
*/
public Path(final Path<T,E> p, final E edge) {
if ( p == null ) throw new IllegalArgumentException("Path cannot be null");
if ( edge == null ) throw new IllegalArgumentException("Edge cannot be null");
if ( ! p.graph.containsEdge(edge) ) throw new IllegalArgumentException("Graph must contain edge " + edge + " but it doesn't");
if ( ! p.graph.getEdgeSource(edge).equals(p.lastVertex) ) { throw new IllegalStateException("Edges added to path must be contiguous."); }
graph = p.graph;
lastVertex = p.graph.getEdgeTarget(edge);
edgesInOrder = new ArrayList<>(p.length() + 1);
edgesInOrder.addAll(p.edgesInOrder);
edgesInOrder.add(edge);
totalScore = p.totalScore + edge.getMultiplicity();
}
/**
* Length of the path in edges.
*
* @return {@code 0} or greater.
*/
public int length() {
return edgesInOrder.size();
}
/**
* Prepend a path with an edge.
*
* @param edge the extending edge.
* @param p the original path.
*
* @throws IllegalArgumentException if {@code p} or {@code edge} are {@code null}, or {@code edge} is
* not part of {@code p}'s graph.
* @throws IllegalStateException if {@code edge} does not have as a target the first vertex in {@code p}.
*/
public Path(final E edge, final Path<T,E> p) {
if ( p == null ) throw new IllegalArgumentException("Path cannot be null");
if ( edge == null ) throw new IllegalArgumentException("Edge cannot be null");
if ( ! p.graph.containsEdge(edge) ) throw new IllegalArgumentException("Graph must contain edge " + edge + " but it doesn't");
if ( ! p.graph.getEdgeTarget(edge).equals(p.getFirstVertex())) { throw new IllegalStateException("Edges added to path must be contiguous."); }
graph = p.graph;
lastVertex = p.lastVertex;
edgesInOrder = new ArrayList<>(p.length() + 1);
edgesInOrder.add(edge);
edgesInOrder.addAll(p.getEdges());
totalScore = p.totalScore + edge.getMultiplicity();
}
/**
* Checks whether a given path is a suffix of this path.
*
* @param other the path to compare against.
* @throws IllegalArgumentException if <code>other</code> is <code>null</code>, or the paths come from
* different graphs.
* @return true if <code>other</code> is a suffix of this path.
*/
public boolean isSuffix(final Path<T, E> other) {
if ( other == null ) throw new IllegalArgumentException("path cannot be null");
if (other.getGraph() != this.getGraph()) throw new IllegalArgumentException("the other path must belong to the same graph");
if (!lastVertex.equals(other.lastVertex))
return false;
final ListIterator<E> myIt = edgesInOrder.listIterator(edgesInOrder.size());
final ListIterator<E> otherIt = other.edgesInOrder.listIterator(other.edgesInOrder.size());
while (myIt.hasPrevious() && otherIt.hasPrevious())
if (otherIt.previous() != myIt.previous())
return false;
return !otherIt.hasPrevious();
}
/**
* Check that two paths have the same edges and total score
* @param path the other path we might be the same as
* @return true if this and path are the same
*/
protected boolean pathsAreTheSame(Path<T,E> path) {
return totalScore == path.totalScore && edgesInOrder.equals(path.edgesInOrder);
}
@Override
public String toString() {
final StringBuilder b = new StringBuilder("Path{score=" + totalScore + ", path=");
boolean first = true;
for ( final T v : getVertices() ) {
if ( first )
first = false;
else
b.append(" -> ");
b.append(v.getSequenceString());
}
b.append('}');
return b.toString();
}
/**
* Get the graph of this path
* @return a non-null graph
*/
@Ensures("result != null")
public BaseGraph<T, E> getGraph() {
return graph;
}
/**
* Get the edges of this path in order
* @return a non-null list of edges
*/
@Ensures("result != null")
public List<E> getEdges() { return edgesInOrder; }
/**
* Get the list of vertices in this path in order defined by the edges of the path
* @return a non-null, non-empty list of vertices
*/
@Ensures({"result != null", "!result.isEmpty()"})
public List<T> getVertices() {
if ( getEdges().isEmpty() )
return Collections.singletonList(lastVertex);
else {
final LinkedList<T> vertices = new LinkedList<T>();
boolean first = true;
for ( final E e : getEdges() ) {
if ( first ) {
vertices.add(graph.getEdgeSource(e));
first = false;
}
vertices.add(graph.getEdgeTarget(e));
}
return vertices;
}
}
/**
* Get the total score of this path (bigger is better)
* @return a non-negative integer
*/
@Ensures("result >= 0")
public int getScore() { return totalScore; }
/**
* Get the final vertex of the path
* @return a non-null vertex
*/
@Ensures("result != null")
public T getLastVertex() { return lastVertex; }
/**
* Get the first vertex in this path
* @return a non-null vertex
*/
public T getFirstVertex() {
if (edgesInOrder.size() == 0) {
return lastVertex;
} else {
return getGraph().getEdgeSource(edgesInOrder.get(0));
}
}
/**
* The base sequence for this path. Pull the full sequence for source nodes and then the suffix for all subsequent nodes
* @return non-null sequence of bases corresponding to this path
*/
@Ensures({"result != null"})
public byte[] getBases() {
if( getEdges().isEmpty() ) { return graph.getAdditionalSequence(lastVertex); }
byte[] bases = graph.getAdditionalSequence(graph.getEdgeSource(edgesInOrder.get(0)));
for( final E e : edgesInOrder ) {
bases = ArrayUtils.addAll(bases, graph.getAdditionalSequence(graph.getEdgeTarget(e)));
}
return bases;
}
/**
* Calculate the cigar elements for this path against the reference sequence
*
* @param refSeq the reference sequence that all of the bases in this path should align to
* @return a Cigar mapping this path to refSeq, or null if no reasonable alignment could be found
*/
public Cigar calculateCigar(final byte[] refSeq) {
return CigarUtils.calculateCigar(refSeq,getBases());
}
/**
* Tests that this and other have the same score and vertices in the same order with the same sequence
* @param other the other path to consider. Cannot be null
* @return true if this and path are equal, false otherwise
*/
public boolean equalScoreAndSequence(final Path<T,E> other) {
if ( other == null ) throw new IllegalArgumentException("other cannot be null");
return getScore() == other.getScore() && equalSequence(other);
}
/**
* Tests that this and other have the same vertices in the same order with the same sequence
* @param other the other path to consider. Cannot be null
* @return true if this and path are equal, false otherwise
*/
public boolean equalSequence(final Path<T,E> other) {
final List<T> mine = getVertices();
final List<T> yours = other.getVertices();
if ( mine.size() != yours.size() )
return false;
for ( int i = 0; i < mine.size(); i++ )
if ( ! mine.get(i).seqEquals(yours.get(i)) )
return false;
return true;
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/LocalAssemblyEngine.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller;
import com.google.java.contract.Ensures;
import com.google.java.contract.Requires;
import htsjdk.samtools.Cigar;
import htsjdk.samtools.CigarElement;
import htsjdk.samtools.CigarOperator;
import org.apache.log4j.Logger;
import org.broadinstitute.gatk.tools.walkers.haplotypecaller.graphs.*;
import org.broadinstitute.gatk.utils.GenomeLoc;
import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
import org.broadinstitute.gatk.utils.gga.GenotypingGivenAllelesUtils;
import org.broadinstitute.gatk.utils.haplotype.Haplotype;
import org.broadinstitute.gatk.utils.sam.CigarUtils;
import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
import htsjdk.variant.variantcontext.VariantContext;
import java.io.File;
import java.io.PrintStream;
import java.util.*;
/**
* Abstract base class for all HaplotypeCaller assemblers
*
* User: ebanks
* Date: Mar 14, 2011
*/
public abstract class LocalAssemblyEngine {
private final static Logger logger = Logger.getLogger(LocalAssemblyEngine.class);
/**
* If false, we will only write out a region around the reference source
*/
private final static boolean PRINT_FULL_GRAPH_FOR_DEBUGGING = true;
public static final byte DEFAULT_MIN_BASE_QUALITY_TO_USE = (byte) 10;
private static final int MIN_HAPLOTYPE_REFERENCE_LENGTH = 30;
protected final int numBestHaplotypesPerGraph;
protected boolean debug = false;
protected boolean allowCyclesInKmerGraphToGeneratePaths = false;
protected boolean debugGraphTransformations = false;
protected boolean recoverDanglingBranches = true;
protected int minDanglingBranchLength = 0;
protected byte minBaseQualityToUseInAssembly = DEFAULT_MIN_BASE_QUALITY_TO_USE;
protected int pruneFactor = 2;
protected boolean errorCorrectKmers = false;
private PrintStream graphWriter = null;
/**
* Create a new LocalAssemblyEngine with all default parameters, ready for use
* @param numBestHaplotypesPerGraph the number of haplotypes to generate for each assembled graph
*/
protected LocalAssemblyEngine(final int numBestHaplotypesPerGraph) {
if ( numBestHaplotypesPerGraph < 1 ) throw new IllegalArgumentException("numBestHaplotypesPerGraph should be >= 1 but got " + numBestHaplotypesPerGraph);
this.numBestHaplotypesPerGraph = numBestHaplotypesPerGraph;
}
/**
* Main subclass function: given reads and a reference haplotype give us graphs to use for constructing
* non-reference haplotypes.
*
* @param reads the reads we're going to assemble
* @param refHaplotype the reference haplotype
* @return a non-null list of assembly results
*/
protected abstract List<AssemblyResult> assemble(List<GATKSAMRecord> reads, Haplotype refHaplotype, List<Haplotype> givenHaplotypes);
/**
* Main entry point into the assembly engine. Build a set of deBruijn graphs out of the provided reference sequence and list of reads
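* <p>A minimal illustrative call might look like the sketch below (hedged: the engine instance and the
* active-region, reference and location objects are assumed to be supplied by the surrounding
* HaplotypeCaller code, the variable names are hypothetical, and no GGA alleles or read error
* correction are used):</p>
* <pre>
*     final AssemblyResultSet assemblyResults = assemblyEngine.runLocalAssembly(
*             activeRegion, refHaplotype, fullReferenceWithPadding, refLoc,
*             Collections.&lt;VariantContext&gt;emptyList(), null);
* </pre>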
* @param activeRegion ActiveRegion object holding the reads which are to be used during assembly
* @param refHaplotype reference haplotype object
* @param fullReferenceWithPadding byte array holding the reference sequence with padding
* @param refLoc GenomeLoc object corresponding to the reference sequence with padding
* @param givenAlleles the alleles to inject into the haplotypes during GGA mode
* @param readErrorCorrector a ReadErrorCorrector object, if reads are to be corrected before assembly. Can be null if no error corrector is to be used.
* @return the resulting assembly-result-set
*/
public AssemblyResultSet runLocalAssembly(final ActiveRegion activeRegion,
final Haplotype refHaplotype,
final byte[] fullReferenceWithPadding,
final GenomeLoc refLoc,
final List<VariantContext> givenAlleles,
final ReadErrorCorrector readErrorCorrector) {
if( activeRegion == null ) { throw new IllegalArgumentException("Assembly engine cannot be used with a null ActiveRegion."); }
if( activeRegion.getExtendedLoc() == null ) { throw new IllegalArgumentException("Active region must have an extended location."); }
if( refHaplotype == null ) { throw new IllegalArgumentException("Reference haplotype cannot be null."); }
if( fullReferenceWithPadding.length != refLoc.size() ) { throw new IllegalArgumentException("Reference bases and reference loc must be the same size."); }
if( pruneFactor < 0 ) { throw new IllegalArgumentException("Pruning factor cannot be negative"); }
// create the list of artificial haplotypes that should be added to the graph for GGA mode
final List<Haplotype> givenHaplotypes = GenotypingGivenAllelesUtils.composeGivenHaplotypes(refHaplotype, givenAlleles, activeRegion.getExtendedLoc());
// error-correct reads before clipping low-quality tails: some low quality bases might be good and we want to recover them
final List<GATKSAMRecord> correctedReads;
if (readErrorCorrector != null) {
// now correct all reads in active region after filtering/downsampling
// Note that original reads in active region are NOT modified by default, since they will be used later for GL computation,
// and we only want the read-error corrected reads for graph building.
readErrorCorrector.addReadsToKmers(activeRegion.getReads());
correctedReads = new ArrayList<>(readErrorCorrector.correctReads(activeRegion.getReads()));
} else {
correctedReads = activeRegion.getReads();
}
final List<SeqGraph> nonRefGraphs = new LinkedList<>();
final AssemblyResultSet resultSet = new AssemblyResultSet();
resultSet.setRegionForGenotyping(activeRegion);
resultSet.setFullReferenceWithPadding(fullReferenceWithPadding);
resultSet.setPaddedReferenceLoc(refLoc);
final GenomeLoc activeRegionExtendedLocation = activeRegion.getExtendedLoc();
refHaplotype.setGenomeLocation(activeRegionExtendedLocation);
resultSet.add(refHaplotype);
final Map<SeqGraph,AssemblyResult> assemblyResultByGraph = new HashMap<>();
// create the graphs by calling our subclass assemble method
for ( final AssemblyResult result : assemble(correctedReads, refHaplotype, givenHaplotypes) ) {
if ( result.getStatus() == AssemblyResult.Status.ASSEMBLED_SOME_VARIATION ) {
// do some QC on the graph
sanityCheckGraph(result.getGraph(), refHaplotype);
// add it to graphs with meaningful non-reference features
assemblyResultByGraph.put(result.getGraph(),result);
nonRefGraphs.add(result.getGraph());
}
}
findBestPaths(nonRefGraphs, refHaplotype, refLoc, activeRegionExtendedLocation, assemblyResultByGraph, resultSet);
// print the graphs if the appropriate debug option has been turned on
if ( graphWriter != null ) { printGraphs(nonRefGraphs); }
return resultSet;
}
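/**
* Find the numBestHaplotypesPerGraph best haplotype paths through each assembly graph, align each one back
* to the reference haplotype to produce a cigar, and register the surviving haplotypes (together with the
* reference haplotype itself) in the assembly result set.
*
* @param graphs the assembled sequence graphs to search
* @param refHaplotype the reference haplotype
* @param refLoc the padded reference location (used for logging only)
* @param activeRegionWindow the extended active region window assigned to each discovered haplotype
* @param assemblyResultByGraph map from each graph to the AssemblyResult it came from
* @param assemblyResultSet the result set to which discovered haplotypes are added
* @return the list of discovered haplotypes, always containing the reference haplotype
*/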
@Ensures({"result.contains(refHaplotype)"})
protected List<Haplotype> findBestPaths(final List<SeqGraph> graphs, final Haplotype refHaplotype, final GenomeLoc refLoc, final GenomeLoc activeRegionWindow,
final Map<SeqGraph,AssemblyResult> assemblyResultByGraph, final AssemblyResultSet assemblyResultSet) {
// add the reference haplotype separately from all the others to ensure that it is present in the list of haplotypes
final Set<Haplotype> returnHaplotypes = new LinkedHashSet<>();
final int activeRegionStart = refHaplotype.getAlignmentStartHapwrtRef();
final ArrayList<KBestHaplotypeFinder> finders = new ArrayList<>(graphs.size());
int failedCigars = 0;
for( final SeqGraph graph : graphs ) {
final SeqVertex source = graph.getReferenceSourceVertex();
final SeqVertex sink = graph.getReferenceSinkVertex();
if ( source == null || sink == null ) throw new IllegalArgumentException("Both source and sink cannot be null but got " + source + " and sink " + sink + " for graph "+ graph);
final KBestHaplotypeFinder haplotypeFinder = new KBestHaplotypeFinder(graph,source,sink);
finders.add(haplotypeFinder);
final Iterator<KBestHaplotype> bestHaplotypes = haplotypeFinder.iterator(numBestHaplotypesPerGraph);
while (bestHaplotypes.hasNext()) {
final KBestHaplotype kBestHaplotype = bestHaplotypes.next();
final Haplotype h = kBestHaplotype.haplotype();
if( !returnHaplotypes.contains(h) ) {
final Cigar cigar = CigarUtils.calculateCigar(refHaplotype.getBases(),h.getBases());
if ( cigar == null ) {
failedCigars++; // couldn't produce a meaningful alignment of haplotype to reference, fail quietly
continue;
} else if( cigar.isEmpty() ) {
throw new IllegalStateException("Smith-Waterman alignment failure. Cigar = " + cigar + " with reference length " + cigar.getReferenceLength() +
" but expecting reference length of " + refHaplotype.getCigar().getReferenceLength());
} else if ( pathIsTooDivergentFromReference(cigar) || cigar.getReferenceLength() < MIN_HAPLOTYPE_REFERENCE_LENGTH ) {
// N cigar elements means that a bubble was too divergent from the reference so skip over this path
continue;
} else if( cigar.getReferenceLength() != refHaplotype.getCigar().getReferenceLength() ) { // SW failure
throw new IllegalStateException("Smith-Waterman alignment failure. Cigar = " + cigar + " with reference length "
+ cigar.getReferenceLength() + " but expecting reference length of " + refHaplotype.getCigar().getReferenceLength()
+ " ref = " + refHaplotype + " path " + new String(h.getBases()));
}
h.setCigar(cigar);
h.setAlignmentStartHapwrtRef(activeRegionStart);
h.setGenomeLocation(activeRegionWindow);
returnHaplotypes.add(h);
assemblyResultSet.add(h, assemblyResultByGraph.get(graph));
if ( debug )
logger.info("Adding haplotype " + h.getCigar() + " from graph with kmer " + graph.getKmerSize());
}
}
}
// Make sure that the ref haplotype is amongst the returned haplotypes and calculate its score as
// the first non-NaN score returned by any finder.
if (!returnHaplotypes.contains(refHaplotype)) {
double refScore = Double.NaN;
for (final KBestHaplotypeFinder finder : finders) {
final double candidate = finder.score(refHaplotype);
if (Double.isNaN(candidate)) continue;
refScore = candidate;
break;
}
refHaplotype.setScore(refScore);
returnHaplotypes.add(refHaplotype);
}
if (failedCigars != 0)
logger.debug(String.format("failed to align some haplotypes (%d) back to the reference (loc=%s); these will be ignored.",failedCigars,refLoc.toString()));
if( debug ) {
if( returnHaplotypes.size() > 1 ) {
logger.info("Found " + returnHaplotypes.size() + " candidate haplotypes of " + returnHaplotypes.size() + " possible combinations to evaluate every read against.");
} else {
logger.info("Found only the reference haplotype in the assembly graph.");
}
for( final Haplotype h : returnHaplotypes ) {
logger.info( h.toString() );
logger.info( "> Cigar = " + h.getCigar() + " : " + h.getCigar().getReferenceLength() + " score " + h.getScore() + " ref " + h.isReference());
}
}
return new ArrayList<>(returnHaplotypes);
}
/**
* We use CigarOperator.N as the signal that an incomplete or too divergent bubble was found during bubble traversal
* @param c the cigar to test
* @return true if we should skip over this path
*/
@Requires("c != null")
private boolean pathIsTooDivergentFromReference( final Cigar c ) {
for( final CigarElement ce : c.getCigarElements() ) {
if( ce.getOperator().equals(CigarOperator.N) ) {
return true;
}
}
return false;
}
/**
* Print graph to file if debugGraphTransformations is enabled
* @param graph the graph to print
* @param file the destination file
*/
protected void printDebugGraphTransform(final BaseGraph graph, final File file) {
if ( debugGraphTransformations ) {
if ( PRINT_FULL_GRAPH_FOR_DEBUGGING )
graph.printGraph(file, pruneFactor);
else
graph.subsetToRefSource().printGraph(file, pruneFactor);
}
}
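/**
* Clean up the provided sequence graph: zip up linear chains, prune singleton and orphaned vertices,
* remove paths not connected to the reference, and simplify the result, writing intermediate .dot files
* when debugGraphTransformations is enabled.
*
* @param seqGraph the sequence graph to clean up (modified in place)
* @return an AssemblyResult wrapping the cleaned graph, with status JUST_ASSEMBLED_REFERENCE if the graph
*         collapsed back to the reference and ASSEMBLED_SOME_VARIATION otherwise
*/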
protected AssemblyResult cleanupSeqGraph(final SeqGraph seqGraph) {
printDebugGraphTransform(seqGraph, new File("sequenceGraph.1.dot"));
// the very first thing we need to do is zip up the graph, or pruneGraph will be too aggressive
seqGraph.zipLinearChains();
printDebugGraphTransform(seqGraph, new File("sequenceGraph.2.zipped.dot"));
// now go through and prune the graph, removing vertices no longer connected to the reference chain
seqGraph.removeSingletonOrphanVertices();
seqGraph.removeVerticesNotConnectedToRefRegardlessOfEdgeDirection();
printDebugGraphTransform(seqGraph, new File("sequenceGraph.3.pruned.dot"));
seqGraph.simplifyGraph();
printDebugGraphTransform(seqGraph, new File("sequenceGraph.4.merged.dot"));
// The graph has degenerated in some way, so the reference source and/or sink cannot be id'd. Can
// happen in cases where for example the reference somehow manages to acquire a cycle, or
// where the entire assembly collapses back into the reference sequence.
if ( seqGraph.getReferenceSourceVertex() == null || seqGraph.getReferenceSinkVertex() == null )
return new AssemblyResult(AssemblyResult.Status.JUST_ASSEMBLED_REFERENCE, seqGraph);
seqGraph.removePathsNotConnectedToRef();
seqGraph.simplifyGraph();
if ( seqGraph.vertexSet().size() == 1 ) {
// we've perfectly assembled into a single reference haplotype, add an empty seq vertex to stop
// the code from blowing up.
// TODO -- ref properties should really be on the vertices, not the graph itself
final SeqVertex complete = seqGraph.vertexSet().iterator().next();
final SeqVertex dummy = new SeqVertex("");
seqGraph.addVertex(dummy);
seqGraph.addEdge(complete, dummy, new BaseEdge(true, 0));
}
printDebugGraphTransform(seqGraph, new File("sequenceGraph.5.final.dot"));
return new AssemblyResult(AssemblyResult.Status.ASSEMBLED_SOME_VARIATION, seqGraph);
}
/**
* Perform general QC on the graph to make sure something hasn't gone wrong during assembly
* @param graph the graph to check
* @param refHaplotype the reference haplotype
*/
private <T extends BaseVertex, E extends BaseEdge> void sanityCheckGraph(final BaseGraph<T,E> graph, final Haplotype refHaplotype) {
sanityCheckReferenceGraph(graph, refHaplotype);
}
/**
* Make sure the reference sequence is properly represented in the provided graph
*
* @param graph the graph to check
* @param refHaplotype the reference haplotype
*/
private <T extends BaseVertex, E extends BaseEdge> void sanityCheckReferenceGraph(final BaseGraph<T,E> graph, final Haplotype refHaplotype) {
if( graph.getReferenceSourceVertex() == null ) {
throw new IllegalStateException("All reference graphs must have a reference source vertex.");
}
if( graph.getReferenceSinkVertex() == null ) {
throw new IllegalStateException("All reference graphs must have a reference sink vertex.");
}
if( !Arrays.equals(graph.getReferenceBytes(graph.getReferenceSourceVertex(), graph.getReferenceSinkVertex(), true, true), refHaplotype.getBases()) ) {
throw new IllegalStateException("Mismatch between the reference haplotype and the reference assembly graph path. for graph " + graph +
" graph = " + new String(graph.getReferenceBytes(graph.getReferenceSourceVertex(), graph.getReferenceSinkVertex(), true, true)) +
" haplotype = " + new String(refHaplotype.getBases())
);
}
}
/**
* Print the generated graphs to the graphWriter
* @param graphs a non-null list of graphs to print out
*/
private void printGraphs(final List<SeqGraph> graphs) {
final int writeFirstGraphWithSizeSmallerThan = 50;
graphWriter.println("digraph assemblyGraphs {");
for( final SeqGraph graph : graphs ) {
if ( debugGraphTransformations && graph.getKmerSize() >= writeFirstGraphWithSizeSmallerThan ) {
logger.info("Skipping writing of graph with kmersize " + graph.getKmerSize());
continue;
}
graph.printGraph(graphWriter, false, pruneFactor);
if ( debugGraphTransformations )
break;
}
graphWriter.println("}");
}
// -----------------------------------------------------------------------------------------------
//
// getter / setter routines for generic assembler properties
//
// -----------------------------------------------------------------------------------------------
public int getPruneFactor() {
return pruneFactor;
}
public void setPruneFactor(int pruneFactor) {
this.pruneFactor = pruneFactor;
}
public boolean shouldErrorCorrectKmers() {
return errorCorrectKmers;
}
public void setErrorCorrectKmers(boolean errorCorrectKmers) {
this.errorCorrectKmers = errorCorrectKmers;
}
public void setGraphWriter(PrintStream graphWriter) {
this.graphWriter = graphWriter;
}
public byte getMinBaseQualityToUseInAssembly() {
return minBaseQualityToUseInAssembly;
}
public void setMinBaseQualityToUseInAssembly(byte minBaseQualityToUseInAssembly) {
this.minBaseQualityToUseInAssembly = minBaseQualityToUseInAssembly;
}
public boolean isDebug() {
return debug;
}
public void setDebug(boolean debug) {
this.debug = debug;
}
public boolean isAllowCyclesInKmerGraphToGeneratePaths() {
return allowCyclesInKmerGraphToGeneratePaths;
}
public void setAllowCyclesInKmerGraphToGeneratePaths(boolean allowCyclesInKmerGraphToGeneratePaths) {
this.allowCyclesInKmerGraphToGeneratePaths = allowCyclesInKmerGraphToGeneratePaths;
}
public boolean isDebugGraphTransformations() {
return debugGraphTransformations;
}
public void setDebugGraphTransformations(boolean debugGraphTransformations) {
this.debugGraphTransformations = debugGraphTransformations;
}
public boolean isRecoverDanglingBranches() { return recoverDanglingBranches; }
public void setRecoverDanglingBranches(final boolean recoverDanglingBranches) {
this.recoverDanglingBranches = recoverDanglingBranches;
}
public void setMinDanglingBranchLength(final int minDanglingBranchLength) { this.minDanglingBranchLength = minDanglingBranchLength; }
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/GenotypeGVCFs.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.variantutils;
import htsjdk.variant.variantcontext.*;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.vcf.*;
import org.broadinstitute.gatk.engine.CommandLineGATK;
import org.broadinstitute.gatk.engine.GATKVCFUtils;
import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
import org.broadinstitute.gatk.engine.SampleUtils;
import org.broadinstitute.gatk.engine.arguments.DbsnpArgumentCollection;
import org.broadinstitute.gatk.engine.arguments.GenotypeCalculationArgumentCollection;
import org.broadinstitute.gatk.engine.walkers.Reference;
import org.broadinstitute.gatk.engine.walkers.RodWalker;
import org.broadinstitute.gatk.engine.walkers.TreeReducible;
import org.broadinstitute.gatk.engine.walkers.Window;
import org.broadinstitute.gatk.tools.walkers.annotator.RankSumTest;
import org.broadinstitute.gatk.tools.walkers.annotator.RMSAnnotation;
import org.broadinstitute.gatk.tools.walkers.annotator.VariantAnnotatorEngine;
import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.AS_StandardAnnotation;
import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.AnnotatorCompatible;
import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.InfoFieldAnnotation;
import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.StandardAnnotation;
import org.broadinstitute.gatk.tools.walkers.genotyper.OutputMode;
import org.broadinstitute.gatk.tools.walkers.genotyper.UnifiedArgumentCollection;
import org.broadinstitute.gatk.tools.walkers.genotyper.UnifiedGenotypingEngine;
import org.broadinstitute.gatk.tools.walkers.genotyper.afcalc.GeneralPloidyFailOverAFCalculatorProvider;
import org.broadinstitute.gatk.utils.GenomeLoc;
import org.broadinstitute.gatk.utils.commandline.*;
import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
import org.broadinstitute.gatk.utils.genotyper.IndexedSampleList;
import org.broadinstitute.gatk.utils.genotyper.SampleList;
import org.broadinstitute.gatk.utils.genotyper.SampleListUtils;
import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
import org.broadinstitute.gatk.utils.help.HelpConstants;
import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import org.broadinstitute.gatk.utils.variant.GATKVCFHeaderLines;
import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
import java.util.*;
/**
* Perform joint genotyping on gVCF files produced by HaplotypeCaller
*
* <p>
* GenotypeGVCFs merges gVCF records that were produced as part of the Best Practices workflow for variant discovery
* (see Best Practices documentation for more details) using the '-ERC GVCF' or '-ERC BP_RESOLUTION' mode of the
* HaplotypeCaller, or result from combining such gVCF files using CombineGVCFs. This tool performs the multi-sample
* joint aggregation step and merges the records together in a sophisticated manner: at each position of the input
* gVCFs, this tool will combine all spanning records, produce correct genotype likelihoods, re-genotype the newly
* merged record, and then re-annotate it.</p>
*
* <h3>Input</h3>
* <p>
* One or more HaplotypeCaller gVCFs to genotype.
* </p>
*
* <h3>Output</h3>
* <p>
* A combined, genotyped VCF.
* </p>
*
* <h3>Usage example</h3>
* <pre>
* java -jar GenomeAnalysisTK.jar \
* -T GenotypeGVCFs \
* -R reference.fasta \
* --variant sample1.g.vcf \
* --variant sample2.g.vcf \
* -o output.vcf
* </pre>
*
* <h3>Caveats</h3>
* <p>Only gVCF files produced by HaplotypeCaller (or CombineGVCFs) can be used as input for this tool. Some other
* programs produce files that they call gVCFs but those lack some important information (accurate genotype likelihoods
* for every position) that GenotypeGVCFs requires for its operation.</p>
* <p>If the gVCF files contain allele specific annotations, add -G Standard -G AS_Standard to the command line.</p>
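* <p>For example, extending the usage example above to request the allele-specific annotation groups
* (illustrative only; file names are placeholders):</p>
* <pre>
* java -jar GenomeAnalysisTK.jar \
*   -T GenotypeGVCFs \
*   -R reference.fasta \
*   --variant sample1.g.vcf \
*   --variant sample2.g.vcf \
*   -G Standard -G AS_Standard \
*   -o output.vcf
* </pre>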
*
* <h3>Special note on ploidy</h3>
* <p>This tool is able to handle any ploidy (or mix of ploidies) intelligently; there is no need to specify ploidy
* for non-diploid organisms.</p>
*
*/
@DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARDISC, extraDocs = {CommandLineGATK.class} )
@Reference(window=@Window(start=-10,stop=10))
@SuppressWarnings("unused")
public class GenotypeGVCFs extends RodWalker<VariantContext, VariantContextWriter> implements AnnotatorCompatible, TreeReducible<VariantContextWriter> {
private static String GVCF_BLOCK = "GVCFBlock";
/**
* The gVCF files to merge together
*/
@Input(fullName="variant", shortName = "V", doc="One or more input gVCF files", required=true)
public List<RodBindingCollection<VariantContext>> variantCollections;
final private List<RodBinding<VariantContext>> variants = new ArrayList<>();
@Output(doc="File to which variants should be written")
protected VariantContextWriter vcfWriter = null;
@Argument(fullName="includeNonVariantSites", shortName="allSites", doc="Include loci found to be non-variant after genotyping", required=false)
public boolean INCLUDE_NON_VARIANTS = false;
/**
* Uniquify all sample names (intended for use with multiple inputs for the same sample)
*/
@Hidden
@Advanced
@Argument(fullName="uniquifySamples", shortName="uniquifySamples", doc="Assume duplicate samples are present and uniquify all names with '.variant' and file number index")
public boolean uniquifySamples = false;
@ArgumentCollection
public GenotypeCalculationArgumentCollection genotypeArgs = new GenotypeCalculationArgumentCollection();
/**
* Which annotations to recompute for the combined output VCF file.
*/
@Advanced
@Argument(fullName="annotation", shortName="A", doc="One or more specific annotations to recompute. The single value 'none' removes the default annotations", required=false)
protected List<String> annotationsToUse = new ArrayList<>();
/**
* Which groups of annotations to add to the output VCF file. The single value 'none' removes the default group. See
* the VariantAnnotator -list argument to view available groups. Note that this usage is not recommended because
* it obscures the specific requirements of individual annotations. Any requirements that are not met (e.g. failing
* to provide a pedigree file for a pedigree-based annotation) may cause the run to fail.
*/
@Argument(fullName="group", shortName="G", doc="One or more classes/groups of annotations to apply to variant calls", required=false)
protected List<String> annotationGroupsToUse = new ArrayList<>(Arrays.asList(new String[]{StandardAnnotation.class.getSimpleName()}));
/**
* The rsIDs from this file are used to populate the ID column of the output. Also, the DB INFO flag will be set when appropriate. Note that dbSNP is not used in any way for the calculations themselves.
*/
@ArgumentCollection
protected DbsnpArgumentCollection dbsnp = new DbsnpArgumentCollection();
public RodBinding<VariantContext> getDbsnpRodBinding() { return dbsnp.dbsnp; }
// the genotyping engine
private UnifiedGenotypingEngine genotypingEngine;
// the annotation engine
private VariantAnnotatorEngine annotationEngine;
// the INFO field annotation key names to remove
private final List<String> infoFieldAnnotationKeyNamesToRemove = new ArrayList<>();
public List<RodBinding<VariantContext>> getCompRodBindings() { return Collections.emptyList(); }
public RodBinding<VariantContext> getSnpEffRodBinding() { return null; }
public List<RodBinding<VariantContext>> getResourceRodBindings() { return Collections.emptyList(); }
public boolean alwaysAppendDbsnpId() { return false; }
// INFO Header names that require alt alleles
final Set<String> infoHeaderAltAllelesLineNames = new LinkedHashSet<>();
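/**
* Collect the input rod bindings, build the merged sample list, set up the annotation and genotyping
* engines, assemble the merged VCF header lines, and write the header to the output VCF.
*/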
public void initialize() {
boolean inputsAreTagged = false;
// collect the actual rod bindings into a list for use later
for ( final RodBindingCollection<VariantContext> variantCollection : variantCollections ) {
variants.addAll(variantCollection.getRodBindings());
if (uniquifySamples) {
for (final RodBinding<VariantContext> rb : variantCollection.getRodBindings()) {
//are inputs passed in with -V:fileTag ?
if (!rb.getTags().isEmpty()) inputsAreTagged = true;
}
}
}
//RodBinding tags are used in sample uniquification
if (inputsAreTagged)
logger.warn("Output uniquified VCF may not be suitable for input to CombineSampleData because input VCF(s) contain tags.");
final GenomeAnalysisEngine toolkit = getToolkit();
final Map<String, VCFHeader> vcfRods = GATKVCFUtils.getVCFHeadersFromRods(toolkit, variants);
final GATKVariantContextUtils.GenotypeMergeType mergeType = uniquifySamples ?
GATKVariantContextUtils.GenotypeMergeType.UNIQUIFY : GATKVariantContextUtils.GenotypeMergeType.REQUIRE_UNIQUE;
final SampleList samples = new IndexedSampleList(SampleUtils.getSampleList(vcfRods, mergeType));
annotationEngine = new VariantAnnotatorEngine(annotationGroupsToUse, annotationsToUse, Collections.<String>emptyList(), this, toolkit);
// Collect the key names of requested INFO field annotations inheriting from RankSumTest or RMSAnnotation so they can be removed later
for ( final InfoFieldAnnotation annotation : annotationEngine.getRequestedInfoAnnotations() ) {
if ( annotation instanceof RankSumTest || annotation instanceof RMSAnnotation ) {
final List<String> keyNames = annotation.getKeyNames();
if ( !keyNames.isEmpty() ) {
infoFieldAnnotationKeyNamesToRemove.add(keyNames.get(0));
}
}
}
// create the genotyping engine
// when checking for the presence of AS_StandardAnnotation we must deal with the annoying feature that
// the class name with or without the trailing "Annotation" is accepted on the command line
boolean doAlleleSpecificGenotyping = annotationsToUse.contains(GATKVCFConstants.AS_QUAL_BY_DEPTH_KEY)
|| annotationGroupsToUse.contains(AS_StandardAnnotation.class.getSimpleName())
|| annotationGroupsToUse.contains(AS_StandardAnnotation.class.getSimpleName().replace("Annotation", ""));
genotypingEngine = new UnifiedGenotypingEngine(createUAC(), samples, toolkit.getGenomeLocParser(),
GeneralPloidyFailOverAFCalculatorProvider.createThreadSafeProvider(toolkit, genotypeArgs, logger), toolkit.getArguments().BAQMode, doAlleleSpecificGenotyping);
// take care of the VCF headers
final Set<VCFHeaderLine> headerLines = VCFUtils.smartMergeHeaders(vcfRods.values(), true);
// Remove GVCFBlock header lines
for ( final Iterator<VCFHeaderLine> iter = headerLines.iterator(); iter.hasNext(); ) {
if ( iter.next().getKey().contains(GVCF_BLOCK) ) {
iter.remove();
}
}
headerLines.addAll(annotationEngine.getVCFAnnotationDescriptions());
headerLines.addAll(genotypingEngine.getAppropriateVCFInfoHeaders());
// add headers for annotations added by this tool
headerLines.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.MLE_ALLELE_COUNT_KEY));
headerLines.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.MLE_ALLELE_FREQUENCY_KEY));
headerLines.add(GATKVCFHeaderLines.getFormatLine(GATKVCFConstants.REFERENCE_GENOTYPE_QUALITY));
headerLines.add(VCFStandardHeaderLines.getInfoLine(VCFConstants.DEPTH_KEY)); // needed for gVCFs without DP tags
if ( INCLUDE_NON_VARIANTS ) {
// Save INFO header names that require alt alleles
for ( final VCFHeaderLine headerLine : headerLines ) {
if (headerLine instanceof VCFInfoHeaderLine ) {
if (((VCFInfoHeaderLine) headerLine).getCountType() == VCFHeaderLineCount.A) {
infoHeaderAltAllelesLineNames.add(((VCFInfoHeaderLine) headerLine).getID());
}
}
}
}
if ( dbsnp != null && dbsnp.dbsnp.isBound() )
VCFStandardHeaderLines.addStandardInfoLines(headerLines, true, VCFConstants.DBSNP_KEY);
final Set<String> sampleNameSet = SampleListUtils.asSet(samples);
final VCFHeader vcfHeader = new VCFHeader(headerLines, sampleNameSet);
vcfWriter.writeHeader(vcfHeader);
//now that we have all the VCF headers, initialize the annotations (this is particularly important to turn off RankSumTest dithering in integration tests)
annotationEngine.invokeAnnotationInitializationMethods(headerLines);
logger.info("Notice that the -ploidy parameter is ignored in " + getClass().getSimpleName() + " tool as this is automatically determined by the input variant files");
}
// get VariantContexts from input gVCFs, merge, and regenotype
public VariantContext map(final RefMetaDataTracker tracker, final ReferenceContext ref, final AlignmentContext context) {
if ( tracker == null ) // RodWalkers can make funky map calls
return null;
final GenomeLoc loc = ref.getLocus();
final List<VariantContext> vcsAtThisLocus = tracker.getPrioritizedValue(variants, loc);
final Byte refBase = INCLUDE_NON_VARIANTS ? ref.getBase() : null;
final boolean removeNonRefSymbolicAllele = !INCLUDE_NON_VARIANTS;
final VariantContext combinedVC = ReferenceConfidenceVariantContextMerger.merge(vcsAtThisLocus, loc,
refBase, removeNonRefSymbolicAllele, uniquifySamples, annotationEngine);
return combinedVC == null ? null : regenotypeVC(tracker, ref, combinedVC);
}
/**
* Re-genotype (and re-annotate) a combined genomic VC
*
* @param tracker the ref tracker
* @param ref the ref context
* @param originalVC the combined genomic VC
* @return a new VariantContext or null if the site turned monomorphic and we don't want such sites
*/
protected VariantContext regenotypeVC(final RefMetaDataTracker tracker, final ReferenceContext ref, final VariantContext originalVC) {
if ( originalVC == null ) {
throw new IllegalArgumentException("originalVC cannot be null");
} else if (!isProperlyPolymorphic(originalVC) && !INCLUDE_NON_VARIANTS) {
return null;
}
VariantContext result = originalVC;
//don't need to calculate quals for sites with no data whatsoever
if (result.getAttributeAsInt(VCFConstants.DEPTH_KEY,0) > 0 ) {
result = genotypingEngine.calculateGenotypes(originalVC);
}
if (result == null || (!isProperlyPolymorphic(result) && !INCLUDE_NON_VARIANTS)) {
return null;
}
result = addGenotypingAnnotations(originalVC.getAttributes(), result);
//At this point we should already have DP and AD annotated
result = annotationEngine.finalizeAnnotations(result, originalVC);
//do trimming after allele-specific annotation reduction or the mapping is difficult
result = GATKVariantContextUtils.reverseTrimAlleles(result);
// Re-annotate and fix/remove some of the original annotations.
// Note that the order of these actions matters and is different for polymorphic and monomorphic sites.
// For polymorphic sites we need to make sure e.g. the SB tag is sent to the annotation engine and then removed later.
// For monomorphic sites we need to make sure e.g. the hom ref genotypes are created and only then are passed to the annotation engine.
// We could theoretically make 2 passes to re-create the genotypes, but that gets extremely expensive with large sample sizes.
if (result.isPolymorphicInSamples()) {
result = annotationEngine.annotateContext(tracker, ref, null, result);
result = new VariantContextBuilder(result).genotypes(cleanupGenotypeAnnotations(result, false)).make();
} else if (INCLUDE_NON_VARIANTS) {
result = new VariantContextBuilder(result).genotypes(cleanupGenotypeAnnotations(result, true)).make();
result = annotationEngine.annotateContext(tracker, ref, null, result);
result = removeNonRefAlleles(result);
} else {
return null;
}
result = removeInfoAnnotationsIfNoAltAllele(result);
return result;
}
/**
* Remove INFO field annotations if no alternate alleles
*
* @param vc the variant context
* @return variant context with the INFO field annotations removed if no alternate alleles
*/
private VariantContext removeInfoAnnotationsIfNoAltAllele(final VariantContext vc) {
// If no alt alleles, remove any RankSumTest or RMSAnnotation attribute
if ( vc.getAlternateAlleles().isEmpty() ) {
final VariantContextBuilder builder = new VariantContextBuilder(vc);
for ( final String annotation : infoFieldAnnotationKeyNamesToRemove ) {
builder.rmAttribute(annotation);
}
return builder.make();
} else {
return vc;
}
}
/**
* Remove NON-REF alleles from the variant context
*
* @param vc the variant context
* @return variant context with the NON-REF alleles removed if multiallelic or replaced with NO-CALL alleles if biallelic
*/
private VariantContext removeNonRefAlleles(final VariantContext vc) {
// If NON_REF is the only alt allele, ignore this site
final List<Allele> newAlleles = new ArrayList<>();
// Only keep alleles that are not NON-REF
for ( final Allele allele : vc.getAlleles() ) {
if ( !allele.equals(GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE) ) {
newAlleles.add(allele);
}
}
// If no alt allele, then remove INFO fields that require alt alleles
if ( newAlleles.size() == 1 ) {
final VariantContextBuilder builder = new VariantContextBuilder(vc).alleles(newAlleles);
for ( final String name : infoHeaderAltAllelesLineNames ) {
builder.rmAttributes(Arrays.asList(name));
}
return builder.make();
} else {
return vc;
}
}
/**
* Determines whether the provided VariantContext has real alternate alleles.
*
* There is a bit of a hack to handle the <NON-REF> case because it is not defined in htsjdk.Allele
* We check for this as a biallelic symbolic allele.
*
* @param vc the VariantContext to evaluate
* @return true if it has proper alternate alleles, false otherwise
*/
private boolean isProperlyPolymorphic(final VariantContext vc) {
//obvious cases
if (vc == null || vc.getAlternateAlleles().isEmpty()) {
return false;
} else if (vc.isBiallelic()) {
return !(vc.getAlternateAllele(0).equals(Allele.SPAN_DEL) ||
vc.getAlternateAllele(0).equals(GATKVCFConstants.SPANNING_DELETION_SYMBOLIC_ALLELE_DEPRECATED) ||
vc.isSymbolic());
} else {
return true;
}
}
/**
* Add genotyping-based annotations to the new VC
*
* @param originalAttributes the non-null annotations from the original VC
* @param newVC the new non-null VC
* @return a non-null VC
*/
private VariantContext addGenotypingAnnotations(final Map<String, Object> originalAttributes, final VariantContext newVC) {
// we want to carry forward the attributes from the original VC but make sure to add the MLE-based annotations
final Map<String, Object> attrs = new HashMap<>(originalAttributes);
attrs.put(GATKVCFConstants.MLE_ALLELE_COUNT_KEY, newVC.getAttribute(GATKVCFConstants.MLE_ALLELE_COUNT_KEY));
attrs.put(GATKVCFConstants.MLE_ALLELE_FREQUENCY_KEY, newVC.getAttribute(GATKVCFConstants.MLE_ALLELE_FREQUENCY_KEY));
if (newVC.hasAttribute(GATKVCFConstants.NUMBER_OF_DISCOVERED_ALLELES_KEY))
attrs.put(GATKVCFConstants.NUMBER_OF_DISCOVERED_ALLELES_KEY, newVC.getAttribute(GATKVCFConstants.NUMBER_OF_DISCOVERED_ALLELES_KEY));
if (newVC.hasAttribute(GATKVCFConstants.AS_QUAL_KEY))
attrs.put(GATKVCFConstants.AS_QUAL_KEY, newVC.getAttribute(GATKVCFConstants.AS_QUAL_KEY));
return new VariantContextBuilder(newVC).attributes(attrs).make();
}
/**
* Cleans up genotype-level annotations that need to be updated.
* 1. move MIN_DP to DP if present
* 2. propagate DP to AD if not present
* 3. remove SB if present
* 4. change the PGT value from "0|1" to "1|1" for homozygous variant genotypes
* 5. move GQ to RGQ if the site is monomorphic
*
* @param vc the VariantContext with the Genotypes to fix
* @param createRefGTs if true we will also create proper hom ref genotypes since we assume the site is monomorphic
* @return a new set of Genotypes
*/
private List<Genotype> cleanupGenotypeAnnotations(final VariantContext vc, final boolean createRefGTs) {
final GenotypesContext oldGTs = vc.getGenotypes();
final List<Genotype> recoveredGs = new ArrayList<>(oldGTs.size());
for ( final Genotype oldGT : oldGTs ) {
final Map<String, Object> attrs = new HashMap<>(oldGT.getExtendedAttributes());
final GenotypeBuilder builder = new GenotypeBuilder(oldGT);
int depth = oldGT.hasDP() ? oldGT.getDP() : 0;
// move the MIN_DP to DP
if ( oldGT.hasExtendedAttribute(GATKVCFConstants.MIN_DP_FORMAT_KEY) ) {
depth = Integer.parseInt((String)oldGT.getAnyAttribute(GATKVCFConstants.MIN_DP_FORMAT_KEY));
builder.DP(depth);
attrs.remove(GATKVCFConstants.MIN_DP_FORMAT_KEY);
}
// move the GQ to RGQ
if ( createRefGTs && oldGT.hasGQ() ) {
builder.noGQ();
attrs.put(GATKVCFConstants.REFERENCE_GENOTYPE_QUALITY, oldGT.getGQ());
}
// remove SB
attrs.remove(GATKVCFConstants.STRAND_BIAS_BY_SAMPLE_KEY);
// update PGT for hom vars
if ( oldGT.isHomVar() && oldGT.hasExtendedAttribute(GATKVCFConstants.HAPLOTYPE_CALLER_PHASING_GT_KEY) ) {
attrs.put(GATKVCFConstants.HAPLOTYPE_CALLER_PHASING_GT_KEY, "1|1");
}
// create AD if it's not there
if ( !oldGT.hasAD() && vc.isVariant() ) {
final int[] AD = new int[vc.getNAlleles()];
AD[0] = depth;
builder.AD(AD);
}
if ( createRefGTs ) {
final int ploidy = oldGT.getPloidy();
final List<Allele> refAlleles = Collections.nCopies(ploidy,vc.getReference());
//keep 0 depth samples and 0 GQ samples as no-call
if (depth > 0 && oldGT.hasGQ() && oldGT.getGQ() > 0) {
builder.alleles(refAlleles);
}
// also, the PLs are technically no longer usable
builder.noPL();
}
recoveredGs.add(builder.noAttributes().attributes(attrs).make());
}
return recoveredGs;
}
private void checkRODtags() {
}
/**
* Creates a UnifiedArgumentCollection with appropriate values filled in from the arguments in this walker
* @return a complete UnifiedArgumentCollection
*/
private UnifiedArgumentCollection createUAC() {
UnifiedArgumentCollection uac = new UnifiedArgumentCollection();
uac.genotypeArgs = genotypeArgs.clone();
//whether to emit non-variant sites is not contained in genotypeArgs and must be passed to uac separately
uac.outputMode = INCLUDE_NON_VARIANTS ? OutputMode.EMIT_ALL_CONFIDENT_SITES : OutputMode.EMIT_VARIANTS_ONLY;
return uac;
}
public VariantContextWriter reduceInit() {
return vcfWriter;
}
public VariantContextWriter reduce(final VariantContext vc, final VariantContextWriter writer) {
if ( vc != null )
writer.add(vc);
return writer;
}
@Override
public VariantContextWriter treeReduce(final VariantContextWriter lhs, final VariantContextWriter rhs) {
return lhs;
}
@Override
public void onTraversalDone(final VariantContextWriter writer) {}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/AS_QualByDepth.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.annotator;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.GenotypesContext;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.vcf.VCFInfoHeaderLine;
import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.AS_StandardAnnotation;
import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.AnnotatorCompatible;
import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.InfoFieldAnnotation;
import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.ReducibleAnnotation;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import org.broadinstitute.gatk.utils.variant.GATKVCFHeaderLines;
import java.util.*;
/**
* Allele-specific call confidence normalized by depth of sample reads supporting the allele
*
* <p>This annotation puts the variant confidence QUAL score into perspective by normalizing for the amount of coverage available. Because each read contributes a little to the QUAL score, variants in regions with deep coverage can have artificially inflated QUAL scores, giving the impression that the call is supported by more evidence than it really is. To compensate for this, we normalize the variant confidence by depth, which gives us a more objective picture of how well supported the call is.</p>
*
* <h3>Statistical notes</h3>
* <p>The QD is the QUAL score normalized by allele depth (AD) for a variant. For a single sample, the HaplotypeCaller calculates the QD by taking QUAL/AD. For multiple samples, HaplotypeCaller and GenotypeGVCFs calculate the QD by taking QUAL/AD of samples with a non hom-ref genotype call. The reason we leave out the samples with a hom-ref call is to not penalize the QUAL for the other samples with the variant call.</p>
* <h4>Here is a single-sample example:</h4>
* <pre>2 37629 . C G 1063.77 . AC=2;AF=1.00;AN=2;DP=31;FS=0.000;MLEAC=2;MLEAF=1.00;MQ=58.50;QD=34.32;SOR=2.376 GT:AD:DP:GQ:PL:QSS 1/1:0,31:31:93:1092,93,0:0,960</pre>
 * <p>QUAL/AD = 1063.77/31 = 34.32 = QD</p>
* <h4>Here is a multi-sample example:</h4>
* <pre>10 8046 . C T 4107.13 . AC=1;AF=0.167;AN=6;BaseQRankSum=-3.717;DP=1063;FS=1.616;MLEAC=1;MLEAF=0.167;QD=11.54
GT:AD:DP:GQ:PL:QSS 0/0:369,4:373:99:0,1007,12207:10548,98 0/0:331,1:332:99:0,967,11125:9576,27 0/1:192,164:356:99:4138,0,5291:5501,4505</pre>
* <p>QUAL/AD = 4107.13/356 = 11.54 = QD</p>
* <p>Note that currently, when HaplotypeCaller is run with `-ERC GVCF`, the QD calculation is invoked before AD itself has been calculated, due to a technical constraint. In that case, HaplotypeCaller uses the number of overlapping reads from the haplotype likelihood calculation in place of AD to calculate QD, which generally yields a very similar number. This does not cause any measurable problems, but can cause some confusion since the number may be slightly different than what you would expect to get if you did the calculation manually. For that reason, this behavior will be modified in an upcoming version.</p>
*
* <h3>Caveat</h3>
* <p>This annotation can only be calculated for sites for which at least one sample was genotyped as carrying a variant allele.</p>
*
* <h3>Related annotations</h3>
* <ul>
 * <li><b><a href="https://www.broadinstitute.org/gatk/guide/tooldocs/org_broadinstitute_gatk_tools_walkers_annotator_QualByDepth.php">QualByDepth</a></b> outputs a version of this annotation that includes all alternate alleles in a single calculation.</li>
* <li><b><a href="https://www.broadinstitute.org/gatk/guide/tooldocs/org_broadinstitute_gatk_tools_walkers_annotator_Coverage.php">Coverage</a></b> gives the filtered depth of coverage for each sample and the unfiltered depth across all samples.</li>
* <li><b><a href="https://www.broadinstitute.org/gatk/guide/tooldocs/org_broadinstitute_gatk_tools_walkers_annotator_DepthPerAlleleBySample.php">DepthPerAlleleBySample</a></b> calculates depth of coverage for each allele per sample (AD).</li>
* </ul>
*/
public class AS_QualByDepth extends InfoFieldAnnotation implements ReducibleAnnotation, AS_StandardAnnotation {
@Override
public List<String> getKeyNames() { return Arrays.asList(GATKVCFConstants.AS_QUAL_BY_DEPTH_KEY); }
@Override
public String getRawKeyName() { return GATKVCFConstants.AS_QUAL_KEY; }
public List<VCFInfoHeaderLine> getDescriptions() {
//We only have the finalized key name here because the raw key is internal to GenotypeGVCFs and won't get output in any VCF
return Arrays.asList(GATKVCFHeaderLines.getInfoLine(getKeyNames().get(0)));
}
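// Note: this annotation is filled in through the reducible (raw AS_QUAL) workflow used by GenotypeGVCFs
// (see finalizeRawData below, which consumes the AS_QUAL value written by the GenotypingEngine),
// so the standard per-site annotate() entry point has nothing to compute and returns null.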
public Map<String, Object> annotate(final RefMetaDataTracker tracker,
final AnnotatorCompatible walker,
final ReferenceContext ref,
final Map<String, AlignmentContext> stratifiedContexts,
final VariantContext vc,
final Map<String, PerReadAlleleLikelihoodMap> perReadAlleleLikelihoodMap ) {
return null;
}
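/**
 * Sums the per-allele depths (AD) over all genotypes carrying a variant allele (het or hom-var),
 * skipping samples whose total variant depth is not greater than 1.
 *
 * @param genotypes the genotypes of the variant context
 * @return per-allele summed depths (reference allele first), or null if no genotype has AD values
 */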
private List<Integer> getAlleleDepths(final GenotypesContext genotypes) {
int numAlleles = -1;
for (final Genotype genotype : genotypes) {
if (genotype.hasAD()) {
numAlleles = genotype.getAD().length;
break;
}
}
if (numAlleles == -1) //no genotypes have AD
return null;
Integer[] alleleDepths = new Integer[numAlleles];
for (int i = 0; i < alleleDepths.length; i++) {
alleleDepths[i] = 0;
}
for (final Genotype genotype : genotypes) {
// we care only about genotypes with variant alleles
if ( !genotype.isHet() && !genotype.isHomVar() )
continue;
// if we have the AD values for this sample, let's make sure that the variant depth is greater than 1!
if ( genotype.hasAD() ) {
final int[] AD = genotype.getAD();
final int totalADdepth = (int) MathUtils.sum(AD);
if ( totalADdepth - AD[0] > 1 ) {
for (int i = 0; i < AD.length; i++) {
alleleDepths[i] += AD[i];
}
}
}
}
return Arrays.asList(alleleDepths);
}
@Override
public Map<String, Object> annotateRawData(RefMetaDataTracker tracker, AnnotatorCompatible walker, ReferenceContext ref, Map<String, AlignmentContext> stratifiedContexts, VariantContext vc, Map<String, PerReadAlleleLikelihoodMap> stratifiedPerReadAlleleLikelihoodMap) {
return null;
}
@Override
public Map<String, Object> combineRawData(List<Allele> allelesList, List<? extends ReducibleAnnotationData> listOfRawData) {
return null;
}
@Override
public Map<String, Object> finalizeRawData(VariantContext vc, VariantContext originalVC) {
//we need to use the AS_QUAL value that was added to the VC by the GenotypingEngine
if ( !vc.hasAttribute(GATKVCFConstants.AS_QUAL_KEY) )
return null;
final GenotypesContext genotypes = vc.getGenotypes();
if ( genotypes == null || genotypes.isEmpty() )
return null;
final List<Integer> standardDepth = getAlleleDepths(genotypes);
if (standardDepth == null) //all no-calls and homRefs
return null;
//Parse the VC's allele-specific qual values
List<Object> alleleQualObjList = vc.getAttributeAsList(GATKVCFConstants.AS_QUAL_KEY);
if (alleleQualObjList.size() != vc.getNAlleles() -1)
throw new IllegalStateException("Number of AS_QUAL values doesn't match the number of alternate alleles.");
List<Double> alleleQualList = new ArrayList<>();
for (final Object obj : alleleQualObjList) {
alleleQualList.add(Double.parseDouble(obj.toString()));
}
// Don't normalize indel length for AS_QD because it will only be called from GenotypeGVCFs, never UG
List<Double> QDlist = new ArrayList<>();
double refDepth = (double)standardDepth.get(0);
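// For each alternate allele, scale its AS_QUAL by the depth of reads supporting that allele plus the
// reference depth, mirroring the biallelic QUAL/AD calculation described in the class documentation.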
for (int i = 0; i < alleleQualList.size(); i++) {
double AS_QD = -10.0 * alleleQualList.get(i) / ((double)standardDepth.get(i+1) + refDepth); //+1 to skip the reference field of the AD, add ref counts to each to match biallelic case
// Hack: see the note on QualByDepth.fixTooHighQD
AS_QD = QualByDepth.fixTooHighQD(AS_QD);
QDlist.add(AS_QD);
}
final Map<String, Object> map = new HashMap<>();
map.put(getKeyNames().get(0), AnnotationUtils.encodeValueList(QDlist, "%.2f"));
return map;
}
@Override
public void calculateRawData(VariantContext vc, Map<String, PerReadAlleleLikelihoodMap> pralm, ReducibleAnnotationData rawAnnotations) {
//note that the "raw data" used here is calculated by the GenotypingEngine in GenotypeGVCFs and stored in the AS_QUAL info field
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/simulatereads/SimulateReadsForVariants.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.simulatereads;
import org.apache.log4j.Logger;
import cern.jet.random.Poisson;
import cern.jet.random.engine.MersenneTwister;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMProgramRecord;
import htsjdk.samtools.SAMReadGroupRecord;
import org.broadinstitute.gatk.engine.SampleUtils;
import org.broadinstitute.gatk.engine.walkers.Reference;
import org.broadinstitute.gatk.engine.walkers.RodWalker;
import org.broadinstitute.gatk.engine.walkers.Window;
import org.broadinstitute.gatk.utils.commandline.*;
import org.broadinstitute.gatk.engine.CommandLineGATK;
import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
import org.broadinstitute.gatk.utils.sam.GATKSAMFileWriter;
import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
import org.broadinstitute.gatk.utils.*;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import htsjdk.variant.variantcontext.*;
import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
import org.broadinstitute.gatk.utils.help.HelpConstants;
import htsjdk.variant.vcf.VCFConstants;
import java.util.*;
/**
* Generate simulated reads for variants
*
* <p>Given a set of variants, this tool will generate simulated reads that support the input variants.</p>
*
* <h3>Caveat</h3>
* <p>For practical reasons, only bi-allelic variants that are not too close to the ends of contigs
* (< 1/2 read length) are supported; all others will simply be ignored.</p>
*
* <h3>Input</h3>
* <p>A VCF file containing variants.</p>
*
* <h3>Output</h3>
* <p>A BAM file containing simulated sequence reads that support the input variants, with the requested error rate
* and coverage depth.</p>
*
* <h3>Usage example</h3>
* <pre>
* java -jar GenomeAnalysisTK.jar \
* -T SimulateReadsForVariants \
* -R reference.fasta \
* -V input_variants.vcf \
* -o simulated_reads.bam \
* --readDepth 50 \
* --errorRate 25
* </pre>
*
*/
@DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class})
@Reference(window=@Window(start=-200,stop=200))
public class SimulateReadsForVariants extends RodWalker<Integer, Integer> {
private static Logger logger = Logger.getLogger(SimulateReadsForVariants.class);
@ArgumentCollection protected StandardVariantContextInputArgumentCollection variantCollection = new StandardVariantContextInputArgumentCollection();
/**
* The simulated reads will be written to a BAM file.
*/
@Output(doc="Reads corresponding to variants", required=true)
protected GATKSAMFileWriter readWriter;
/**
* Use this argument to set the desired target read depth. See the readSamplingMode argument for options that
* determine whether coverage distribution will be exactly this value or an approximation.
*/
@Argument(fullName="readDepth", shortName="DP", doc="Read depth to generate", required=false, minValue = 0, minRecommendedValue = 1, maxRecommendedValue = 1000, maxValue = Integer.MAX_VALUE)
public int readDepth = 20;
/**
* Errors will be generated at this rate in the simulated reads. Base qualities are therefore also assigned this value.
*/
@Argument(fullName="errorRate", shortName="ER", doc="Base error rate (Phred-scaled)", required=false, minValue = 0, maxValue = Integer.MAX_VALUE)
public int phredErrorRate = 20;
/**
* All simulated reads will be exactly this length.
*/
@Argument(fullName="readLength", shortName="RL", doc="Read lengths (bp)", required=false, minValue = 1, maxValue = Integer.MAX_VALUE)
public int readLength = 101;
/**
* Use this argument to simulate events at a non-50/50 allele fraction represented in the VCF by AF (used for somatic event simulation)
*/
@Hidden
@Argument(fullName="useAFAsAlleleFraction", shortName="AF", doc="Use AF in VCF as event allele fraction ", required=false)
public boolean useAFAsAlleleFraction = false;
/**
* The corresponding platform identifier will be specified in the simulated read group PL tag. This setting does not
* affect the properties of the simulated reads.
*/
@Advanced
@Argument(fullName="rgPlatform", shortName="RGPL", doc="Sequencing platform", required=false)
public NGSPlatform rgPlatform = NGSPlatform.ILLUMINA;
/**
* This determines how read sampling is achieved, and affects the coverage distribution of simulated reads.
* CONSTANT sampling will produce uniform depth at all positions, while POISSON sampling will produce a
* distribution of coverages around the requested value.
*/
@Advanced
@Argument(fullName="readSamplingMode", shortName="RSM", doc="Sampling mode", required=false)
public ReadSamplingMode samplingMode = ReadSamplingMode.CONSTANT;
public enum ReadSamplingMode { CONSTANT, POISSON };
@Hidden
@Argument(fullName = "no_pg_tag", shortName = "npt", doc ="Discard program tags, for integration tests", required=false)
public boolean NO_PG_TAG = false;
@Hidden
@Argument(fullName="verbose", shortName="verbose", doc="Verbose", required=false)
public boolean verbose = false;
public static final String PROGRAM_RECORD_NAME = "GATK SimulateReadsForVariants";
// variables used to store state
private long readNameCounter = 1;
private int halfReadLength;
private double errorRate;
private byte[] readQuals;
private SAMFileHeader header = null;
// randomness related variables
private static final long RANDOM_SEED = 1252863495;
private static final Random ran = Utils.getRandomGenerator();
private Poisson poissonRandom = null;
// samples and read groups
private final Map<String, SAMReadGroupRecord> sample2RG = new HashMap<String, SAMReadGroupRecord>();
private SAMReadGroupRecord sampleRG(String name) { return sample2RG.get(name); }
private SAMReadGroupRecord createRG(String name) {
SAMReadGroupRecord rg = new SAMReadGroupRecord(name);
rg.setPlatform(rgPlatform.getDefaultPlatform());
rg.setSample(name);
return rg;
}
// class to store the bases, offset, and representative CIGAR of a haplotype
private static class ArtificialHaplotype {
public final byte[] bases;
public final int offset;
public final String cigar;
public ArtificialHaplotype(final byte[] bases, final int offset, final String cigar) {
this.bases = bases;
this.offset = offset;
this.cigar = cigar;
}
}
@Override
public void initialize() {
// initialize sample -> read group map
final List<SAMReadGroupRecord> sampleRGs = new ArrayList<SAMReadGroupRecord>();
for ( final String sample : SampleUtils.getUniqueSamplesFromRods(getToolkit(), Arrays.asList(variantCollection.variants.getName())) ) {
final SAMReadGroupRecord rg = createRG(sample);
sampleRGs.add(rg);
sample2RG.put(sample, rg);
}
// initialize BAM headers
header = new SAMFileHeader();
header.setSequenceDictionary(getToolkit().getReferenceDataSource().getReference().getSequenceDictionary());
header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
header.setReadGroups(sampleRGs);
final SAMProgramRecord programRecord = new SAMProgramRecord(PROGRAM_RECORD_NAME);
if ( !NO_PG_TAG ) {
programRecord.setProgramVersion(CommandLineProgram.getVersionNumber());
programRecord.setCommandLine(getToolkit().createApproximateCommandLineArgumentString(getToolkit(), this));
}
header.setProgramRecords(Arrays.asList(programRecord));
readWriter.setPresorted(false);
readWriter.writeHeader(header);
halfReadLength = readLength / 2;
errorRate = QualityUtils.qualToErrorProb((byte)phredErrorRate);
readQuals = new byte[readLength];
Arrays.fill(readQuals, (byte)phredErrorRate);
if ( samplingMode == ReadSamplingMode.POISSON )
poissonRandom = new Poisson(readDepth, new MersenneTwister((int)RANDOM_SEED));
}
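/**
 * Processes a single locus: positions without ROD data, positions within one read length of the start of
 * the contig, irregular reference bases, and sites without a bi-allelic variant are skipped; otherwise an
 * attempt is made to generate and write simulated reads supporting the variant.
 *
 * @return 1 if reads were generated for a variant at this locus, 0 otherwise
 */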
public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
if ( tracker == null ) // RodWalkers can make funky map calls
return 0;
if ( ref.getLocus().getStart() < readLength || ! BaseUtils.isRegularBase(ref.getBase()) )
return 0;
final VariantContext vc = tracker.getFirstValue(variantCollection.variants, context.getLocation());
if ( vc == null || !vc.isBiallelic() )
return 0;
if ( !generateReadsForVariant(vc, ref, useAFAsAlleleFraction) )
return 0;
if ( verbose ) logger.info(String.format("Generating reads for %s", vc));
return 1;
}
/**
* Constructs an artificial haplotype given an allele and original reference context
*
* @param allele the allele to model (can be reference)
* @param refLength the length of the reference allele
* @param ref the original reference context
* @return the artificial haplotype or null if the readLength parameter is too small to hold the allele and reference
*/
private ArtificialHaplotype constructHaplotype(final Allele allele, final int refLength, final ReferenceContext ref) {
final byte[] haplotype = new byte[readLength];
final int alleleLength = allele.getBases().length;
final int halfAlleleLength = (alleleLength + 1) / 2;
final int refContextLength = ref.getBases().length;
// this is how far back to move from the event to start copying bases
final int offset = halfReadLength - halfAlleleLength;
// number of bases copied to the haplotype
int copiedCount = 0;
// copy bases before the event
final int locusPosOnRefContext = (int)(ref.getLocus().getStart() - ref.getWindow().getStart());
int posOnRefContext = locusPosOnRefContext - offset;
if ( offset >= 0 && posOnRefContext >= 0 && posOnRefContext + offset <= refContextLength )
{
System.arraycopy(ref.getBases(), posOnRefContext, haplotype, 0, offset);
copiedCount = offset;
}
else
{
String msg = new String("Can not copy reference bases to haplotype: ");
if ( offset < 0 )
msg += "Read length(" + readLength + ") < Allele length(" + alleleLength + ")";
else
msg += "Reference position(" + posOnRefContext + ") < 0";
logger.info(msg);
return null;
}
// copy the event bases
if ( copiedCount + alleleLength <= readLength )
{
System.arraycopy(allele.getBases(), 0, haplotype, copiedCount, alleleLength);
copiedCount += alleleLength;
}
else
{
String msg = new String("Can not copy allele bases to haplotype: ");
msg += "Read length(" + readLength + ") < Allele length(" + alleleLength + ") + copied count(" + copiedCount + ")";
logger.info(msg);
return null;
}
// copy bases after the event
posOnRefContext = locusPosOnRefContext + refLength;
final int remainder = readLength - copiedCount;
if ( remainder > 0 && posOnRefContext + remainder <= refContextLength )
{
System.arraycopy(ref.getBases(), posOnRefContext, haplotype, copiedCount, remainder);
copiedCount += remainder;
}
else
{
String msg = new String("Can not copy remaining reference bases to haplotype: ");
msg += "Read length(" + readLength + ") <= Copied count(" + copiedCount + ")";
logger.info(msg);
return null;
}
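// Build the representative CIGAR: a full-length match when the alleles have equal length, otherwise a
// matched prefix, a deletion or insertion spanning the length difference, and a matched suffix.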
final String cigar;
if ( refLength == alleleLength )
cigar = readLength + "M";
else
cigar = (offset + 1) + "M" + Math.abs(refLength - alleleLength) + (refLength > alleleLength ? "D" : "I") + remainder + "M";
return new ArtificialHaplotype(haplotype, offset, cigar);
}
/**
* Generates the artificial reads for a given variant
*
* @param vc the (bi-allelic) variant context for which to generate artificial reads
* @param ref the original reference context
* @param useAFAsAlleleFraction use AF tag to indicate allele fraction
* @return true if successful generation of artificial reads for the variant, false otherwise
*/
private boolean generateReadsForVariant(final VariantContext vc, final ReferenceContext ref, final boolean useAFAsAlleleFraction) {
final int refLength = vc.getReference().getBases().length;
final ArtificialHaplotype refHap = constructHaplotype(vc.getReference(), refLength, ref);
if ( refHap == null )
return false;
final ArtificialHaplotype altHap = constructHaplotype(vc.getAlternateAllele(0), refLength, ref);
if ( altHap == null )
return false;
final double refAlleleFraction = (useAFAsAlleleFraction)?1-vc.getAttributeAsDouble(VCFConstants.ALLELE_FREQUENCY_KEY, 0.5):0.5;
if (refAlleleFraction < 0.0 || refAlleleFraction > 1.0 || Double.isNaN(refAlleleFraction) || Double.isInfinite(refAlleleFraction) ) {
throw new UserException.MalformedVCF("Error in AF, must be between 0 and 1 but was " + refAlleleFraction);
}
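// For each sample, draw a target depth and emit that many reads, picking the reference or alternate
// haplotype per read according to the sample's genotype and the reference allele fraction, then adding
// machine errors before writing the read.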
int gi = 0;
for ( final Genotype g : vc.getGenotypes() ) {
final int myDepth = sampleDepth();
for ( int d = 0; d < myDepth; d++ ) {
final ArtificialHaplotype haplotype = chooseRefHaplotype(g, refAlleleFraction) ? refHap : altHap;
final byte[] readBases = Arrays.copyOf(haplotype.bases, readLength);
addMachineErrors(readBases, errorRate);
writeRead(readBases, vc.getChr(), vc.getStart() - haplotype.offset, haplotype.cigar, g.getSampleName(), gi++ % 2 == 0);
}
}
return true;
}
/**
* Decides whether or not to choose the reference haplotype, depending on the given genotype
*
* @param g the genotype of the given sample
* @param pReferenceGivenHet probability of choosing reference for hets
*
* @return true if one should use the reference haplotype, false otherwise
*/
private boolean chooseRefHaplotype(final Genotype g, final double pReferenceGivenHet) {
final double refP;
if ( g.isHomRef() ) refP = 1;
else if ( g.isHet() ) refP = pReferenceGivenHet;
else refP = 0.0;
return ran.nextDouble() < refP;
}
/**
* Generates the artificial read depth
*
* @return a non-negative int
*/
private int sampleDepth() {
switch ( samplingMode ) {
case CONSTANT: return readDepth;
case POISSON: return poissonRandom.nextInt();
default:
throw new IllegalStateException("Unexpected DepthSamplingType " + samplingMode);
}
}
/**
* Creates and writes an artificial read given the appropriate data
*
* @param readBases the bases
* @param contig the contig
* @param start the read start
* @param cigar the cigar string
* @param sample the sample name (used to get the right read group)
* @param isNegStrand should this read be on the negative strand?
*/
private void writeRead(final byte[] readBases, final String contig, final int start,
final String cigar, final String sample, final boolean isNegStrand) {
final GATKSAMRecord read = new GATKSAMRecord(header);
read.setBaseQualities(readQuals);
read.setReadBases(readBases);
read.setReadName("" + readNameCounter++);
read.setCigarString(cigar);
read.setReadPairedFlag(false);
read.setAlignmentStart(start);
read.setMappingQuality(60);
read.setReferenceName(contig);
read.setReadNegativeStrandFlag(isNegStrand);
read.setAttribute("RG", sampleRG(sample).getReadGroupId());
readWriter.addAlignment(read);
}
/**
* Adds machine errors at the appropriate rate to the provided read bases
*
* @param readBases the read bases
* @param errorRate the rate at which to produce errors
*/
private void addMachineErrors(final byte[] readBases, final double errorRate) {
for ( int i = 0; i < readBases.length; i++ ) {
final double r = ran.nextDouble();
if ( r < errorRate ) {
byte errorBase = BaseUtils.baseIndexToSimpleBase(BaseUtils.getRandomBaseIndex(BaseUtils.simpleBaseToBaseIndex(readBases[i])));
if ( errorBase == readBases[i] ) throw new IllegalStateException("Read and error bases are the same");
readBases[i] = errorBase;
}
}
}
@Override
public Integer reduceInit() {
return 0;
}
@Override
public Integer reduce(Integer counter, Integer sum) {
return counter + sum;
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/ActiveRegionTrimmer.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller;
import org.apache.log4j.Logger;
import org.broadinstitute.gatk.utils.commandline.Advanced;
import org.broadinstitute.gatk.utils.commandline.Argument;
import org.broadinstitute.gatk.utils.commandline.Hidden;
import org.broadinstitute.gatk.utils.GenomeLoc;
import org.broadinstitute.gatk.utils.GenomeLocParser;
import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
import org.broadinstitute.gatk.utils.collections.Pair;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import htsjdk.variant.variantcontext.VariantContext;
import java.util.*;
/**
* Helper component to manage active region trimming
*
* <p/>
 * It receives the user arguments that control trimming and also performs the trimming region calculation.
*
* @author <NAME> <<EMAIL>>
*/
public class ActiveRegionTrimmer {
/**
 * Genome location parser used to create and manipulate genomic intervals.
*/
private GenomeLocParser locParser;
/**
* Holds the debug flag. If {@code true} the trimmer will output debugging messages into the log.
*/
private boolean debug;
/**
* Holds the extension to be used based on whether GGA mode is on or off.
*/
private int usableExtension;
/**
* Records whether the trimming intervals are going to be used to emit reference confidence, {@code true},
* or regular HC output {@code false}.
*/
private boolean emitReferenceConfidence;
@Advanced
@Argument(fullName="dontTrimActiveRegions", shortName="dontTrimActiveRegions", doc="If specified, we will not trim down the active region from the full region (active + extension) to just the active interval for genotyping", required = false)
protected boolean dontTrimActiveRegions = false;
/**
* the maximum extent into the full active region extension that we're willing to go in genotyping our events
*/
@Hidden
@Argument(fullName="maxDiscARExtension", shortName="maxDiscARExtension", doc = "the maximum extent into the full active region extension that we're willing to go in genotyping our events for discovery", required=false)
protected int discoverExtension = 25;
@Hidden
@Argument(fullName="maxGGAARExtension", shortName="maxGGAARExtension", doc = "the maximum extent into the full active region extension that we're willing to go in genotyping our events for GGA mode", required=false)
protected int ggaExtension = 300;
/**
* Include at least this many bases around an event for calling it
*/
@Hidden
@Argument(fullName="paddingAroundIndels", shortName="paddingAroundIndels", doc = "Include at least this many bases around an event for calling indels", required=false)
public int indelPadding = 150;
@Hidden
@Argument(fullName="paddingAroundSNPs", shortName="paddingAroundSNPs", doc = "Include at least this many bases around an event for calling snps", required=false)
public int snpPadding = 20;
/**
 * Holds a reference to the trimmer logger.
*/
private final static Logger logger = Logger.getLogger(ActiveRegionTrimmer.class);
/**
* Initializes the trimmer.
*
* <p/>
* This method should be called once and only once before any trimming is performed.
*
*
* @param glp the genome-location-parser to be used when operating with genomic locations.
* @param debug whether to show extra debug log messages.
* @param isGGA whether the trimming region calculator should act as if we are in GGA mode or not.
* @param emitReferenceConfidence indicates whether we plan to use this trimmer to generate trimmed regions
* to be used for emitting reference confidence.
*
* @throws IllegalStateException if this trim calculator has already been initialized.
* @throws IllegalArgumentException if the input location parser is {@code null}.
* @throws UserException.BadArgumentValue if any of the user argument values is invalid.
*/
public void initialize(final GenomeLocParser glp, final boolean debug, final boolean isGGA, final boolean emitReferenceConfidence) {
if (locParser != null)
throw new IllegalStateException(getClass().getSimpleName() + " instance initialized twice");
if (glp == null)
throw new IllegalArgumentException("input genome-loc-parser cannot be null");
checkUserArguments();
locParser = glp;
this.debug = debug;
usableExtension = isGGA ? ggaExtension : discoverExtension;
this.emitReferenceConfidence = emitReferenceConfidence;
}
/**
* Checks user trimming argument values
*
* @throws UserException.BadArgumentValue if there is some problem with any of the arguments values.
*/
private void checkUserArguments() {
if ( snpPadding < 0 ) throw new UserException.BadArgumentValue("paddingAroundSNPs", snpPadding + " < 0");
if ( indelPadding < 0 ) throw new UserException.BadArgumentValue("paddingAroundIndels", indelPadding + " < 0");
if ( discoverExtension < 0) throw new UserException.BadArgumentValue("maxDiscARExtension", discoverExtension + " < 0");
if ( ggaExtension < 0) throw new UserException.BadArgumentValue("maxGGAARExtension", ggaExtension + " < 0");
}
/**
* Holds the result of trimming.
*/
public static class Result {
/**
* Indicates whether trimming is required per data and user request.
*/
protected final boolean needsTrimming;
/**
* Holds the input active region.
*/
protected final ActiveRegion originalRegion;
/**
 * Holds the smaller range that contains all relevant callable variants in the
* input active region (not considering the extension).
*
*/
protected final GenomeLoc callableSpan;
/**
* Maximum available range for the trimmed variant region.
*/
protected final GenomeLoc maximumSpan;
/**
* The trimmed variant region span including the extension.
*/
protected final GenomeLoc extendedSpan;
/**
 * The ideal trimmed variant region span including the extension.
*/
protected final GenomeLoc idealSpan;
/**
* Returns the ideal trimming span.
*
* <p/>
* The ideal span is the one containing all callable variation overlapping the original active region span
 * (without extension) and the applicable padding {@link #getPadding()} on both sides.
*
*
* @return never {@code null}.
*/
@SuppressWarnings("unused")
public GenomeLoc getIdealSpan() {
return idealSpan;
}
/**
* Holds the flanking spans that do not contain the callable variants.
* <p/>
* The first element of the pair is the left (up-stream) non-variant flank, whereas the second element is
* the right (down-stream) non-variant flank.
*/
protected final Pair<GenomeLoc,GenomeLoc> nonVariantFlanks;
/**
* Holds the collection of callable events within the variant trimming region.
*/
protected final List<VariantContext> callableEvents;
/**
* Required padding around the variant trimming region.
*/
protected final int padding;
/**
* Returns the required padding around callable variation.
*
* <p/>
* Notice that due to the limiting span of the original active region (including its extension) it
 * is possible that the resulting final trimmed variant region span does not satisfy the padding. However,
 * that should be rare.
*
* @return 0 or greater.
*/
@SuppressWarnings("unused")
public int getPadding() {
return padding;
}
/**
* Holds the maximum extension around the original active region span considered for the trimmed
* variation region.
*/
protected final int usableExtension;
/**
* Returns the maximum extension around the original active region span considered for the trimmed
* variation region.
*
* <p/>
* From time to time, the trimmed region may require a span beyond the input original active region's.
 * For example, when there is a callable event close to one of its ends and the required padding makes it
 * extend beyond that limit.
*
* <p/>
* Notice that due to the limiting span of the original active region (including its extended region) it
* is possible that the resulting final trimmed variant region span goes beyond this extension including more of
 * the original active region's own extension.
*
* @return 0 or greater.
*/
@SuppressWarnings("unused")
public int getUsableExtension() {
return usableExtension;
}
/**
 * Holds the variant-containing callable region.
* <p/>
* This is lazy-initialized using {@link #callableSpan}.
*/
protected ActiveRegion callableRegion;
/**
* Non-variant left flank region.
* <p/>
* This is lazy-initialized using
* {@link #nonVariantFlanks}.{@link Pair#getFirst() getFirst()}.
*/
private ActiveRegion leftFlankRegion;
/**
* Non-variant right flank region.
* <p/>
* This is lazy-initialized using
 * {@link #nonVariantFlanks}.{@link Pair#getSecond() getSecond()}.
*/
private ActiveRegion rightFlankRegion;
/**
* Whether the variant trimmed region is going to be used for emitting reference confidence records.
*/
private final boolean emitReferenceConfidence;
/**
* Creates a trimming result given all its properties.
*
* @param emitReferenceConfidence whether reference confidence output modes are on.
* @param needsTrimming whether there is any trimming needed at all.
* @param originalRegion the original active region.
* @param padding padding around contained callable variation events.
* @param extension the extension applied to the trimmed variant span.
* @param overlappingEvents contained callable variation events.
* @param nonVariantFlanks pair of non-variant flank spans around the variant containing span.
* @param extendedSpan final trimmed variant span including the extension.
     * @param idealSpan the ideal span, containing all callable variation plus the required padding.
* @param maximumSpan maximum possible trimmed span based on the input original active region extended span.
* @param callableSpan variant containing span without padding.
*/
protected Result(final boolean emitReferenceConfidence, final boolean needsTrimming, final ActiveRegion originalRegion,
final int padding, final int extension,
final List<VariantContext> overlappingEvents, final Pair<GenomeLoc,GenomeLoc> nonVariantFlanks,
final GenomeLoc extendedSpan,
final GenomeLoc idealSpan,
final GenomeLoc maximumSpan,
final GenomeLoc callableSpan) {
this.emitReferenceConfidence = emitReferenceConfidence;
this.needsTrimming = needsTrimming;
this.originalRegion = originalRegion;
this.nonVariantFlanks = nonVariantFlanks;
this.padding = padding;
this.usableExtension = extension;
this.callableEvents = overlappingEvents;
this.callableSpan = callableSpan;
this.idealSpan = idealSpan;
this.maximumSpan = maximumSpan;
this.extendedSpan = extendedSpan;
if (!extendedSpan.isUnmapped() && !callableSpan.isUnmapped() && !extendedSpan.containsP(callableSpan))
throw new IllegalArgumentException("the extended callable span must include the callable span");
}
/**
* Checks whether there is any variation present in the target region.
*
* @return {@code true} if there is any variant, {@code false} otherwise.
*/
public boolean isVariationPresent() {
return ! callableEvents.isEmpty();
}
/**
* Checks whether the active region needs trimming.
*/
public boolean needsTrimming() {
return needsTrimming;
}
/**
* Returns the trimmed variant containing region
*
* @throws IllegalStateException if there is no variation detected.
*
* @return never {@code null}.
*/
public ActiveRegion getCallableRegion() {
if (callableRegion == null && !extendedSpan.isUnmapped())
//TODO this conditional is a patch to retain the current standard HC run behaviour
//TODO we should simply remove this difference between trimming with or without GVCF
//TODO embracing slight changes in the standard HC output
callableRegion = emitReferenceConfidence ? originalRegion.trim(callableSpan, extendedSpan) : originalRegion.trim(extendedSpan);
else if (extendedSpan.isUnmapped())
throw new IllegalStateException("there is no variation thus no variant region");
return callableRegion;
}
/**
* Checks whether there is a non-empty left flanking non-variant trimmed out region.
* @return {@code true} if there is a non-trivial left flank region, {@code false} otherwise.
*/
public boolean hasLeftFlankingRegion() {
return ! nonVariantFlanks.getFirst().isUnmapped();
}
/**
* Checks whether there is a non-empty right flanking non-variant trimmed out region.
* @return {@code true} if there is a non-trivial right flank region, {@code false} otherwise.
*/
public boolean hasRightFlankingRegion() {
return ! nonVariantFlanks.getSecond().isUnmapped();
}
/**
* Returns the trimmed out left non-variant region.
* <p/>
* Notice that in case of no variation, the whole original region is considered the left flanking region.
*
     * @throws IllegalStateException if there is no such left flanking region.
*/
public ActiveRegion nonVariantLeftFlankRegion() {
if (leftFlankRegion == null && ! nonVariantFlanks.getFirst().isUnmapped())
leftFlankRegion = originalRegion.trim(nonVariantFlanks.getFirst(),originalRegion.getExtension());
else if (nonVariantFlanks.getFirst().isUnmapped())
throw new IllegalStateException("there is no left flank non-variant trimmed out region");
return leftFlankRegion;
}
/**
     * Returns the trimmed out right non-variant region.
     *
     * @throws IllegalStateException if there is no such right flanking region.
     */
public ActiveRegion nonVariantRightFlankRegion() {
if (rightFlankRegion == null && ! nonVariantFlanks.getSecond().isUnmapped())
rightFlankRegion = originalRegion.trim(nonVariantFlanks.getSecond(),originalRegion.getExtension());
else if (nonVariantFlanks.getSecond().isUnmapped())
throw new IllegalStateException("there is no right flank non-variant trimmed out region");
return rightFlankRegion;
}
/**
* Creates a result indicating that there was no trimming to be done.
*/
protected static Result noTrimming(final boolean emitReferenceConfidence,
final ActiveRegion targetRegion, final int padding,
final int usableExtension,final List<VariantContext> events) {
final GenomeLoc targetRegionLoc = targetRegion.getLocation();
final Result result = new Result(emitReferenceConfidence,false,targetRegion,padding,usableExtension,events,new Pair<>(GenomeLoc.UNMAPPED,GenomeLoc.UNMAPPED),
targetRegionLoc,targetRegionLoc,targetRegionLoc,targetRegionLoc);
result.callableRegion = targetRegion;
return result;
}
/**
* Creates a result indicating that no variation was found.
*/
protected static Result noVariation(final boolean emitReferenceConfidence, final ActiveRegion targetRegion,
final int padding, final int usableExtension) {
final Result result = new Result(emitReferenceConfidence,false,targetRegion,padding,usableExtension,
Collections.<VariantContext>emptyList(),new Pair<>(targetRegion.getLocation(),GenomeLoc.UNMAPPED),
GenomeLoc.UNMAPPED,GenomeLoc.UNMAPPED,GenomeLoc.UNMAPPED,GenomeLoc.UNMAPPED);
result.leftFlankRegion = targetRegion;
return result;
}
}
/**
* Returns a trimming result object from which the variant trimmed region and flanking non-variant sections
     * can be recovered later.
*
* @param originalRegion the genome location range to trim.
* @param allVariantsWithinExtendedRegion list of variants contained in the trimming location. Variants therein
* not overlapping with {@code originalRegion} are simply ignored.
* @return never {@code null}.
*/
public Result trim(final ActiveRegion originalRegion,
final TreeSet<VariantContext> allVariantsWithinExtendedRegion) {
if ( allVariantsWithinExtendedRegion.isEmpty() ) // no variants,
return Result.noVariation(emitReferenceConfidence,originalRegion,snpPadding, usableExtension);
final List<VariantContext> withinActiveRegion = new LinkedList<>();
final GenomeLoc originalRegionRange = originalRegion.getLocation();
boolean foundNonSnp = false;
GenomeLoc variantSpan = null;
for ( final VariantContext vc : allVariantsWithinExtendedRegion ) {
final GenomeLoc vcLoc = locParser.createGenomeLoc(vc);
if ( originalRegionRange.overlapsP(vcLoc) ) {
foundNonSnp = foundNonSnp || ! vc.isSNP();
variantSpan = variantSpan == null ? vcLoc : variantSpan.endpointSpan(vcLoc);
withinActiveRegion.add(vc);
}
}
final int padding = foundNonSnp ? indelPadding : snpPadding;
// we don't actually have anything in the region after skipping out variants that don't overlap
// the region's full location
if ( variantSpan == null )
return Result.noVariation(emitReferenceConfidence,originalRegion,padding, usableExtension);
if ( dontTrimActiveRegions)
return Result.noTrimming(emitReferenceConfidence,originalRegion, padding, usableExtension, withinActiveRegion);
final GenomeLoc maximumSpan = locParser.createPaddedGenomeLoc(originalRegionRange, usableExtension);
final GenomeLoc idealSpan = locParser.createPaddedGenomeLoc(variantSpan, padding);
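        // Worked example with hypothetical coordinates (not from any real run): for an active region 1:1000-1300
        // with usableExtension = 100 and a single SNP at 1:1290 with padding = 20, maximumSpan = 1:900-1400 and
        // idealSpan = 1:1270-1310, so the intersection/union below yields finalSpan = 1:1270-1310.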
final GenomeLoc finalSpan = maximumSpan.intersect(idealSpan).union(variantSpan);
        // Make sure that, if we are emitting GVCF output, we won't call non-variable positions beyond the target active region span.
        // In regular calling we don't do so, in order to maintain the existing behavior; hence the conditional.
final GenomeLoc callableSpan = emitReferenceConfidence ? variantSpan.intersect(originalRegionRange) : variantSpan;
final Pair<GenomeLoc,GenomeLoc> nonVariantRegions = nonVariantTargetRegions(originalRegion, callableSpan);
if ( debug ) {
logger.info("events : " + withinActiveRegion);
logger.info("region : " + originalRegion);
logger.info("callableSpan : " + callableSpan);
logger.info("padding : " + padding);
logger.info("idealSpan : " + idealSpan);
logger.info("maximumSpan : " + maximumSpan);
logger.info("finalSpan : " + finalSpan);
}
return new Result(emitReferenceConfidence,true,originalRegion,padding, usableExtension,withinActiveRegion,nonVariantRegions,finalSpan,idealSpan,maximumSpan,variantSpan);
}
/**
     * Calculates the regions to trim away.
* @param targetRegion region for which to generate the flanking regions.
* @param variantSpan the span of the core region containing relevant variation and required padding.
     * @return never {@code null}; a pair of left and right flanks, either of which may be {@link GenomeLoc#UNMAPPED}.
*/
private Pair<GenomeLoc,GenomeLoc> nonVariantTargetRegions(final ActiveRegion targetRegion, final GenomeLoc variantSpan) {
final GenomeLoc targetRegionRange = targetRegion.getLocation();
final int finalStart = variantSpan.getStart();
final int finalStop = variantSpan.getStop();
final int targetStart = targetRegionRange.getStart();
final int targetStop = targetRegionRange.getStop();
final boolean preTrimmingRequired = targetStart < finalStart;
final boolean postTrimmingRequired = targetStop > finalStop;
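        // For instance (hypothetical coordinates): a target region 1:100-200 with a variant span of 1:120-180
        // yields a left flank of 1:100-119 and a right flank of 1:181-200; when the variant span reaches an end
        // of the target region, the corresponding flank is reported as GenomeLoc.UNMAPPED.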
if (preTrimmingRequired) {
final String contig = targetRegionRange.getContig();
return postTrimmingRequired ? new Pair<>(
locParser.createGenomeLoc(contig, targetStart, finalStart - 1),
locParser.createGenomeLoc(contig, finalStop + 1, targetStop)) :
new Pair<>(locParser.createGenomeLoc(contig, targetStart, finalStart - 1),GenomeLoc.UNMAPPED);
} else if (postTrimmingRequired)
return new Pair<>(GenomeLoc.UNMAPPED,locParser.createGenomeLoc(targetRegionRange.getContig(), finalStop + 1, targetStop));
else
return new Pair<>(GenomeLoc.UNMAPPED,GenomeLoc.UNMAPPED);
}
}<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper;
import org.apache.log4j.Logger;
import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
import org.broadinstitute.gatk.utils.contexts.AlignmentContextUtils;
import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
import org.broadinstitute.gatk.tools.walkers.indels.PairHMMIndelErrorModel;
import org.broadinstitute.gatk.utils.BaseUtils;
import org.broadinstitute.gatk.utils.GenomeLoc;
import org.broadinstitute.gatk.utils.GenomeLocParser;
import org.broadinstitute.gatk.utils.haplotype.Haplotype;
import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
import org.broadinstitute.gatk.utils.pileup.PileupElement;
import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
import htsjdk.variant.variantcontext.*;
import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
import java.util.*;
public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoodsCalculationModel {
private static final int HAPLOTYPE_SIZE = 80;
private boolean DEBUG = false;
private boolean ignoreSNPAllelesWhenGenotypingIndels = false;
private PairHMMIndelErrorModel pairModel;
private LinkedHashMap<Allele, Haplotype> haplotypeMap;
private List<Allele> alleleList = new ArrayList<Allele>();
protected IndelGenotypeLikelihoodsCalculationModel(final UnifiedArgumentCollection UAC,
final Logger logger) {
super(UAC, logger);
pairModel = new PairHMMIndelErrorModel(UAC.INDEL_GAP_OPEN_PENALTY, UAC.INDEL_GAP_CONTINUATION_PENALTY,
UAC.OUTPUT_DEBUG_INDEL_INFO, UAC.pairHMM);
DEBUG = UAC.OUTPUT_DEBUG_INDEL_INFO;
haplotypeMap = new LinkedHashMap<Allele, Haplotype>();
ignoreSNPAllelesWhenGenotypingIndels = UAC.IGNORE_SNP_ALLELES;
}
protected static List<Allele> computeConsensusAlleles(final ReferenceContext ref,
final Map<String, AlignmentContext> contexts,
final AlignmentContextUtils.ReadOrientation contextType,
final UnifiedArgumentCollection UAC) {
ConsensusAlleleCounter counter = new ConsensusAlleleCounter(true, UAC.MIN_INDEL_COUNT_FOR_GENOTYPING, UAC.MIN_INDEL_FRACTION_PER_SAMPLE);
return counter.computeConsensusAlleles(ref, contexts, contextType);
}
private final static EnumSet<VariantContext.Type> allowableTypes = EnumSet.of(VariantContext.Type.INDEL, VariantContext.Type.MIXED);
public VariantContext getLikelihoods(final RefMetaDataTracker tracker,
final ReferenceContext ref,
final Map<String, AlignmentContext> contexts,
final AlignmentContextUtils.ReadOrientation contextType,
final List<Allele> allAllelesToUse,
final boolean useBAQedPileup,
final GenomeLocParser locParser,
final Map<String, PerReadAlleleLikelihoodMap> perReadAlleleLikelihoodMap) {
GenomeLoc loc = ref.getLocus();
// if (!ref.getLocus().equals(lastSiteVisited)) {
if (contextType == AlignmentContextUtils.ReadOrientation.COMPLETE) {
// starting a new site: clear allele list
haplotypeMap.clear();
perReadAlleleLikelihoodMap.clear(); // clean mapping sample-> per read, per allele likelihoods
alleleList = getInitialAlleleList(tracker, ref, contexts, contextType, UAC, ignoreSNPAllelesWhenGenotypingIndels);
if (alleleList.isEmpty())
return null;
}
getHaplotypeMapFromAlleles(alleleList, ref, loc, haplotypeMap); // will update haplotypeMap adding elements
if (haplotypeMap == null || haplotypeMap.isEmpty())
return null;
// start making the VariantContext
        // For all non-SNP VC types, the VC end location is startLocation + (length of the ref allele, including the padding base) - 1.
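        // e.g. (hypothetical event): a deletion whose reference allele is ACT and which starts at position 100
        // ends at 100 + 3 - 1 = 102.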
final int endLoc = loc.getStart() + alleleList.get(0).length() - 1;
final int eventLength = getEventLength(alleleList);
final VariantContextBuilder builder = new VariantContextBuilder("UG_call", loc.getContig(), loc.getStart(), endLoc, alleleList);
// create the genotypes; no-call everyone for now
GenotypesContext genotypes = GenotypesContext.create();
final int ploidy = UAC.genotypeArgs.samplePloidy;
final List<Allele> noCall = GATKVariantContextUtils.noCallAlleles(ploidy);
// For each sample, get genotype likelihoods based on pileup
// compute prior likelihoods on haplotypes, and initialize haplotype likelihood matrix with them.
for (Map.Entry<String, AlignmentContext> sample : contexts.entrySet()) {
AlignmentContext context = AlignmentContextUtils.stratify(sample.getValue(), contextType);
if (!perReadAlleleLikelihoodMap.containsKey(sample.getKey())){
// no likelihoods have been computed for this sample at this site
perReadAlleleLikelihoodMap.put(sample.getKey(), new PerReadAlleleLikelihoodMap());
}
final ReadBackedPileup pileup = context.getBasePileup();
if (pileup != null) {
final GenotypeBuilder b = new GenotypeBuilder(sample.getKey());
final double[] genotypeLikelihoods = pairModel.computeDiploidReadHaplotypeLikelihoods(pileup, haplotypeMap, ref, eventLength, perReadAlleleLikelihoodMap.get(sample.getKey()), UAC.getSampleContamination().get(sample.getKey()));
b.PL(genotypeLikelihoods);
b.alleles(noCall);
b.DP(getFilteredDepth(pileup));
genotypes.add(b.make());
if (DEBUG) {
System.out.format("Sample:%s Alleles:%s GL:", sample.getKey(), alleleList.toString());
for (int k = 0; k < genotypeLikelihoods.length; k++)
System.out.format("%1.4f ", genotypeLikelihoods[k]);
System.out.println();
}
}
}
return builder.genotypes(genotypes).make();
}
public static void getHaplotypeMapFromAlleles(final List<Allele> alleleList,
final ReferenceContext ref,
final GenomeLoc loc,
final LinkedHashMap<Allele, Haplotype> haplotypeMap) {
// protect against having an indel too close to the edge of a contig
if (loc.getStart() <= HAPLOTYPE_SIZE)
haplotypeMap.clear();
// check if there is enough reference window to create haplotypes (can be an issue at end of contigs)
else if (ref.getWindow().getStop() < loc.getStop() + HAPLOTYPE_SIZE)
haplotypeMap.clear();
else if (alleleList.isEmpty())
haplotypeMap.clear();
else {
final int eventLength = getEventLength(alleleList);
final int hsize = ref.getWindow().size() - Math.abs(eventLength) - 1;
final int numPrefBases = ref.getLocus().getStart() - ref.getWindow().getStart() + 1;
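            // e.g. (hypothetical window): a 201bp reference window centered on the locus with a 5bp deletion event
            // gives hsize = 201 - 5 - 1 = 195 and numPrefBases = 101.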
if (hsize <= 0) // protect against event lengths larger than ref window sizes
haplotypeMap.clear();
else
haplotypeMap.putAll(Haplotype.makeHaplotypeListFromAlleles(alleleList, loc.getStart(),
ref, hsize, numPrefBases));
}
}
public static int getEventLength(List<Allele> alleleList) {
Allele refAllele = alleleList.get(0);
Allele altAllele = alleleList.get(1);
// look for alt allele that has biggest length distance to ref allele
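        // e.g. (hypothetical alleles): ref = AC with alts {A, ACGTT} picks ACGTT, so the event length is 5 - 2 = +3
        // (an insertion); ref = ACGT with alt = A would give 1 - 4 = -3 (a deletion).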
int maxLenDiff = 0;
for (Allele a : alleleList) {
if (a.isNonReference()) {
int lenDiff = Math.abs(a.getBaseString().length() - refAllele.getBaseString().length());
if (lenDiff > maxLenDiff) {
maxLenDiff = lenDiff;
altAllele = a;
}
}
}
return altAllele.getBaseString().length() - refAllele.getBaseString().length();
}
public static List<Allele> getInitialAlleleList(final RefMetaDataTracker tracker,
final ReferenceContext ref,
final Map<String, AlignmentContext> contexts,
final AlignmentContextUtils.ReadOrientation contextType,
final UnifiedArgumentCollection UAC,
final boolean ignoreSNPAllelesWhenGenotypingIndels) {
List<Allele> alleles = new ArrayList<Allele>();
if (UAC.genotypingOutputMode == GenotypingOutputMode.GENOTYPE_GIVEN_ALLELES) {
VariantContext vc = null;
for (final VariantContext vc_input : tracker.getValues(UAC.alleles, ref.getLocus())) {
if (vc_input != null &&
allowableTypes.contains(vc_input.getType()) &&
ref.getLocus().getStart() == vc_input.getStart()) {
vc = vc_input;
break;
}
}
// ignore places where we don't have a variant
if (vc == null)
return alleles;
if (ignoreSNPAllelesWhenGenotypingIndels) {
                // if there's an allele that has the same length as the reference (i.e. a SNP or MNP), ignore it and don't genotype it
for (Allele a : vc.getAlleles())
if (a.isNonReference() && a.getBases().length == vc.getReference().getBases().length)
continue;
else
alleles.add(a);
} else {
alleles.addAll(vc.getAlleles());
}
} else {
alleles = computeConsensusAlleles(ref, contexts, contextType, UAC);
}
return alleles;
}
// Overload function in GenotypeLikelihoodsCalculationModel so that, for an indel case, we consider a deletion as part of the pileup,
// so that per-sample DP will include deletions covering the event.
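    // e.g. (hypothetical pileup): 10 regular bases plus 3 spanning deletions yield a filtered depth of 13 here,
    // whereas counting regular bases alone would give 10.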
protected int getFilteredDepth(ReadBackedPileup pileup) {
int count = 0;
for (PileupElement p : pileup) {
if (p.isDeletion() || BaseUtils.isRegularBase(p.getBase()))
count++;
}
return count;
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/afcalc/IndependentAllelesExactAFCalculator.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper.afcalc;
import com.google.java.contract.Ensures;
import com.google.java.contract.Requires;
import htsjdk.variant.variantcontext.*;
import org.broadinstitute.gatk.tools.walkers.genotyper.GeneralPloidyGenotypeLikelihoods;
import org.broadinstitute.gatk.tools.walkers.genotyper.GenotypeAlleleCounts;
import org.broadinstitute.gatk.tools.walkers.genotyper.GenotypeLikelihoodCalculator;
import org.broadinstitute.gatk.tools.walkers.genotyper.GenotypeLikelihoodCalculators;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
import java.util.*;
/**
* Independent allele exact AF calculator for any ploidy.
*
* <p>
* The method is described in {@link IndependentAllelesDiploidExactAFCalculator} for diploids.
* </p>
*
* @author <NAME> <<EMAIL>>
*/
public class IndependentAllelesExactAFCalculator extends ExactAFCalculator {
/**
* Array that caches the allele list that corresponds to the ith ploidy.
*
* <p>
     * Each position <i>i</i> of the array makes reference to a list that contains <i>i</i> copies of {@link Allele#NO_CALL}.
* </p>
*
*<p>
* This array must be queried using {@link #biallelicNoCall(int)}, which will extend the cache
* to larger ploidies if needed.
* </p>
*/
private static volatile List<Allele>[] BIALLELIC_NOCALL = initialBiallelicNoCall(10);
/**
     * Array that caches the non-informative likelihoods (PL) array that corresponds to the ith ploidy.
*
* <p>
     * Each position <i>i</i> of the array makes reference to an array that contains
* all-zero likelihoods with the number of genotypes that correspond
* to a biallelic variant with ploidy <i>i</i>.
* </p>
*
* <p>
* This array must be queried using {@link #biallelicNonInformativePls(int)}, which will extend the cache
* to larger ploidies if needed.
* </p>
*/
private static volatile int[][] BIALLELIC_NON_INFORMATIVE_PLS_BY_PLOIDY = initialBiallelicNonInformativePLsByPloidy(10);
private static final Comparator<AFCalculationResult> AFCALC_RESULT_BY_PNONREF_COMPARATOR = new Comparator<AFCalculationResult>() {
@Override
@Requires("o1 != null && o1 != null")
public int compare(final AFCalculationResult o1, final AFCalculationResult o2) {
return -1 * Double.compare(o1.getLog10PosteriorOfAFGT0(), o2.getLog10PosteriorOfAFGT0());
}
};
private final ExactAFCalculator biallelicExactAFCalculator;
protected IndependentAllelesExactAFCalculator(final ExactAFCalculator biallelicExactAFCalculator) {
if (biallelicExactAFCalculator == null)
throw new IllegalArgumentException("the biallelic exact AF calculator cannot be null");
this.biallelicExactAFCalculator = biallelicExactAFCalculator;
}
/**
     * Creates a new calculator that delegates to {@link GeneralPloidyExactAFCalculator} to run
* the exact model per allele.
*
* <p>
     * Note: this constructor may be called using reflection.
* </p>
*/
@SuppressWarnings("unused")
protected IndependentAllelesExactAFCalculator() {
this(new GeneralPloidyExactAFCalculator());
}
@Override
@Requires("vc != null && likelihoodSums != null")
protected void reduceScopeCalculateLikelihoodSums(final VariantContext vc, final int defaultPloidy, final LikelihoodSum[] likelihoodSums) {
final int numOriginalAltAlleles = likelihoodSums.length;
final GenotypesContext genotypes = vc.getGenotypes();
for ( final Genotype genotype : genotypes.iterateInSampleNameOrder() ) {
if (!genotype.hasPL())
continue;
final double[] gls = genotype.getLikelihoods().getAsVector();
if (MathUtils.sum(gls) >= GATKVariantContextUtils.SUM_GL_THRESH_NOCALL)
continue;
final int PLindexOfBestGL = MathUtils.maxElementIndex(gls);
final double bestToHomRefDiffGL = PLindexOfBestGL == PL_INDEX_OF_HOM_REF ? 0.0 : gls[PLindexOfBestGL] - gls[PL_INDEX_OF_HOM_REF];
final int declaredPloidy = genotype.getPloidy();
final int ploidy = declaredPloidy <= 0 ? defaultPloidy : declaredPloidy;
final int[] acCount = GeneralPloidyGenotypeLikelihoods.getAlleleCountFromPLIndex(1 + numOriginalAltAlleles, ploidy, PLindexOfBestGL);
            // by convention, the first count coming from getAlleleCountFromPLIndex corresponds to the reference allele
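            // e.g. (hypothetical, diploid with two alt alleles): a best genotype of ref/alt2 yields acCount = {1, 0, 1},
            // so only likelihoodSums[1] is incremented, by 1 * bestToHomRefDiffGL.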
for (int k=1; k < acCount.length;k++)
if (acCount[k] > 0 )
likelihoodSums[k-1].sum += acCount[k] * bestToHomRefDiffGL;
}
}
@Override
protected GenotypesContext reduceScopeGenotypes(final VariantContext vc, final int defaultPloidy, final List<Allele> allelesToUse) {
return subsetAlleles(vc,defaultPloidy,allelesToUse,false);
}
@Override
@Requires("vc != null && log10AlleleFrequencyPriors != null && stateTracker != null")
protected AFCalculationResult computeLog10PNonRef(final VariantContext vc, final int defaultPloidy, final double[] log10AlleleFrequencyPriors, final StateTracker stateTracker) {
final List<AFCalculationResult> independentResultTrackers = computeAlleleIndependentExact(vc, defaultPloidy, log10AlleleFrequencyPriors);
// Paranoia check:
if ( independentResultTrackers.size() <= 1 )
throw new IllegalStateException("Independent alleles model returned an empty list of results at VC " + vc);
else if ( independentResultTrackers.size() == 2 ) {
// fast path for the very common bi-allelic use case
return independentResultTrackers.get(1);
} else {
final List<AFCalculationResult> alternativesOnly = new ArrayList<>(independentResultTrackers.size() - 1);
for (int i = 1; i < independentResultTrackers.size(); i++)
alternativesOnly.add(independentResultTrackers.get(i));
// we are a multi-allelic, so we need to actually combine the results
final List<AFCalculationResult> withMultiAllelicPriors = applyMultiAllelicPriors(alternativesOnly);
return combineIndependentPNonRefs(vc, withMultiAllelicPriors, independentResultTrackers.get(0));
}
}
@Requires("conditionalPNonRefResults != null and !conditionalPNonRefResults.empty()")
protected final List<AFCalculationResult> applyMultiAllelicPriors(final List<AFCalculationResult> conditionalPNonRefResults) {
final ArrayList<AFCalculationResult> sorted = new ArrayList<AFCalculationResult>(conditionalPNonRefResults);
// sort the results, so the most likely allele is first
Collections.sort(sorted, AFCALC_RESULT_BY_PNONREF_COMPARATOR);
double lastPosteriorGt0 = sorted.get(0).getLog10PosteriorOfAFGT0();
final double log10SingleAllelePriorOfAFGt0 = conditionalPNonRefResults.get(0).getLog10PriorOfAFGT0();
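        // Illustrative numbers (assuming a per-allele prior of AF > 0 of 1e-3): the most likely alt keeps a prior
        // of 1e-3, the second most likely gets 1e-6, the third 1e-9, and so on, i.e. theta^(i+1) for the i-th
        // entry of the sorted list below.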
for ( int i = 0; i < sorted.size(); i++ ) {
if ( sorted.get(i).getLog10PosteriorOfAFGT0() > lastPosteriorGt0 )
throw new IllegalStateException("pNonRefResults not sorted: lastPosteriorGt0 " + lastPosteriorGt0 + " but current is " + sorted.get(i).getLog10PosteriorOfAFGT0());
final double log10PriorAFGt0 = (i + 1) * log10SingleAllelePriorOfAFGt0;
final double log10PriorAFEq0 = Math.log10(1 - Math.pow(10, log10PriorAFGt0));
final double[] thetaTONPriors = new double[] { log10PriorAFEq0, log10PriorAFGt0 };
// bind pNonRef for allele to the posterior value of the AF > 0 with the new adjusted prior
sorted.set(i, sorted.get(i).withNewPriors(MathUtils.normalizeFromLog10(thetaTONPriors, true)));
}
return sorted;
}
/**
* Take the independent estimates of pNonRef for each alt allele and combine them into a single result
*
* Given n independent calculations for each of n alternate alleles create a single
* combined AFCalcResult with:
*
* priors for AF == 0 equal to theta^N for the nth least likely allele
* posteriors that reflect the combined chance that any alleles are segregating and corresponding
* likelihoods
* combined MLEs in the order of the alt alleles in vc
*
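     * <p>For instance (hypothetical counts): with two alt alleles whose independent MLE counts are 1 and 2, the
     * combined result carries alleleCountsOfMLE = {1, 2}, ordered as the alt alleles appear in vc.</p>
     *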
* @param sortedResultsWithThetaNPriors the pNonRef result for each allele independently
*/
@Requires("vc != null && sortedResultsWithThetaNPriors != null && combinedAltAllelesResult != null")
protected AFCalculationResult combineIndependentPNonRefs(final VariantContext vc,
final List<AFCalculationResult> sortedResultsWithThetaNPriors,
final AFCalculationResult combinedAltAllelesResult) {
int nEvaluations = 0;
final int nAltAlleles = sortedResultsWithThetaNPriors.size();
final int[] alleleCountsOfMLE = new int[nAltAlleles];
final Map<Allele, Double> log10pRefByAllele = new HashMap<>(nAltAlleles);
// the sum of the log10 posteriors for AF == 0 and AF > 0 to determine joint probs
for ( final AFCalculationResult sortedResultWithThetaNPriors : sortedResultsWithThetaNPriors ) {
final Allele altAllele = sortedResultWithThetaNPriors.getAllelesUsedInGenotyping().get(1);
final int altI = vc.getAlleles().indexOf(altAllele) - 1;
// MLE of altI allele is simply the MLE of this allele in altAlleles
alleleCountsOfMLE[altI] = sortedResultWithThetaNPriors.getAlleleCountAtMLE(altAllele);
// bind pNonRef for allele to the posterior value of the AF > 0 with the new adjusted prior
log10pRefByAllele.put(altAllele, sortedResultWithThetaNPriors.getLog10PosteriorOfAFEq0());
// trivial -- update the number of evaluations
nEvaluations += sortedResultWithThetaNPriors.nEvaluations;
}
return new IndependentAlleleAFCalculationResult(alleleCountsOfMLE, nEvaluations, vc.getAlleles(),
// necessary to ensure all values < 0
MathUtils.normalizeFromLog10(new double[] { combinedAltAllelesResult.getLog10LikelihoodOfAFEq0(), combinedAltAllelesResult.getLog10LikelihoodOfAFGT0() }, true),
// priors incorporate multiple alt alleles, must be normalized
MathUtils.normalizeFromLog10(new double[] { combinedAltAllelesResult.getLog10PriorOfAFEq0(), combinedAltAllelesResult.getLog10PriorOfAFGT0() }, true),
log10pRefByAllele, sortedResultsWithThetaNPriors);
}
/**
* Compute the conditional exact AFCalcResult for each allele in vc independently, returning
* the result of each, in order of the alt alleles in VC
*
* @param vc the VariantContext we want to analyze, with at least 1 alt allele
* @param log10AlleleFrequencyPriors the priors
* @return a list of the AFCalcResults for each bi-allelic sub context of vc
*/
@Requires({"vc != null", "log10AlleleFrequencyPriors != null"})
@Ensures("goodIndependentResult(vc, result)")
protected final List<AFCalculationResult> computeAlleleIndependentExact(final VariantContext vc, final int defaultPloidy,
final double[] log10AlleleFrequencyPriors) {
final List<AFCalculationResult> results = new LinkedList<>();
for ( final VariantContext subvc : makeAlleleConditionalContexts(vc, defaultPloidy) ) {
final AFCalculationResult resultTracker = biallelicExactAFCalculator.getLog10PNonRef(subvc, defaultPloidy, vc.getNAlleles() - 1, log10AlleleFrequencyPriors);
results.add(resultTracker);
}
return results;
}
/**
     * Returns a bi-allelic variant context for each allele in vc with bi-allelic likelihoods, in order:
     * first the reference versus the combined alternatives, then one context per alt allele.
     *
     * @param vc the variant context to split. Must have n.alt.alleles > 1
     * @return a list with one bi-allelic variant context per allele in vc
*/
@Requires({"vc != null", "vc.getNAlleles() > 1"})
@Ensures("result.size() == vc.getNAlleles() - 1")
protected final List<VariantContext> makeAlleleConditionalContexts(final VariantContext vc, final int defaultPloidy) {
final int nAlleles = vc.getNAlleles();
// go through the work of ripping up the VC into its biallelic components
final List<VariantContext> vcs = new LinkedList<>();
for ( int alleleIndex = 0; alleleIndex < nAlleles; alleleIndex++ ) {
vcs.add(biallelicCombinedGLs(vc, defaultPloidy, alleleIndex));
}
return vcs;
}
/**
* Create a single bi-allelic variant context from rootVC with alt allele with index altAlleleIndex
*
* @param rootVC the root (potentially multi-allelic) variant context
     * @param alleleIndex index of the allele to keep, where 0 means the reference (with all alternates collapsed into a single non-ref allele)
* @return a bi-allelic variant context based on rootVC
*/
@Requires({"rootVC.getNAlleles() > 1", "altAlleleIndex < rootVC.getNAlleles()"})
@Ensures({"result.isBiallelic()"})
protected final VariantContext biallelicCombinedGLs(final VariantContext rootVC, final int defaultPloidy, final int alleleIndex) {
if ( rootVC.isBiallelic() ) {
return rootVC;
} else {
final int nAlleles = rootVC.getNAlleles();
final List<Genotype> biallelicGenotypes = new ArrayList<>(rootVC.getNSamples());
for ( final Genotype g : rootVC.getGenotypes() )
biallelicGenotypes.add(combineGLs(g, defaultPloidy, alleleIndex, nAlleles));
final VariantContextBuilder vcb = new VariantContextBuilder(rootVC);
final Allele allele = alleleIndex == 0 ? rootVC.getReference() : rootVC.getAlternateAllele(alleleIndex - 1);
vcb.alleles(alleleIndex == 0 ? Arrays.asList(allele, GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE) : Arrays.asList(rootVC.getReference(), allele));
vcb.genotypes(biallelicGenotypes);
return vcb.make();
}
}
/**
* Returns a new Genotype with the PLs of the multi-allelic original reduced to a bi-allelic case.
*
* <p>Uses the log-sum-exp trick in order to work well with very low PLs</p>
*
* <p>This is handled in the following way:</p>
*
* <p>Suppose we have for a A/B/C site the following GLs:</p>
*
* <p>AA AB BB AC BC CC</p>
*
* <p>and we want to get the bi-allelic GLs for X/B, where X is everything not B</p>
*
* <p>XX = AA + AC + CC (since X = A or C)<br/>
* XB = AB + BC <br/>
* BB = BB <br/>
* </p>
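     *
     * <p>For instance (purely hypothetical PLs): with original PLs {AA=0, AB=30, BB=60, AC=10, BC=40, CC=20},
     * the combined X/B values are obtained by log10-summing the corresponding genotype likelihoods
     * (XX from {AA, AC, CC}, XB from {AB, BC}, BB unchanged) and then rescaling back to PLs.</p>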
* <p>
* This implementation uses the log-sum-exp trick in order to avoid numeric instability (underflow).
* </p>
*
     * @param original the original multi-allelic genotype
     * @param defaultPloidy the ploidy to assume when the original genotype does not declare one
     * @param alleleIndex the index of the alt allele we wish to keep in the biallelic case -- with ref == 0
* @param numberOfAlleles the total number of alleles (alternatives + the reference).
* @return a new biallelic genotype with appropriate PLs
*/
@Requires({"original.hasLikelihoods() && alleleIndex >= 0"})
@Ensures({"result.hasLikelihoods()"})
private Genotype combineGLs(final Genotype original, final int defaultPloidy, final int alleleIndex, final int numberOfAlleles ) {
final int declaredPloidy = original.getPloidy();
final int ploidy = declaredPloidy <= 0 ? defaultPloidy : declaredPloidy;
if ( original.isNonInformative() )
return new GenotypeBuilder(original).PL(biallelicNonInformativePls(ploidy)).alleles(biallelicNoCall(ploidy)).make();
final int[] pls = original.getPL();
final GenotypeLikelihoodCalculator calculator = GenotypeLikelihoodCalculators.getInstance(ploidy, numberOfAlleles);
final double[] newPLs = new double[ploidy + 1];
Arrays.fill(newPLs, Double.NEGATIVE_INFINITY);
for (int i = 0; i < pls.length; i++) {
final GenotypeAlleleCounts alleleCounts = calculator.genotypeAlleleCountsAt(i);
final int alleleCount = alleleCounts.alleleCountFor(alleleIndex);
final int newPLIndex = alleleIndex == 0 ? ploidy - alleleCount : alleleCount;
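            // e.g. (hypothetical, diploid A/B/C site keeping allele C, i.e. alleleIndex = 2): genotype AC carries one
            // copy of C and therefore contributes to the heterozygous bin (newPLIndex = 1); when alleleIndex == 0 the
            // count is inverted so that the index reflects the number of non-reference alleles instead.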
newPLs[newPLIndex] = MathUtils.approximateLog10SumLog10(newPLs[newPLIndex], -.1 * pls[i]);
}
return new GenotypeBuilder(original).PL(newPLs).alleles(biallelicNoCall(ploidy)).make();
}
private static List<Allele>[] initialBiallelicNoCall(final int initialCapacity) {
final List<Allele>[] result = new List[initialCapacity + 1];
for (int i = 0; i < result.length; i++) {
result[i] = GATKVariantContextUtils.noCallAlleles(i);
}
return result;
}
private static int[][] initialBiallelicNonInformativePLsByPloidy(final int initialCapacity) {
final int[][] result = new int[initialCapacity + 1][];
for (int i = 0; i < result.length; i++)
result[i] = new int[i]; // { 0, 0, 0 ... 0} is the actual uninformative PL array.
return result;
}
/**
* Returns a cached array of non-informative PLs (all 0) for a given ploidy.
* <p>
* Calling code must never change its elements.
* </p>
* @param ploidy the required ploidy.
* @return never {@code null}.
*/
@Requires("ploidy >= 0")
private static int[] biallelicNonInformativePls (final int ploidy) {
if (ploidy >= BIALLELIC_NON_INFORMATIVE_PLS_BY_PLOIDY.length) {
return enlargeIfNecessaryBiallelicNonInformativePlsByPloidyAndGet(ploidy);
} else {
return BIALLELIC_NON_INFORMATIVE_PLS_BY_PLOIDY[ploidy];
}
}
/**
* Thread-safe expansion of {@link #BIALLELIC_NON_INFORMATIVE_PLS_BY_PLOIDY}.
* @param ploidy the requested ploidy.
* @return the uninformative likelihoods array for the requested ploidy.
*/
private static synchronized int[] enlargeIfNecessaryBiallelicNonInformativePlsByPloidyAndGet(final int ploidy) {
if (ploidy >= BIALLELIC_NON_INFORMATIVE_PLS_BY_PLOIDY.length) {
final int[][] newValue = Arrays.copyOf(BIALLELIC_NON_INFORMATIVE_PLS_BY_PLOIDY, ploidy * 2);
for (int i = newValue.length - 1; i >= BIALLELIC_NON_INFORMATIVE_PLS_BY_PLOIDY.length; i--)
newValue[i] = new int[i]; // { 0, 0, 0.. } is the actual uninformative PL array.
BIALLELIC_NON_INFORMATIVE_PLS_BY_PLOIDY = newValue;
}
return BIALLELIC_NON_INFORMATIVE_PLS_BY_PLOIDY[ploidy];
}
/**
* Returns a cached list of no-call alleles {@link Allele#NO_CALL} that correspond to a given ploidy.
* <p>
* Calling code must never change its elements.
* </p>
* @param ploidy the required ploidy.
* @return never {@code null}.
*/
private static List<Allele> biallelicNoCall (final int ploidy) {
if (ploidy >= BIALLELIC_NOCALL.length) {
return enlargeIfNecessaryBiallelicNoCallAndGet(ploidy);
} else {
return BIALLELIC_NOCALL[ploidy];
}
}
/**
* Thread-safe expansion of {@link #BIALLELIC_NOCALL}.
* @param ploidy the requested ploidy.
* @return the no-call allele list for the requested ploidy.
*/
private static synchronized List<Allele> enlargeIfNecessaryBiallelicNoCallAndGet(final int ploidy) {
if (ploidy >= BIALLELIC_NOCALL.length) {
final List<Allele>[] newValue = Arrays.copyOf(BIALLELIC_NOCALL, ploidy * 2);
for (int i = newValue.length - 1; i >= BIALLELIC_NOCALL.length; i--)
newValue[i] = GATKVariantContextUtils.noCallAlleles(i);
BIALLELIC_NOCALL = newValue;
}
return BIALLELIC_NOCALL[ploidy];
}
@Override
@Requires("vc != null && allelesToUse != null")
public GenotypesContext subsetAlleles(VariantContext vc, int defaultPloidy, List<Allele> allelesToUse, boolean assignGenotypes) {
// the genotypes with PLs
final GenotypesContext oldGTs = vc.getGenotypes();
// samples
final List<String> sampleIndices = oldGTs.getSampleNamesOrderedByName();
// the new genotypes to create
final GenotypesContext newGTs = GenotypesContext.create();
// we need to determine which of the alternate alleles (and hence the likelihoods) to use and carry forward
final int numOriginalAltAlleles = vc.getAlternateAlleles().size();
final int numNewAltAlleles = allelesToUse.size() - 1;
// create the new genotypes
for ( int k = 0; k < oldGTs.size(); k++ ) {
final Genotype g = oldGTs.get(sampleIndices.get(k));
final int declaredPloidy = g.getPloidy();
final int ploidy = declaredPloidy <= 0 ? defaultPloidy : declaredPloidy;
if ( !g.hasLikelihoods() ) {
newGTs.add(GenotypeBuilder.create(g.getSampleName(),GATKVariantContextUtils.noCallAlleles(ploidy)));
continue;
}
// create the new likelihoods array from the alleles we are allowed to use
final double[] originalLikelihoods = g.getLikelihoods().getAsVector();
double[] newLikelihoods;
            // Optimization: if the allele set is unchanged, or if # of new alt alleles = 0 (pure ref call),
            // keep the original likelihoods so we skip subsetting and normalization
if ( numOriginalAltAlleles == numNewAltAlleles || numNewAltAlleles == 0) {
newLikelihoods = originalLikelihoods;
} else {
newLikelihoods = GeneralPloidyGenotypeLikelihoods.subsetToAlleles(originalLikelihoods, ploidy, vc.getAlleles(), allelesToUse);
// might need to re-normalize
newLikelihoods = MathUtils.normalizeFromLog10(newLikelihoods, false, true);
}
// if there is no mass on the (new) likelihoods, then just no-call the sample
if ( MathUtils.sum(newLikelihoods) > GATKVariantContextUtils.SUM_GL_THRESH_NOCALL ) {
newGTs.add(GenotypeBuilder.create(g.getSampleName(), GATKVariantContextUtils.noCallAlleles(ploidy)));
} else {
final GenotypeBuilder gb = new GenotypeBuilder(g);
final String sampleName = g.getSampleName();
if ( numNewAltAlleles == 0 )
gb.noPL();
else
gb.PL(newLikelihoods);
                // if we weren't asked to assign a genotype, or the likelihoods carry no information, just no-call the sample
if ( !assignGenotypes || MathUtils.sum(newLikelihoods) > GATKVariantContextUtils.SUM_GL_THRESH_NOCALL )
gb.alleles(GATKVariantContextUtils.noCallAlleles(ploidy));
else
assignGenotype(gb, vc, sampleName, newLikelihoods, allelesToUse, ploidy);
newGTs.add(gb.make());
}
}
return GATKVariantContextUtils.fixADFromSubsettedAlleles(newGTs, vc, allelesToUse);
}
/**
* Assign a genotype (GT) to the given sample's GenotypeBuilder greedily based on the PLs
*
* @param gb the GenotypeBuilder to modify
* @param vc the VariantContext
* @param sampleName the sample name
* @param newLikelihoods the PL array
* @param allelesToUse the list of alleles to choose from (corresponding to the PLs)
* @param numChromosomes Number of chromosomes per pool
*/
private void assignGenotype(final GenotypeBuilder gb,
final VariantContext vc,
final String sampleName,
final double[] newLikelihoods,
final List<Allele> allelesToUse,
final int numChromosomes) {
final int numNewAltAlleles = allelesToUse.size() - 1;
// find the genotype with maximum likelihoods
final int PLindex = numNewAltAlleles == 0 ? 0 : MathUtils.maxElementIndex(newLikelihoods);
final GenotypeLikelihoodCalculator calculator = GenotypeLikelihoodCalculators.getInstance(numChromosomes,allelesToUse.size());
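// map the flat PL index back to per-allele counts using the canonical VCF/GATK genotype ordering;
// e.g. for a diploid over 3 alleles the ordering is [0/0, 0/1, 1/1, 0/2, 1/2, 2/2], so index 4
// decodes to one copy each of alleles 1 and 2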
final GenotypeAlleleCounts alleleCounts = calculator.genotypeAlleleCountsAt(PLindex);
gb.alleles(alleleCounts.asAlleleList(allelesToUse));
removePLsIfMaxNumPLValuesExceeded(gb, vc, sampleName, newLikelihoods);
if ( numNewAltAlleles > 0 )
gb.log10PError(GenotypeLikelihoods.getGQLog10FromLikelihoods(PLindex, newLikelihoods));
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/LDMerger.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller;
import org.apache.commons.lang.ArrayUtils;
import org.apache.log4j.Logger;
import org.broadinstitute.gatk.utils.GenomeLoc;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextBuilder;
import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods;
import org.broadinstitute.gatk.utils.haplotype.Haplotype;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.TreeSet;
/**
* Merges VariantContexts in a series of haplotypes according to their pairwise LD
*
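* <p>Illustrative usage sketch (variable names are hypothetical, not taken from this codebase):</p>
* <pre>{@code
*   final LDMerger merger = new LDMerger(false, 1, 1);
*   merger.merge(haplotypes, readLikelihoods, startPositions, refBases, refLoc);
* }</pre>
*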
* User: depristo
* Date: 3/28/13
* Time: 6:17 PM
*/
public class LDMerger extends MergeVariantsAcrossHaplotypes {
private final static Logger logger = Logger.getLogger(LDMerger.class);
private final boolean DEBUG;
private final int minSamplesToMergeSNPs;
private final int minSamplesToMergeOtherEvents;
public LDMerger(boolean DEBUG, int minSamplesToMergeSNPs, int minSamplesToMergeOtherEvents) {
super();
this.DEBUG = DEBUG;
this.minSamplesToMergeSNPs = minSamplesToMergeSNPs;
this.minSamplesToMergeOtherEvents = minSamplesToMergeOtherEvents;
}
protected LDMerger() {
this(false, 1, 1);
}
// TODO -- should be class arguments and static variables in HC
protected final static int MAX_DISTANCE_BETWEEN_SNPS_TO_MERGE = 6;
protected final static int MAX_DISTANCE_BETWEEN_OTHER_EVENTS_TO_MERGE = 25;
/**
* We require 99% confidence that only the phased haplotypes exist in the population to merge the records
*/
protected final static double MERGE_EVENTS_PROB_PHASED_THRESHOLD = 0.99;
/**
* Merge as many events among the haplotypes as possible based on pairwise LD among variants
*
* @param haplotypes a list of haplotypes whose events we want to merge
* @param readLikelihoods map from sample name -> read likelihoods for each haplotype
* @param startPosKeySet a set of starting positions of all events among the haplotypes
* @param ref the reference bases
* @param refLoc the span of the reference bases
* @return true if any events were merged, false otherwise
*/
@Override
public boolean merge( final List<Haplotype> haplotypes,
final ReadLikelihoods<Haplotype> readLikelihoods,
final TreeSet<Integer> startPosKeySet,
final byte[] ref,
final GenomeLoc refLoc ) {
if ( haplotypes == null ) throw new IllegalArgumentException("haplotypes cannot be null");
if ( readLikelihoods == null ) throw new IllegalArgumentException("readLikelihoods cannot be null");
if ( startPosKeySet == null ) throw new IllegalArgumentException("startPosKeySet cannot be null");
if ( ref == null ) throw new IllegalArgumentException("ref cannot be null");
if ( refLoc == null ) throw new IllegalArgumentException("refLoc cannot be null");
if ( refLoc.size() != ref.length ) throw new IllegalArgumentException("refLoc size " + refLoc.size() + " != ref.length " + ref.length + " at " + refLoc);
if( startPosKeySet.size() <= 1 ) { return false; }
final int nSamples = readLikelihoods.sampleCount();
final HaplotypeLDCalculator r2Calculator = new HaplotypeLDCalculator(haplotypes, readLikelihoods);
boolean somethingWasMerged = false;
boolean mapWasUpdated = true;
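// keep applying single-pass merges until one full pass over the start positions changes nothing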
while( mapWasUpdated ) {
mapWasUpdated = mergeConsecutiveEventsBasedOnLDOnce(haplotypes, r2Calculator, nSamples, startPosKeySet, ref, refLoc);
somethingWasMerged |= mapWasUpdated;
}
return somethingWasMerged;
}
/**
* Merge the next pair of events, if possible
*
* @param haplotypes a list of haplotypes whose events we want to merge
* @param ldCalculator calculates R^2 for pairs of events on demand
* @param nSamples the number of samples in the read likelihoods
* @param startPosKeySet a set of starting positions of all events among the haplotypes
* @param ref the reference bases
* @param refLoc the span of the reference bases
* @return true if something was merged, false otherwise
*/
protected boolean mergeConsecutiveEventsBasedOnLDOnce( final List<Haplotype> haplotypes,
final HaplotypeLDCalculator ldCalculator,
final int nSamples,
final TreeSet<Integer> startPosKeySet,
final byte[] ref,
final GenomeLoc refLoc ) {
// loop over the set of start locations and consider pairs that start near each other
final Iterator<Integer> iter = startPosKeySet.iterator();
int thisStart = iter.next();
while( iter.hasNext() ) {
final int nextStart = iter.next();
final LDMergeData toMerge = getPairOfEventsToMerge(haplotypes, thisStart, nextStart);
if ( toMerge.canBeMerged(nSamples) ) {
final double pPhased = ldCalculator.computeProbOfBeingPhased(toMerge.firstVC, toMerge.secondVC);
if( DEBUG ) {
logger.info("Found consecutive biallelic events with R^2 = " + String.format("%.4f", pPhased));
logger.info("-- " + toMerge.firstVC);
logger.info("-- " + toMerge.secondVC);
}
if( pPhased > MERGE_EVENTS_PROB_PHASED_THRESHOLD) {
final VariantContext mergedVC = createMergedVariantContext(toMerge.firstVC, toMerge.secondVC, ref, refLoc);
// if for some reason the merging resulted in a bad allele, mergedVC will be null, and we will just remove first and second
replaceVariantContextsInMap(haplotypes, startPosKeySet, mergedVC, toMerge.firstVC, toMerge.secondVC);
return true; // break out of tree set iteration since it was just updated, start over from the beginning and keep merging events
}
}
thisStart = nextStart;
}
return false;
}
/**
* Info about potential LD merge of two variant contexts
*/
private class LDMergeData {
VariantContext firstVC = null, secondVC = null;
boolean canBeMerged = true;
/** Tell this object that it can't be merged for some reason */
public LDMergeData cantBeMerged() {
canBeMerged = false;
return this;
}
/**
* Can these two events be merged
* @param nSamples the number of samples we're considering
* @return true if we can merge our two variant contexts
*/
public boolean canBeMerged(final int nSamples) {
if ( ! canBeMerged || firstVC == null || secondVC == null )
return false;
final int distance = secondVC.getStart() - firstVC.getEnd();
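// distance between the end of the first event and the start of the second; e.g. SNPs ending at
// position 100 and starting at position 104 have distance 4, which is within
// MAX_DISTANCE_BETWEEN_SNPS_TO_MERGE (6)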
if ( firstVC.isSNP() && secondVC.isSNP() ) {
return nSamples >= minSamplesToMergeSNPs && distance <= MAX_DISTANCE_BETWEEN_SNPS_TO_MERGE;
} else {
return nSamples >= minSamplesToMergeOtherEvents && distance <= MAX_DISTANCE_BETWEEN_OTHER_EVENTS_TO_MERGE;
}
}
}
/**
* Get the information about the potential merge of two events starting at thisStart and nextStart
* @param haplotypes our haplotypes
* @param thisStart the starting position of the first event to merge
* @param nextStart the starting position of the next event to merge
* @return never {@code null}.
*/
private LDMergeData getPairOfEventsToMerge(final List<Haplotype> haplotypes, final int thisStart, final int nextStart) {
final LDMergeData mergeData = new LDMergeData();
for( final Haplotype h : haplotypes ) {
// only make complex substitutions out of consecutive biallelic sites
final VariantContext thisHapVC = h.getEventMap().get(thisStart);
if( thisHapVC != null && !thisHapVC.isSymbolic() ) { // something was found at this location on this haplotype
if( mergeData.firstVC == null ) {
mergeData.firstVC = thisHapVC;
} else if( !thisHapVC.hasSameAllelesAs( mergeData.firstVC) ) {
return mergeData.cantBeMerged();
}
}
final VariantContext nextHapVC = h.getEventMap().get(nextStart);
if( nextHapVC != null && !nextHapVC.isSymbolic() ) { // something was found at the next location on this haplotype
if( mergeData.secondVC == null ) {
mergeData.secondVC = nextHapVC;
} else if( !nextHapVC.hasSameAllelesAs( mergeData.secondVC) ) {
return mergeData.cantBeMerged();
}
}
}
// don't try to merge overlapping events
if ( mergeData.firstVC != null && mergeData.secondVC != null && mergeData.firstVC.getEnd() >= mergeData.secondVC.getStart() )
return mergeData.cantBeMerged();
return mergeData;
}
// BUGBUG: make this merge function more general
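// Illustrative example (hypothetical coordinates, not from the original comments): merging a SNP
// A->C at position 10 with a SNP G->T at position 13 over reference ...ACTG... yields the merged
// alleles ref "ACTG" / alt "CCTT" spanning 10-13, with the intervening reference bases (11-12)
// copied into both alleles. If common-prefix trimming consumes the entire allele (e.g. an insertion
// cancelled by a neighboring deletion), null is returned so the caller can drop both events.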
protected VariantContext createMergedVariantContext( final VariantContext thisVC, final VariantContext nextVC, final byte[] ref, final GenomeLoc refLoc ) {
final int thisStart = thisVC.getStart();
final int nextStart = nextVC.getStart();
byte[] refBases = new byte[]{};
byte[] altBases = new byte[]{};
refBases = ArrayUtils.addAll(refBases, thisVC.getReference().getBases());
altBases = ArrayUtils.addAll(altBases, thisVC.getAlternateAllele(0).getBases());
int locus;
for( locus = thisStart + refBases.length; locus < nextStart; locus++ ) {
final byte refByte = ref[locus - refLoc.getStart()];
refBases = ArrayUtils.add(refBases, refByte);
altBases = ArrayUtils.add(altBases, refByte);
}
refBases = ArrayUtils.addAll(refBases, ArrayUtils.subarray(nextVC.getReference().getBases(), locus > nextStart ? 1 : 0, nextVC.getReference().getBases().length)); // special case of deletion including the padding base of consecutive indel
altBases = ArrayUtils.addAll(altBases, nextVC.getAlternateAllele(0).getBases());
int iii = 0;
if( refBases.length == altBases.length ) { // insertion + deletion of same length creates an MNP --> trim common prefix bases off the beginning of the allele
while( iii < refBases.length && refBases[iii] == altBases[iii] ) { iii++; }
if ( iii == refBases.length ) {
// we've become a null allele, such as with CA/C + A/AA -> CA/CA => after trimming there's nothing left
// so return a null variant context so we can eliminate the variants from consideration
return null;
}
}
final Allele refAllele = Allele.create( ArrayUtils.subarray(refBases, iii, refBases.length), true );
final Allele altAllele = Allele.create( ArrayUtils.subarray(altBases, iii, altBases.length), false );
return new VariantContextBuilder("merged", thisVC.getChr(), thisVC.getStart() + iii, nextVC.getEnd(), Arrays.asList(refAllele, altAllele)).make();
}
/**
* Update the event maps in all haplotypes, replacing update1 and update2 with replacement
*
* @param haplotypes the haplotypes whose event maps we need to update
* @param startPosKeySet a sorted set of start positions that we must update
* @param replacement a VariantContext to replace update1 and update2 with. Can be null, indicating that we just want to remove update1 and update2
* @param update1 the first VC we want to update
* @param update2 the second VC we want to update
*/
private void replaceVariantContextsInMap(final List<Haplotype> haplotypes,
final TreeSet<Integer> startPosKeySet,
final VariantContext replacement,
final VariantContext update1, final VariantContext update2) {
// remove the old event from the eventMap on every haplotype and the start pos key set, replace with merged event
for( final Haplotype h : haplotypes ) {
// if we had both events, add replacement. In some cases the haplotype may not have both
// events but they were still merged because the haplotype isn't a particularly informative
// haplotype in any case. The order of operations here is important because we are modifying the map
final boolean shouldAdd = h.getEventMap().containsKey(update1.getStart()) && h.getEventMap().containsKey(update2.getStart());
h.getEventMap().remove(update1.getStart());
h.getEventMap().remove(update2.getStart());
if ( shouldAdd && replacement != null ) {
h.getEventMap().addVC(replacement, false); // cannot merge with other events at the same position
}
}
startPosKeySet.remove(update1.getStart());
startPosKeySet.remove(update2.getStart());
if ( replacement != null ) startPosKeySet.add(replacement.getStart());
}
}
<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/HaplotypeCallerGenotypingEngineUnitTest.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller;
/**
* Created by IntelliJ IDEA.
* User: rpoplin
* Date: 3/15/12
*/
import htsjdk.samtools.reference.ReferenceSequenceFile;
import htsjdk.variant.variantcontext.*;
import org.broadinstitute.gatk.tools.walkers.genotyper.*;
import org.broadinstitute.gatk.utils.BaseTest;
import org.broadinstitute.gatk.utils.*;
import org.broadinstitute.gatk.utils.collections.Pair;
import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
import org.broadinstitute.gatk.utils.genotyper.AlleleList;
import org.broadinstitute.gatk.utils.genotyper.IndexedAlleleList;
import org.broadinstitute.gatk.utils.genotyper.IndexedSampleList;
import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods;
import org.broadinstitute.gatk.utils.haplotype.EventMap;
import org.broadinstitute.gatk.utils.haplotype.Haplotype;
import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
import org.broadinstitute.gatk.utils.smithwaterman.Parameters;
import org.broadinstitute.gatk.utils.smithwaterman.SWPairwiseAlignment;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.*;
/**
* Unit tests for {@link HaplotypeCallerGenotypingEngine}.
*/
public class HaplotypeCallerGenotypingEngineUnitTest extends BaseTest {
private static ReferenceSequenceFile seq;
private GenomeLocParser genomeLocParser;
@BeforeClass
public void init() throws FileNotFoundException {
// sequence
seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
genomeLocParser = new GenomeLocParser(seq);
}
private class BasicGenotypingTestProvider extends TestDataProvider {
byte[] ref;
byte[] hap;
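// maps expected variant start position -> expected event type: 'M' = SNP/MNP, 'I' = simple
// insertion, 'D' = simple deletion (interpreted by compareVCMaps below)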
Map<Integer,Byte> expected;
public BasicGenotypingTestProvider(String refString, String hapString, Map<Integer, Byte> expected) {
super(BasicGenotypingTestProvider.class, String.format("Haplotype to VCF test: ref = %s, alignment = %s", refString,hapString));
ref = refString.getBytes();
hap = hapString.getBytes();
this.expected = expected;
}
public Map<Integer,VariantContext> calcAlignment() {
final SWPairwiseAlignment alignment = new SWPairwiseAlignment(ref, hap, new Parameters(3,-1,-4, -1));
final Haplotype h = new Haplotype(hap, false, alignment.getAlignmentStart2wrt1(), alignment.getCigar());
return HaplotypeCallerGenotypingEngine.generateVCsFromAlignment(h, ref, genomeLocParser.createGenomeLoc("4", 1, 1 + ref.length), "name");
}
public String toString() {
return "REF:" + new String(ref) + ",ALT:" + new String(hap);
}
}
@DataProvider(name = "BasicGenotypingTestProvider")
public Object[][] makeBasicGenotypingTests() {
for( int contextSize : new int[]{0,1,5,9,24,36} ) {
Map<Integer, Byte> map = new HashMap<Integer, Byte>();
map.put(1 + contextSize, (byte)'M');
final String context = Utils.dupString('G', contextSize);
new BasicGenotypingTestProvider(context + "AGCTCGCATCGCGAGCATCGACTAGCCGATAG" + context, "CGCTCGCATCGCGAGCATCGACTAGCCGATAG", map);
}
for( int contextSize : new int[]{0,1,5,9,24,36} ) {
Map<Integer, Byte> map = new HashMap<Integer, Byte>();
map.put(2 + contextSize, (byte)'M');
map.put(21 + contextSize, (byte)'M');
final String context = Utils.dupString('G', contextSize);
new BasicGenotypingTestProvider(context + "AGCTCGCATCGCGAGCATCGACTAGCCGATAG", "ATCTCGCATCGCGAGCATCGCCTAGCCGATAG", map);
}
for( int contextSize : new int[]{0,1,5,9,24,36} ) {
Map<Integer, Byte> map = new HashMap<Integer, Byte>();
map.put(1 + contextSize, (byte)'M');
map.put(20 + contextSize, (byte)'I');
final String context = Utils.dupString('G', contextSize);
new BasicGenotypingTestProvider(context + "AGCTCGCATCGCGAGCATCGACTAGCCGATAG" + context, "CGCTCGCATCGCGAGCATCGACACTAGCCGATAG", map);
}
for( int contextSize : new int[]{0,1,5,9,24,36} ) {
Map<Integer, Byte> map = new HashMap<Integer, Byte>();
map.put(1 + contextSize, (byte)'M');
map.put(20 + contextSize, (byte)'D');
final String context = Utils.dupString('G', contextSize);
new BasicGenotypingTestProvider(context + "AGCTCGCATCGCGAGCATCGACTAGCCGATAG" + context, "CGCTCGCATCGCGAGCATCGCTAGCCGATAG", map);
}
for( int contextSize : new int[]{1,5,9,24,36} ) {
Map<Integer, Byte> map = new HashMap<Integer, Byte>();
map.put(1, (byte)'M');
map.put(20, (byte)'D');
final String context = Utils.dupString('G', contextSize);
new BasicGenotypingTestProvider("AGCTCGCATCGCGAGCATCGACTAGCCGATAG" + context, "CGCTCGCATCGCGAGCATCGCTAGCCGATAG", map);
}
for( int contextSize : new int[]{0,1,5,9,24,36} ) {
Map<Integer, Byte> map = new HashMap<Integer, Byte>();
map.put(2 + contextSize, (byte)'M');
map.put(20 + contextSize, (byte)'I');
map.put(30 + contextSize, (byte)'D');
final String context = Utils.dupString('G', contextSize);
new BasicGenotypingTestProvider(context + "AGCTCGCATCGCGAGCATCGACTAGCCGATAG" + context, "ACCTCGCATCGCGAGCATCGTTACTAGCCGATG", map);
}
for( int contextSize : new int[]{0,1,5,9,24,36} ) {
Map<Integer, Byte> map = new HashMap<Integer, Byte>();
map.put(1 + contextSize, (byte)'M');
map.put(20 + contextSize, (byte)'D');
map.put(28 + contextSize, (byte)'M');
final String context = Utils.dupString('G', contextSize);
new BasicGenotypingTestProvider(context + "AGCTCGCATCGCGAGCATCGACTAGCCGATAG" + context, "CGCTCGCATCGCGAGCATCGCTAGCCCATAG", map);
}
return BasicGenotypingTestProvider.getTests(BasicGenotypingTestProvider.class);
}
@Test(dataProvider = "BasicGenotypingTestProvider", enabled = true)
public void testHaplotypeToVCF(BasicGenotypingTestProvider cfg) {
Map<Integer,VariantContext> calculatedMap = cfg.calcAlignment();
Map<Integer,Byte> expectedMap = cfg.expected;
logger.warn(String.format("Test: %s", cfg.toString()));
if(!compareVCMaps(calculatedMap, expectedMap)) {
logger.warn("calc map = " + calculatedMap);
logger.warn("expected map = " + expectedMap);
}
Assert.assertTrue(compareVCMaps(calculatedMap, expectedMap),"" + cfg);
}
@Test(dataProvider="AddMiscellaneousDataProvider", enabled=false)
public void testAddMiscellaneousAllele(final String readBases, final int readOffset,
final String ref, final int refOffset,
final String referenceAllele, final String[] alternatives, final double[] likelihoods, final double[] expected) {
final byte baseQual = (byte)30;
final byte[] baseQuals = Utils.dupBytes(baseQual, readBases.length());
final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(readBases.getBytes(), baseQuals, readBases.length() + "M");
final GenomeLoc loc = new UnvalidatingGenomeLoc("20",0,refOffset,refOffset);
final ReadBackedPileup pileup = new ReadBackedPileupImpl(loc,Collections.singletonList(read),readOffset);
final VariantContextBuilder vcb = new VariantContextBuilder();
final GenotypeBuilder gb = new GenotypeBuilder();
final List<String> alleleStrings = new ArrayList<>( 1 + alternatives.length);
alleleStrings.add(referenceAllele);
alleleStrings.addAll(Arrays.asList(alternatives));
gb.AD(new int[] { 1 });
gb.DP(1);
gb.PL(likelihoods);
vcb.alleles(alleleStrings);
vcb.loc("20",refOffset,refOffset + referenceAllele.length() -1);
vcb.genotypes(gb.make());
final VariantContext vc = vcb.make();
final VariantContext updatedVc = null; // GenotypingEngine.addMiscellaneousAllele(vc,pileup,ref.getBytes(),0);
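// NOTE: this test is disabled (enabled=false on the @Test annotation) and the original
// addMiscellaneousAllele call is commented out above, so updatedVc is left null here; the
// assertions below only matter if the test is re-enabled with a real call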
final GenotypeLikelihoods updatedLikelihoods = updatedVc.getGenotype(0).getLikelihoods();
Assert.assertEquals(updatedLikelihoods.getAsVector().length, expected.length);
final double[] updatedLikelihoodsArray = updatedVc.getGenotype(0).getLikelihoods().getAsVector();
for (int i = 0; i < updatedLikelihoodsArray.length; i++) {
Assert.assertEquals(updatedLikelihoodsArray[i],expected[i],0.0001);
}
Allele altAllele = null;
for (final Allele allele : updatedVc.getAlleles())
if (allele.isSymbolic() && allele.getBaseString().equals(GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE_NAME))
altAllele = allele;
Assert.assertNotNull(altAllele);
}
@DataProvider(name="AddMiscellaneousDataProvider")
public Iterator<Object[]> addMiscellaneousAlleleDataProvider() {
return Arrays.asList(ADD_MISCELLANEOUS_ALLELE_DATA).iterator();
}
private static final double MATCH_LnLK = QualityUtils.qualToProbLog10((byte)30);
private static final double MISS_LnLK = QualityUtils.qualToErrorProbLog10((byte)30);
private static final Object[][] ADD_MISCELLANEOUS_ALLELE_DATA = new Object[][] {
new Object[] {"ACTG", 0,"ACTGTGAGTATTCC",0,"A",new String[]{}, new double[] {MATCH_LnLK * MATCH_LnLK}, 6,
new double[] {MATCH_LnLK * MATCH_LnLK,MATCH_LnLK * MISS_LnLK, MISS_LnLK * MISS_LnLK}}
};
/**
* Private function to compare maps of VariantContexts; it only checks the types and start locations of the VariantContexts
*/
private boolean compareVCMaps(Map<Integer, VariantContext> calc, Map<Integer, Byte> expected) {
if( !calc.keySet().equals(expected.keySet()) ) { return false; } // sanity check
for( Integer loc : expected.keySet() ) {
Byte type = expected.get(loc);
switch( type ) {
case 'I':
if( !calc.get(loc).isSimpleInsertion() ) { return false; }
break;
case 'D':
if( !calc.get(loc).isSimpleDeletion() ) { return false; }
break;
case 'M':
if( !(calc.get(loc).isMNP() || calc.get(loc).isSNP()) ) { return false; }
break;
default:
return false;
}
}
return true;
}
@DataProvider(name = "CreateHaplotypeMappingProvider")
public Object[][] makeCreateHaplotypeMappingData() {
List<Object[]> tests = new ArrayList<Object[]>();
final Set<Haplotype> haplotypes = new HashSet<>();
final Allele ref = Allele.create("A", true);
final Allele altC = Allele.create("C", false);
final Allele altT = Allele.create("T", false);
final Haplotype AtoC1 = new Haplotype("AACAA".getBytes());
final VariantContext vc1 = new VariantContextBuilder().chr("20").start(3).stop(3).alleles(Arrays.asList(ref, altC)).make();
AtoC1.setEventMap(new EventMap(Arrays.asList(vc1)));
AtoC1.getEventMap().put(3, vc1);
haplotypes.add(AtoC1);
final Haplotype AtoC2 = new Haplotype("AAACA".getBytes());
final VariantContext vc2 = new VariantContextBuilder().chr("20").start(4).stop(4).alleles(Arrays.asList(ref, altT)).make();
AtoC2.setEventMap(new EventMap(Arrays.asList(vc2)));
AtoC2.getEventMap().put(4, vc2);
haplotypes.add(AtoC2);
tests.add(new Object[]{vc1, haplotypes, AtoC1});
tests.add(new Object[]{vc2, haplotypes, AtoC2});
tests.add(new Object[]{new VariantContextBuilder().chr("20").start(1).stop(1).alleles(Arrays.asList(ref, altT)).make(), haplotypes, null});
return tests.toArray(new Object[][]{});
}
@Test(dataProvider="CreateHaplotypeMappingProvider")
public void testCreateHaplotypeMapping(final VariantContext vc, final Set<Haplotype> haplotypes, final Haplotype expected) {
final Map<VariantContext, Set<Haplotype>> mapping = HaplotypeCallerGenotypingEngine.constructHaplotypeMapping(Arrays.asList(vc), haplotypes);
final Set<Haplotype> actual = mapping.get(vc);
if ( expected == null )
Assert.assertTrue(actual.isEmpty(), actual.toString());
else {
Assert.assertEquals(actual.size(), 1);
Assert.assertEquals(actual.iterator().next(), expected);
}
}
@DataProvider(name = "ConstructPhaseSetMappingProvider")
public Object[][] makeConstructPhaseSetMappingData() {
List<Object[]> tests = new ArrayList<Object[]>();
final Allele ref = Allele.create("A", true);
final Allele altC = Allele.create("C", false);
final Allele altT = Allele.create("T", false);
final VariantContext vc1 = new VariantContextBuilder().chr("20").start(1).stop(1).alleles(Arrays.asList(ref, altC)).make();
final VariantContext vc2 = new VariantContextBuilder().chr("20").start(2).stop(2).alleles(Arrays.asList(ref, altC)).make();
final VariantContext vc3 = new VariantContextBuilder().chr("20").start(3).stop(3).alleles(Arrays.asList(ref, altT)).make();
final VariantContext vc4 = new VariantContextBuilder().chr("20").start(4).stop(4).alleles(Arrays.asList(ref, altC)).make();
final List<VariantContext> calls = Arrays.asList(vc2, vc3, vc4);
final Haplotype pos1 = new Haplotype("CAAAA".getBytes());
pos1.setEventMap(new EventMap(Arrays.asList(vc1)));
pos1.getEventMap().put(1, vc1);
final Haplotype pos2 = new Haplotype("ACAAA".getBytes());
pos2.setEventMap(new EventMap(Arrays.asList(vc2)));
pos2.getEventMap().put(2, vc2);
final Haplotype pos3 = new Haplotype("AACAA".getBytes());
pos3.setEventMap(new EventMap(Arrays.asList(vc3)));
pos3.getEventMap().put(3, vc3);
final Haplotype pos4 = new Haplotype("AAACA".getBytes());
pos4.setEventMap(new EventMap(Arrays.asList(vc4)));
pos4.getEventMap().put(4, vc4);
final Haplotype pos24 = new Haplotype("ACACA".getBytes());
pos24.setEventMap(new EventMap(Arrays.asList(vc2, vc4)));
pos24.getEventMap().put(2, vc2);
pos24.getEventMap().put(4, vc4);
final Haplotype pos34 = new Haplotype("AACCA".getBytes());
pos34.setEventMap(new EventMap(Arrays.asList(vc3, vc4)));
pos34.getEventMap().put(3, vc3);
pos34.getEventMap().put(4, vc4);
final Haplotype pos234 = new Haplotype("ACCCA".getBytes());
pos234.setEventMap(new EventMap(Arrays.asList(vc2, vc3, vc4)));
pos234.getEventMap().put(2, vc2);
pos234.getEventMap().put(3, vc3);
pos234.getEventMap().put(4, vc4);
final Map<VariantContext, Set<Haplotype>> haplotypeMap = new HashMap<>();
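// each Object[] row below is {calls, haplotypeMap, totalHaplotypes, expectedMapSize,
// expectedNumGroups, expectedNum01, expectedNum10}, matching the parameters of
// testConstructPhaseSetMapping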
// test no phased variants #1
final Set<Haplotype> haplotypes2 = new HashSet<>();
haplotypes2.add(pos2);
haplotypeMap.put(vc2, haplotypes2);
tests.add(new Object[]{Arrays.asList(vc2), new HashMap<>(haplotypeMap), 2, 0, 0, 0, 0});
// test no phased variants #2
final Set<Haplotype> haplotypes3 = new HashSet<>();
haplotypes3.add(pos3);
haplotypeMap.put(vc3, haplotypes3);
tests.add(new Object[]{Arrays.asList(vc2, vc3), new HashMap<>(haplotypeMap), 3, 0, 0, 0, 0});
// test opposite phase
tests.add(new Object[]{Arrays.asList(vc2, vc3), new HashMap<>(haplotypeMap), 2, 2, 1, 1, 1});
// test no phased variants #3
final Set<Haplotype> haplotypes4 = new HashSet<>();
haplotypes4.add(pos4);
haplotypeMap.put(vc4, haplotypes4);
tests.add(new Object[]{calls, new HashMap<>(haplotypeMap), 3, 0, 0, 0, 0});
// test mixture
final Set<Haplotype> haplotypes24 = new HashSet<>();
haplotypes24.add(pos24);
haplotypeMap.put(vc2, haplotypes24);
haplotypeMap.put(vc4, haplotypes24);
tests.add(new Object[]{calls, new HashMap<>(haplotypeMap), 2, 3, 1, 2, 1});
// test 2 hets
haplotypeMap.remove(vc3);
tests.add(new Object[]{Arrays.asList(vc2, vc4), new HashMap<>(haplotypeMap), 1, 2, 1, 2, 0});
// test 2 with opposite phase
final Set<Haplotype> haplotypes1 = new HashSet<>();
haplotypes1.add(pos1);
haplotypeMap.put(vc1, haplotypes1);
tests.add(new Object[]{Arrays.asList(vc1, vc2, vc4), new HashMap<>(haplotypeMap), 2, 3, 1, 1, 2});
// test homs around a het
final Set<Haplotype> haplotypes2hom = new HashSet<>();
haplotypes2hom.add(pos24);
haplotypes2hom.add(pos234);
final Set<Haplotype> haplotypes4hom = new HashSet<>();
haplotypes4hom.add(pos24);
haplotypes4hom.add(pos234);
final Set<Haplotype> haplotypes3het = new HashSet<>();
haplotypes3het.add(pos234);
haplotypeMap.put(vc2, haplotypes2hom);
haplotypeMap.put(vc3, haplotypes3het);
haplotypeMap.put(vc4, haplotypes4hom);
tests.add(new Object[]{calls, new HashMap<>(haplotypeMap), 2, 3, 1, 3, 0});
// test hets around a hom
final Set<Haplotype> haplotypes2het = new HashSet<>();
haplotypes2het.add(pos234);
final Set<Haplotype> haplotypes4het = new HashSet<>();
haplotypes4het.add(pos234);
final Set<Haplotype> haplotypes3hom = new HashSet<>();
haplotypes3hom.add(pos3);
haplotypes3hom.add(pos234);
haplotypeMap.put(vc2, haplotypes2het);
haplotypeMap.put(vc3, haplotypes3hom);
haplotypeMap.put(vc4, haplotypes4het);
tests.add(new Object[]{calls, new HashMap<>(haplotypeMap), 2, 3, 1, 3, 0});
// test no phased variants around a hom
final Set<Haplotype> haplotypes2incomplete = new HashSet<>();
haplotypes2incomplete.add(pos24);
final Set<Haplotype> haplotypes3incomplete = new HashSet<>();
haplotypes3incomplete.add(pos34);
final Set<Haplotype> haplotypes4complete = new HashSet<>();
haplotypes4complete.add(pos24);
haplotypes4complete.add(pos34);
haplotypes4complete.add(pos234);
haplotypeMap.put(vc2, haplotypes2incomplete);
haplotypeMap.put(vc3, haplotypes3incomplete);
haplotypeMap.put(vc4, haplotypes4complete);
tests.add(new Object[]{calls, new HashMap<>(haplotypeMap), 0, 0, 0, 0, 0});
return tests.toArray(new Object[][]{});
}
@Test(dataProvider="ConstructPhaseSetMappingProvider")
public void testConstructPhaseSetMapping(final List<VariantContext> calls,
final Map<VariantContext, Set<Haplotype>> haplotypeMap,
final int totalHaplotypes,
final int expectedMapSize,
final int expectedNumGroups,
final int expectedNum01,
final int expectedNum10) {
final Map<VariantContext, Pair<Integer, String>> actualPhaseSetMapping = new HashMap<>();
final int actualNumGroups = HaplotypeCallerGenotypingEngine.constructPhaseSetMapping(calls, haplotypeMap, totalHaplotypes, actualPhaseSetMapping);
Assert.assertEquals(actualNumGroups, expectedNumGroups);
Assert.assertEquals(actualPhaseSetMapping.size(), expectedMapSize);
int num01 = 0, num10 = 0;
for ( final Pair<Integer, String> phase : actualPhaseSetMapping.values() ) {
if ( phase.second.equals("0|1") )
num01++;
else if ( phase.second.equals("1|0") )
num10++;
}
Assert.assertEquals(num01, expectedNum01);
Assert.assertEquals(num10, expectedNum10);
}
@DataProvider(name = "ConstructPhaseGroupsProvider")
public Object[][] makeConstructPhaseGroupsData() {
List<Object[]> tests = new ArrayList<Object[]>();
final Allele ref = Allele.create("A", true);
final Allele altC = Allele.create("C", false);
final Genotype g1 = new GenotypeBuilder().alleles(Arrays.asList(ref, altC)).make();
final VariantContext vc1 = new VariantContextBuilder().chr("20").start(1).stop(1).alleles(Arrays.asList(ref, altC)).genotypes(g1).make();
final Genotype g2 = new GenotypeBuilder().alleles(Arrays.asList(ref, altC)).make();
final VariantContext vc2 = new VariantContextBuilder().chr("20").start(2).stop(2).alleles(Arrays.asList(ref, altC)).genotypes(g2).make();
final Genotype g3 = new GenotypeBuilder().alleles(Arrays.asList(ref, altC)).make();
final VariantContext vc3 = new VariantContextBuilder().chr("20").start(3).stop(3).alleles(Arrays.asList(ref, altC)).genotypes(g3).make();
final List<VariantContext> calls = Arrays.asList(vc1, vc2, vc3);
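// each Object[] row below is {calls, phaseMap, endIndex, expectedNumGroups, expectedGroupSize},
// matching the parameters of testConstructPhaseGroups; expectedNumGroups == -1 means an
// IllegalStateException is expected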
// test no phased variants, empty map
final Map<VariantContext, Pair<Integer, String>> nonePhased1 = new HashMap<>();
tests.add(new Object[]{calls, nonePhased1, 0, 0, 0});
// test no phased variants, full map, exception expected
final Map<VariantContext, Pair<Integer, String>> nonePhased2 = new HashMap<>();
nonePhased2.put(vc1, new Pair<>(0, "0/1"));
nonePhased2.put(vc2, new Pair<>(1, "0/1"));
nonePhased2.put(vc3, new Pair<>(2, "0/1"));
tests.add(new Object[]{calls, nonePhased2, 3, -1, -1});
// test 2 phased variants
final Map<VariantContext, Pair<Integer, String>> twoPhased = new HashMap<>();
twoPhased.put(vc1, new Pair<>(0, "0/1"));
twoPhased.put(vc2, new Pair<>(0, "0/1"));
tests.add(new Object[]{calls, twoPhased, 1, 1, 2});
// test all phased variants
final Map<VariantContext, Pair<Integer, String>> allPhased = new HashMap<>();
allPhased.put(vc1, new Pair<>(0, "0/1"));
allPhased.put(vc2, new Pair<>(0, "0/1"));
allPhased.put(vc3, new Pair<>(0, "0/1"));
tests.add(new Object[]{calls, allPhased, 1, 1, 3});
return tests.toArray(new Object[][]{});
}
@Test(dataProvider="ConstructPhaseGroupsProvider")
public void testConstructPhaseGroups(final List<VariantContext> calls,
final Map<VariantContext, Pair<Integer, String>> phaseMap,
final int endIndex,
final int expectedNumGroups,
final int expectedGroupSize) {
final List<VariantContext> actualPhasedCalls;
try {
actualPhasedCalls = HaplotypeCallerGenotypingEngine.constructPhaseGroups(calls, phaseMap, endIndex);
} catch (IllegalStateException e) {
Assert.assertEquals(-1, expectedNumGroups);
return;
}
final Set<String> uniqueGroups = new HashSet<>();
int counter = 0;
for ( final VariantContext call : actualPhasedCalls ) {
for ( final Genotype g : call.getGenotypes() ) {
if ( g.hasExtendedAttribute(GATKVCFConstants.HAPLOTYPE_CALLER_PHASING_ID_KEY) ) {
uniqueGroups.add(g.getExtendedAttribute(GATKVCFConstants.HAPLOTYPE_CALLER_PHASING_ID_KEY).toString());
counter++;
}
}
}
Assert.assertEquals(uniqueGroups.size(), expectedNumGroups);
Assert.assertEquals(counter, expectedGroupSize);
}
@Test
public void testExcessAlternativeAllelesKeepRef(){
// prep data
final Allele ref = Allele.create("A", true);
final Allele altC = Allele.create("C", false);
final Allele altG = Allele.create("G", false);
final Allele altT = Allele.create("T", false);
final AlleleList<Allele> indexedAlleleList = new IndexedAlleleList<>(altC, altG, altT, ref);// specifically make the ref allele not at index 0
final IndexedSampleList indexedSampleList = new IndexedSampleList("Dummy");
final List<GATKSAMRecord> reads = new ArrayList<>();
for (int i=0; i<10; ++i) {
reads.add(GATKSAMRecord.createRandomRead(101));
}
final Map<String, List<GATKSAMRecord>> sampleToReads = Collections.singletonMap(indexedSampleList.sampleAt(0), reads);
final ReadLikelihoods<Allele> readLikelihoods = new ReadLikelihoods<>(indexedSampleList, indexedAlleleList, sampleToReads);
final PloidyModel ploidyModel = new HomogeneousPloidyModel(indexedSampleList, 2);
final GenotypingModel genotypingModel = new InfiniteRandomMatingPopulationModel();
final GenotypingLikelihoods<Allele> genotypeLikelihoods = genotypingModel.calculateLikelihoods(readLikelihoods, new GenotypingData<>(ploidyModel, readLikelihoods));
// test
final Set<Allele> excessAltAlleles = HaplotypeCallerGenotypingEngine.excessAlternativeAlleles(genotypeLikelihoods, 2);
Assert.assertFalse(excessAltAlleles.contains(ref));
Assert.assertEquals(excessAltAlleles.size(), 1);
}
@Test
public void testReduceNumberOfAlternativeAllelesBasedOnHaplotypesScores(){
// first have a list of alleles, one ref, several alt
final Allele ref = Allele.create("A", true);
final Allele altC = Allele.create("C", false);
final Allele altT = Allele.create("T", false);
final Allele altT2 = Allele.create("TT", false);
final Allele altG = Allele.create("G", false);
// then create several haplotypes, assign ad-hoc scores
final Haplotype hapRef = new Haplotype("AAAAA".getBytes());
hapRef.setScore(Double.MAX_VALUE);
// test case where two alleles tie on both their best and second-best haplotype scores
final Haplotype hapT = new Haplotype("TAAAA".getBytes());
hapT.setScore(-2.0);
final Haplotype hapTAnother = new Haplotype("TAAAT".getBytes());
hapTAnother.setScore(-3.0);
final Haplotype hapT2 = new Haplotype("TTAAA".getBytes());
hapT2.setScore(-2.0);
final Haplotype hapT2Another = new Haplotype("TTAAT".getBytes());
hapT2Another.setScore(-3.0);
final Haplotype hapC = new Haplotype("CAAAA".getBytes());
hapC.setScore(-3.0);
// for the case where there is a tie in the highest haplotype score
final Haplotype hapG = new Haplotype("GAAAA".getBytes());
hapG.setScore(-3.0);
final Haplotype hapGAnother = new Haplotype("GAAAG".getBytes());
hapGAnother.setScore(-5.0);
final Map<Allele, List<Haplotype>> alleleMapper = new LinkedHashMap<>();
alleleMapper.put(ref, Arrays.asList(hapRef));
alleleMapper.put(altC, Arrays.asList(hapC));
alleleMapper.put(altT, Arrays.asList(hapT, hapTAnother));
alleleMapper.put(altT2, Arrays.asList(hapT2, hapT2Another));
alleleMapper.put(altG, Arrays.asList(hapG, hapGAnother));
List<Allele> allelesToKeep = HaplotypeCallerGenotypingEngine.whichAllelesToKeepBasedonHapScores(alleleMapper, 5);
Assert.assertEquals(allelesToKeep.size(), 5);
Iterator<Allele> it = allelesToKeep.iterator();
Assert.assertEquals(it.next(), ref);
Assert.assertEquals(it.next(), altC);
Assert.assertEquals(it.next(), altT);
Assert.assertEquals(it.next(), altT2);
Assert.assertEquals(it.next(), altG);
allelesToKeep = HaplotypeCallerGenotypingEngine.whichAllelesToKeepBasedonHapScores(alleleMapper, 4);
Assert.assertEquals(allelesToKeep.size(), 4);
it = allelesToKeep.iterator();
Assert.assertEquals(it.next(), ref);
Assert.assertEquals(it.next(), altT);
Assert.assertEquals(it.next(), altT2);
Assert.assertEquals(it.next(), altG);
allelesToKeep = HaplotypeCallerGenotypingEngine.whichAllelesToKeepBasedonHapScores(alleleMapper, 3);
Assert.assertEquals(allelesToKeep.size(), 3);
it = allelesToKeep.iterator();
Assert.assertEquals(it.next(), ref);
Assert.assertEquals(it.next(), altT);
Assert.assertEquals(it.next(), altT2);
allelesToKeep = HaplotypeCallerGenotypingEngine.whichAllelesToKeepBasedonHapScores(alleleMapper, 2);
Assert.assertEquals(allelesToKeep.size(), 2);
it = allelesToKeep.iterator();
Assert.assertEquals(it.next(), ref);
Assert.assertEquals(it.next(), altT);
allelesToKeep = HaplotypeCallerGenotypingEngine.whichAllelesToKeepBasedonHapScores(alleleMapper, 1);
Assert.assertEquals(allelesToKeep.size(), 1);
it = allelesToKeep.iterator();
Assert.assertEquals(it.next(), ref);
}
@Test
public void testRemoveExcessiveAltAlleleFromVC(){
final VariantContext originalVC = new VariantContextBuilder("source", "1", 1000000, 1000000, Arrays.asList(Allele.create("A", true), Allele.create("T", false), Allele.create("C", false), Allele.create("G", false))).make();
final VariantContext reducedVC = HaplotypeCallerGenotypingEngine.removeExcessAltAllelesFromVC(originalVC, Arrays.asList(Allele.create("A", true), Allele.create("T", false), Allele.create("C", false)));
Assert.assertEquals(reducedVC.getNAlleles(), 3);
Assert.assertTrue(reducedVC.getAlleles().containsAll(Arrays.asList(Allele.create("A", true), Allele.create("T", false), Allele.create("C", false))));
}
}
<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/variantutils/SelectVariantsIntegrationTest.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.variantutils;
import org.broadinstitute.gatk.engine.walkers.WalkerTest;
import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.io.File;
import java.util.Arrays;
public class SelectVariantsIntegrationTest extends WalkerTest {
public static String baseTestString(String args) {
return "-T SelectVariants -R " + b36KGReference + " -L 1 -o %s --no_cmdline_in_header" + args;
}
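// expected output MD5s that are shared by more than one test below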
private static final String SAMPLE_EXCLUSION_MD5 = "df3b373d2ae302d075c07332a4b9438c";
private static final String INVERT_SELECTION_MD5 = "7f6288a198e618ad540fa9a8c7c1a031";
private static final String MAX_FILTERED_GT_SELECTION_MD5 = "5804066a1af1639d9c8bc4744928d80a";
private static final String MIN_FILTERED_GT_SELECTION_MD5 = "9b8003cb3d6457be2d0cc5b9b4f5ffe8";
private static final String NO_CALL_FILTERING_KEEP_ONE = "6e2401190c5ada6a3bed2640c068f43b";
private static final String NO_CALL_FILTERING_KEEP_TWO = "6bced1ab6a3d58f1fd905b7f601987a3";
@Test
public void testDiscordanceNoSampleSpecified() {
String testFile = privateTestDir + "NA12878.hg19.example1.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + hg19Reference + " -L 20:1012700-1020000 --variant "
+ b37hapmapGenotypes + " -disc " + testFile
+ " -o %s --no_cmdline_in_header -U LENIENT_VCF_PROCESSING",
1,
Arrays.asList("9e08f761d2ba9a2bae9c279701aabc70")
);
spec.disableShadowBCF();
executeTest("testDiscordanceNoSampleSpecified--" + testFile, spec);
}
@Test
public void testExcludeIntervalsPadding(){
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + hg19Reference + " -L 1:1715011-1734970 -XL 1:1725305 -ip 200 --variant "
+ b37hapmapGenotypes + " -o %s --no_cmdline_in_header",
1,
Arrays.asList("2e31c0be0d639d7110e639a11c03f4ca")
);
executeTest("testExcludeIntervalsPadding--", spec);
}
@Test
public void testRepeatedLineSelection() {
String testfile = privateTestDir + "test.dup.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
baseTestString(" -sn A -sn B -sn C --variant " + testfile),
1,
Arrays.asList("496e17163d2608b86661518e333eadc4")
);
executeTest("testRepeatedLineSelection--" + testfile, spec);
}
@Test
public void testDiscordance() {
String testFile = privateTestDir + "NA12878.hg19.example1.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + hg19Reference + " -sn NA12878 -L 20:1012700-1020000 --variant "
+ b37hapmapGenotypes + " -disc " + testFile
+ " -o %s --no_cmdline_in_header -U LENIENT_VCF_PROCESSING",
1,
Arrays.asList("c9aa80cabf036a268a032a61d398cdd5")
);
spec.disableShadowBCF();
executeTest("testDiscordance--" + testFile, spec);
}
@Test
public void testComplexSelection() {
String testfile = validationDataLocation + "test.filtered.maf_annotated.vcf";
String samplesFile = validationDataLocation + "SelectVariants.samples.txt";
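// -sn selects a named sample, -se selects samples matching a regular expression, -sf adds samples listed
// in a file, -env excludes non-variant sites and -ef excludes filtered sites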
WalkerTestSpec spec = new WalkerTestSpec(
baseTestString(" -sn A -se '[CDH]' -sf " + samplesFile + " -env -ef -select 'DP < 250' --variant " + testfile),
1,
Arrays.asList("46451eaf6b7d02a5462c0a2463db2402")
);
spec.disableShadowBCF();
executeTest("testComplexSelection--" + testfile, spec);
}
@Test
public void testComplexSelectionWithNonExistingSamples() {
String testfile = validationDataLocation + "test.filtered.maf_annotated.vcf";
String samplesFile = validationDataLocation + "SelectVariants.samples.txt";
WalkerTestSpec spec = new WalkerTestSpec(
baseTestString(" --ALLOW_NONOVERLAPPING_COMMAND_LINE_SAMPLES -sn A -se '[CDH]' -sn Z -sn T -sf " + samplesFile + " -env -ef -select 'DP < 250' --variant " + testfile),
1,
Arrays.asList("46451eaf6b7d02a5462c0a2463db2402")
);
spec.disableShadowBCF();
executeTest("testComplexSelectionWithNonExistingSamples--" + testfile, spec);
}
@Test
public void testNonExistingFieldSelection() {
String testfile = validationDataLocation + "test.filtered.maf_annotated.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
baseTestString(" -env -ef -select 'foo!=0||DP>0' --variant " + testfile),
1,
Arrays.asList("c98b30546c994aecd05c91dfbd64e665") // should yield empty vcf because the foo!=0 will yield complete expression false
);
spec.disableShadowBCF();
executeTest("testNonExistingSelection--" + testfile, spec);
}
/**
* Test excluding samples from file and sample name
*/
@Test
public void testSampleExclusionFromFileAndSeparateSample() {
String testfile = validationDataLocation + "test.filtered.maf_annotated.vcf";
String samplesFile = validationDataLocation + "SelectVariants.samples.txt";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b36KGReference + " -L 1:1-1000000 -o %s --no_cmdline_in_header -xl_sn A -xl_sf " + samplesFile + " --variant " + testfile,
1,
Arrays.asList("6837e14be8c53d4b065e9b087b1ea851")
);
spec.disableShadowBCF();
executeTest("testSampleExclusionFromFileAndSeparateSample--" + testfile, spec);
}
/**
* Test excluding samples from file
*/
@Test
public void testSampleExclusionJustFromFile() {
String testfile = validationDataLocation + "test.filtered.maf_annotated.vcf";
String samplesFile = validationDataLocation + "SelectVariants.samples.txt";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b36KGReference + " -L 1:1-1000000 -o %s --no_cmdline_in_header -xl_sf " + samplesFile + " --variant " + testfile,
1,
Arrays.asList("5e7114974aff723c7a04cde5c2e3f90c")
);
spec.disableShadowBCF();
executeTest("testSampleExclusionJustFromFile--" + testfile, spec);
}
/**
* Test excluding samples from expression
*/
@Test
public void testSampleExclusionJustFromExpression() {
String testfile = validationDataLocation + "test.filtered.maf_annotated.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b36KGReference + " -L 1:1-1000000 -o %s --no_cmdline_in_header -xl_se '[CDH]' --variant " + testfile,
1,
Arrays.asList(SAMPLE_EXCLUSION_MD5)
);
spec.disableShadowBCF();
executeTest("testSampleExclusionJustFromExpression--" + testfile, spec);
}
/**
* Test excluding samples from negation expression
*/
@Test
public void testSampleExclusionJustFromNegationExpression() {
String testfile = validationDataLocation + "test.filtered.maf_annotated.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b36KGReference + " -L 1:1-1000000 -o %s --no_cmdline_in_header -se '[^CDH]' --variant " + testfile,
1,
Arrays.asList(SAMPLE_EXCLUSION_MD5)
);
spec.disableShadowBCF();
executeTest("testSampleExclusionJustFromRegexExpression--" + testfile, spec);
}
/**
* Test including samples that are not in the VCF
*/
@Test
public void testSampleInclusionWithNonexistingSamples() {
String testfile = validationDataLocation + "test.filtered.maf_annotated.vcf";
String samplesFile = validationDataLocation + "SelectVariants.samples.txt";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b36KGReference + " -L 1:1-1000000 -o %s --no_cmdline_in_header -sn A -sn Z -sn Q -sf " + samplesFile + " --variant " + testfile,
1,
UserException.BadInput.class
);
spec.disableShadowBCF();
executeTest("testSampleInclusionWithNonexistingSamples--" + testfile, spec);
}
@Test
public void testConcordance() {
String testFile = privateTestDir + "NA12878.hg19.example1.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + hg19Reference + " -sn NA12878 -L 20:1012700-1020000 -conc "
+ b37hapmapGenotypes + " --variant " + testFile
+ " -o %s --no_cmdline_in_header -U LENIENT_VCF_PROCESSING",
1,
Arrays.asList("24114c01b81fc0052ee36523ccd1d338")
);
spec.disableShadowBCF();
executeTest("testConcordance--" + testFile, spec);
}
/**
* Test including variant types.
*/
@Test
public void testVariantTypeSelection() {
String testFile = privateTestDir + "complexExample1.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b36KGReference + " -restrictAllelesTo MULTIALLELIC -selectType MIXED --variant " + testFile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList("2837de9b1fdde19a4692e7c648a26423")
);
executeTest("testVariantTypeSelection--" + testFile, spec);
}
/**
* Test excluding indels that are larger than the specified size
*/
@Test
public void testMaxIndelLengthSelection() {
String testFile = privateTestDir + "complexExample1.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b36KGReference + " -selectType INDEL --variant " + testFile + " -o %s --no_cmdline_in_header --maxIndelSize 2",
1,
Arrays.asList("2837de9b1fdde19a4692e7c648a26423")
);
executeTest("testMaxIndelLengthSelection--" + testFile, spec);
}
/**
* Test excluding indels that are smaller than the specified size
*/
@Test
public void testMinIndelLengthSelection() {
String testFile = privateTestDir + "complexExample1.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b36KGReference + " -selectType INDEL --variant " + testFile + " -o %s --no_cmdline_in_header --minIndelSize 2",
1,
Arrays.asList("0425c469e9f83aa33bc6d77586f97046")
);
executeTest("testMinIndelLengthSelection--" + testFile, spec);
}
@Test
public void testUsingDbsnpName() {
String testFile = privateTestDir + "combine.3.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b36KGReference + " -sn NA12892 --variant:dbsnp " + testFile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList("384d267b4cbf22b380b5c78c2fd1ceb8")
);
executeTest("testUsingDbsnpName--" + testFile, spec);
}
@Test
public void testRemoveMLE() {
String testFile = privateTestDir + "vcfexample.withMLE.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b36KGReference + " -sn NA12892 --variant " + testFile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList("384d267b4cbf22b380b5c78c2fd1ceb8")
);
executeTest("testRemoveMLE--" + testFile, spec);
}
@Test
public void testKeepOriginalAC() {
String testFile = privateTestDir + "vcfexample.loseAlleleInSelection.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants --keepOriginalAC -R " + b36KGReference + " -sn NA12892 --variant " + testFile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList("e37dbd8dd97ec6d7763ddefd4e0fb3f6")
);
executeTest("testKeepOriginalAC--" + testFile, spec);
}
@Test
public void testKeepOriginalACAndENV() {
String testFile = privateTestDir + "vcfexample.loseAlleleInSelection.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants --keepOriginalAC -env -trimAlternates -R " + b36KGReference + " -sn NA12892 --variant " + testFile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList("02a57132d7a7a1d1e8a969c233b4881e")
);
executeTest("testKeepOriginalACAndENV--" + testFile, spec);
}
@Test
public void testKeepOriginalDP() {
String testFile = privateTestDir + "CEUtrioTest.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants --keepOriginalDP -R " + b37KGReference + " -sn NA12892 --variant " + testFile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList("1dd3944a62db62fc163c3518fa828daf")
);
executeTest("testKeepOriginalDP--" + testFile, spec);
}
@Test
public void testMultipleRecordsAtOnePosition() {
String testFile = privateTestDir + "selectVariants.onePosition.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b36KGReference + " -select 'KG_FREQ < 0.5' --variant " + testFile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList("1b468e69d8df060c4dba006f00b8ea33")
);
executeTest("testMultipleRecordsAtOnePosition--" + testFile, spec);
}
@Test
public void testNoGTs() {
String testFile = privateTestDir + "vcf4.1.example.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b37KGReference + " --variant " + testFile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList("c21f7f6b5bd22a321de21539bc34c0aa")
);
executeTest("testNoGTs--" + testFile, spec);
}
@Test
public void testSelectFromMultiAllelic() {
String testfile = privateTestDir + "multi-allelic.bi-allelicInGIH.vcf";
String samplesFile = privateTestDir + "GIH.samples.list";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b37KGReference + " -o %s --no_cmdline_in_header -sf " + samplesFile + " --excludeNonVariants -trimAlternates --variant " + testfile,
1,
Arrays.asList("496a6a3ea6097f62c6ac4a6e8503ed5d")
);
executeTest("test select from multi allelic with excludeNonVariants --" + testfile, spec);
}
@Test
public void testMultiAllelicAnnotationOrdering() {
String testfile = privateTestDir + "multi-allelic-ordering.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b37KGReference + " -o %s --no_cmdline_in_header " +
"-sn SAMPLE-CC -sn SAMPLE-CT -sn SAMPLE-CA --excludeNonVariants --variant " + testfile,
1,
Arrays.asList("cd2e8a223140fa88a7bc049a990d571b")
);
executeTest("test multi allelic annotation ordering --" + testfile, spec);
}
@Test()
public void testFileWithoutInfoLineInHeader() {
testFileWithoutInfoLineInHeader("testFileWithoutInfoLineInHeader", IllegalStateException.class);
}
@Test()
public void testFileWithoutInfoLineInHeaderWithOverride() {
testFileWithoutInfoLineInHeader("testFileWithoutInfoLineInHeaderWithOverride", null);
}
private void testFileWithoutInfoLineInHeader(final String name, final Class expectedException) {
final String testFile = privateTestDir + "missingHeaderLine.vcf";
final String cmd = "-T SelectVariants -R " + b36KGReference + " -sn NA12892 --variant:dbsnp "
+ testFile + " -o %s --no_cmdline_in_header"
+ (expectedException == null ? " -U LENIENT_VCF_PROCESSING" : "");
WalkerTestSpec spec =
expectedException != null
? new WalkerTestSpec(cmd, 1, expectedException)
: new WalkerTestSpec(cmd, 1, Arrays.asList(""));
spec.disableShadowBCF();
executeTest(name, spec);
}
@Test
public void testInvalidJexl() {
// NOTE: JexlEngine singleton construction in VariantContextUtils sets silent to false.
// However VariantFiltration.initialize() sets setSilent(true) on the shared instance.
// Just in case this test runs after a VariantFiltration in the same VM, always set silent back to false.
htsjdk.variant.variantcontext.VariantContextUtils.engine.get().setSilent(false);
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants" +
" -R "+b37KGReference +
" -V "+privateTestDir+"ac0.vcf" +
" -select 'vc.getGenotype(\"FAKE_SAMPLE\").isHomRef()'" +
" -o %s",
1,
UserException.class);
executeTest("InvalidJexl", spec);
}
@Test
public void testAlleleTrimming() {
final String testFile = privateTestDir + "forHardLeftAlignVariantsTest.vcf";
final String cmd = "-T SelectVariants -R " + b37KGReference + " -sn NA12878 -env -trimAlternates "
+ "-V " + testFile + " -o %s --no_cmdline_in_header";
WalkerTestSpec spec = new WalkerTestSpec(cmd, 1, Arrays.asList("c4157e8f5b11bade08a67791d4bb7e40"));
executeTest("testAlleleTrimming", spec);
}
@DataProvider(name="unusedAlleleTrimmingProvider")
public Object[][] unusedAlleleTrimmingProvider() {
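// each row: { input VCF, extra SelectVariants arguments, expected output MD5 }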
return new Object[][] {
{ privateTestDir+"forHardLeftAlignVariantsTest.vcf", "-trimAlternates", "c4157e8f5b11bade08a67791d4bb7e40"},
{ privateTestDir+"forHardLeftAlignVariantsTest.vcf", "", "a835454cbd132f2d56defb55ba13b2dd"},
{ privateTestDir+"multi-allelic-ordering.vcf", "-sn SAMPLE-CC -sn SAMPLE-CT", "b19e508640a89f176f7ea347babfcc66"},
{ privateTestDir+"multi-allelic-ordering.vcf", "-sn SAMPLE-CC -sn SAMPLE-CT -env", "15d982a280754804fa384ccc0f3a2ccf"},
{ privateTestDir+"multi-allelic-ordering.vcf", "-sn SAMPLE-CC -sn SAMPLE-CT -trimAlternates", "41ffddc776a2af55db297dbefc6d2097"},
{ privateTestDir+"multi-allelic-ordering.vcf", "-sn SAMPLE-CC -sn SAMPLE-CT -env -trimAlternates", "a9f448502a27e777b3112cf98e1d325f"}
};
}
@Test(dataProvider = "unusedAlleleTrimmingProvider")
public void testUnusedAlleleTrimming(final String vcf, final String extraArgs, final String md5) {
final WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants" +
" -R "+b37KGReference +
" -V "+vcf +
" -o %s --no_cmdline_in_header" +
" "+extraArgs,
1,
Arrays.asList(md5)
);
executeTest(String.format("testUnusedAlleleTrimming: (%s,%s)", new File(vcf).getName(), extraArgs), spec);
}
/**
* Test with an empty VCF file
*/
@Test
public void testEmptyVcfException(){
String testfile = privateTestDir + "reallyEmpty.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants" +
" -R " + b36KGReference +
" -V " + testfile +
" -o %s --no_cmdline_in_header",
1,
UserException.CommandLineException.class
);
spec.disableShadowBCF();
executeTest("testEmptyVcfException--" + testfile, spec);
}
/**
* Test with a VCF file that is not a file
*/
@Test
public void testNotFileVcfException(){
String testfile = privateTestDir;
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants" +
" -R " + b36KGReference +
" -V " + testfile +
" -o %s --no_cmdline_in_header",
1,
UserException.CouldNotReadInputFile.class
);
spec.disableShadowBCF();
executeTest("testNotFileVcfException--" + testfile, spec);
}
/**
* Test with a VCF file that does not exist
*/
@Test
public void testMissingVcfException(){
String testfile = "test.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants" +
" -R " + b36KGReference +
" -V " + testfile +
" -o %s --no_cmdline_in_header",
1,
UserException.CouldNotReadInputFile.class
);
spec.disableShadowBCF();
executeTest("testMissingVcfException--" + testfile, spec);
}
/**
* Test inverting the variant selection criteria by the -invertSelect argument
*/
@Test
public void testInvertSelection() {
String testfile = validationDataLocation + "test.filtered.maf_annotated.vcf";
String samplesFile = validationDataLocation + "SelectVariants.samples.txt";
WalkerTestSpec spec = new WalkerTestSpec(
baseTestString(" -sn A -se '[CDH]' -sf " + samplesFile + " -env -ef -select 'DP < 20000' -invertSelect --variant " + testfile),
1,
Arrays.asList(INVERT_SELECTION_MD5)
);
spec.disableShadowBCF();
executeTest("testInvertSelection--" + testfile, spec);
}
/**
* Test inverting the variant selection criteria by inverting the JEXL expression logic following -select
*/
@Test
public void testInvertJexlSelection() {
String testfile = validationDataLocation + "test.filtered.maf_annotated.vcf";
String samplesFile = validationDataLocation + "SelectVariants.samples.txt";
WalkerTestSpec spec = new WalkerTestSpec(
baseTestString(" -sn A -se '[CDH]' -sf " + samplesFile + " -env -ef -select 'DP >= 20000'--variant " + testfile),
1,
Arrays.asList(INVERT_SELECTION_MD5)
);
spec.disableShadowBCF();
executeTest("testInvertJexlSelection--" + testfile, spec);
}
/**
* Test selecting variants with IDs
*/
@Test
public void testKeepSelectionID() {
String testFile = privateTestDir + "complexExample1.vcf";
String idFile = privateTestDir + "complexExample1.vcf.id";
WalkerTestSpec spec = new WalkerTestSpec(
baseTestString(" -IDs " + idFile + " --variant " + testFile),
1,
Arrays.asList("29bc6716310aea154431716b8bc101c2")
);
spec.disableShadowBCF();
executeTest("testKeepSelectionID--" + testFile, spec);
}
/**
* Test excluding variants with IDs
* Also tests --forceValidOutput flag, which changes the GQ from floats to ints to match
* header spec.
*/
@Test
public void testExcludeSelectionID() {
String testFile = privateTestDir + "complexExample1.vcf";
String idFile = privateTestDir + "complexExample1.vcf.id";
WalkerTestSpec spec = new WalkerTestSpec(
baseTestString(" -xlIDs " + idFile + " --variant " + testFile + " --forceValidOutput"),
1,
Arrays.asList("7d13000098708491fc27a16ae0034cb5")
);
spec.disableShadowBCF();
executeTest("testExcludeSelectionID--" + testFile, spec);
}
/**
* Test excluding variant types
*/
@Test
public void testExcludeSelectionType() {
String testFile = privateTestDir + "complexExample1.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b36KGReference + " -xlSelectType SNP --variant " + testFile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList("0425c469e9f83aa33bc6d77586f97046")
);
executeTest("testExcludeSelectionType--" + testFile, spec);
}
@Test
public void testMendelianViolationSelection() {
String testFile = privateTestDir + "CEUtrioTest.vcf";
String pedFile = privateTestDir + "CEUtrio.ped";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R "+b37KGReference + " -mv -mvq 0 --variant " + testFile + " -ped " + pedFile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList("f5b2178bf59f20911a809a50f92c8c35"));
executeTest("testMendelianViolationSelection--" + testFile, spec);
}
@Test
public void testInvertMendelianViolationSelection() {
String testFile = privateTestDir + "CEUtrioTest.vcf";
String pedFile = privateTestDir + "CEUtrio.ped";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R "+b37KGReference + " -mv -mvq 0 -invMv --variant " + testFile + " -ped " + pedFile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList("01ee9eb113e8d6b5961b4eb8f1ca5d1e"));
executeTest("testInvertMendelianViolationSelection--" + testFile, spec);
}
@Test
public void testMaxFilteredGenotypesSelection() {
String testfile = privateTestDir + "filteredSamples.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants --maxFilteredGenotypes 1 -R " + b37KGReference + " --variant " + testfile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList(MAX_FILTERED_GT_SELECTION_MD5)
);
spec.disableShadowBCF();
executeTest("testMaxFilteredGenotypesSelection--" + testfile, spec);
}
@Test
public void testMinFilteredGenotypesSelection() {
String testfile = privateTestDir + "filteredSamples.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants --minFilteredGenotypes 2 -R " + b37KGReference + " --variant " + testfile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList(MIN_FILTERED_GT_SELECTION_MD5)
);
spec.disableShadowBCF();
executeTest("testMinFilteredGenotypesSelection--" + testfile, spec);
}
@Test
public void testMaxFractionFilteredGenotypesSelection() {
String testfile = privateTestDir + "filteredSamples.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants --maxFractionFilteredGenotypes 0.4 -R " + b37KGReference + " --variant " + testfile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList(MAX_FILTERED_GT_SELECTION_MD5)
);
spec.disableShadowBCF();
executeTest("testMaxFractionFilteredGenotypesSelection--" + testfile, spec);
}
@Test
public void testMinFractionFilteredGenotypesSelection() {
String testfile = privateTestDir + "filteredSamples.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants --minFractionFilteredGenotypes 0.6 -R " + b37KGReference + " --variant " + testfile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList(MIN_FILTERED_GT_SELECTION_MD5)
);
spec.disableShadowBCF();
executeTest("testMinFractionFilteredGenotypesSelection--" + testfile, spec);
}
@Test
public void testSetFilteredGtoNocall() {
String testfile = privateTestDir + "filteredSamples.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants --setFilteredGtToNocall -R " + b37KGReference + " --variant " + testfile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList("fb3030518f7c4120989d37b2cca9abe6")
);
spec.disableShadowBCF();
executeTest("testSetFilteredGtoNocall--" + testfile, spec);
}
@Test
public void testSetFilteredGtoNocallUpdateInfo() {
String testfile = privateTestDir + "selectVariantsInfoField.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants --setFilteredGtToNocall --removeUnusedAlternates --excludeNonVariants -R " + b37KGReference + " --variant " +
testfile + " -o %s --no_cmdline_in_header",
1,
Arrays.asList("d43ff4701e3f42095059867e1a18857e"));
executeTest("testSetFilteredGtoNocallUpdateInfo", spec);
}
@Test
public void testSACSimpleDiploid() {
String testfile = privateTestDir + "261_S01_raw_variants_gvcf.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b37KGReference + " --variant " + testfile + " -o %s --no_cmdline_in_header -trimAlternates",
1,
Arrays.asList("beaa34a786d96796925093486558b103"));
spec.disableShadowBCF();
executeTest("testSACSimpleDiploid", spec);
}
@Test
public void testSACDiploid() {
String testfile = privateTestDir + "diploid-multisample-sac.g.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b37KGReference + " --variant " + testfile + " -o %s --no_cmdline_in_header -sn NA12891 -trimAlternates",
1,
Arrays.asList("f068e449cf3c142c8c5758c5eab38780"));
spec.disableShadowBCF();
executeTest("testSACDiploid", spec);
}
@Test
public void testSACNonDiploid() {
String testfile = privateTestDir + "tetraploid-multisample-sac.g.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b37KGReference + " --variant " + testfile + " -o %s --no_cmdline_in_header -sn NA12891 -trimAlternates",
1,
Arrays.asList("ade30e246b807e45cf6c54db96fc8627"));
spec.disableShadowBCF();
executeTest("testSACNonDiploid", spec);
}
@Test
public void testMaxNoCall1() {
final String testfile = privateTestDir + "vcfexample.forNoCallFiltering.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + hg19Reference + " --variant " + testfile + " -o %s --no_cmdline_in_header --maxNOCALLnumber 1",
1,
Arrays.asList(NO_CALL_FILTERING_KEEP_ONE));
spec.disableShadowBCF();
executeTest("testMaxNoCall1", spec);
}
@Test
public void testMaxNoCall0_25() {
final String testfile = privateTestDir + "vcfexample.forNoCallFiltering.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + hg19Reference + " --variant " + testfile + " -o %s --no_cmdline_in_header --maxNOCALLfraction 0.25",
1,
Arrays.asList(NO_CALL_FILTERING_KEEP_ONE));
spec.disableShadowBCF();
executeTest("testMaxNoCall0_25", spec);
}
@Test
public void testMaxNoCall2() {
final String testfile = privateTestDir + "vcfexample.forNoCallFiltering.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + hg19Reference + " --variant " + testfile + " -o %s --no_cmdline_in_header --maxNOCALLnumber 2",
1,
Arrays.asList(NO_CALL_FILTERING_KEEP_TWO));
spec.disableShadowBCF();
executeTest("testMaxNoCall2", spec);
}
@Test
public void testMaxNoCall0_5() {
final String testfile = privateTestDir + "vcfexample.forNoCallFiltering.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + hg19Reference + " --variant " + testfile + " -o %s --no_cmdline_in_header --maxNOCALLfraction 0.5",
1,
Arrays.asList(NO_CALL_FILTERING_KEEP_TWO));
spec.disableShadowBCF();
executeTest("testMaxNoCall0_5", spec);
}
@Test
public void testHaploid() {
final String testfile = privateTestDir + "haploid-multisample.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b37KGReference + " --variant " + testfile + " -o %s --no_cmdline_in_header -sn HG00610 -select 'DP > 7'",
1,
Arrays.asList("cdb7ca5a57a4afd49ad4513aa8487873"));
spec.disableShadowBCF();
executeTest("testHaploid", spec);
}
@Test
public void testTetraploid() {
final String testfile = privateTestDir + "tetraploid-multisample.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b37KGReference + " --variant " + testfile + " -o %s --no_cmdline_in_header -sn NA18486 -select 'DP > 19'",
1,
Arrays.asList("36f7508b0e1ebf02977235f901c1025e"));
spec.disableShadowBCF();
executeTest("testTetraploid", spec);
}
@Test
public void testTetraDiploid() {
final String testfile = privateTestDir + "tetra-diploid.vcf";
WalkerTestSpec spec = new WalkerTestSpec(
"-T SelectVariants -R " + b37KGReference + " --variant " + testfile + " -o %s --no_cmdline_in_header -sn NA12878 -select 'DP > 48' -trimAlternates",
1,
Arrays.asList("51c002569e91726008feb316032b55c4"));
spec.disableShadowBCF();
executeTest("testTetraDiploid", spec);
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/ErrorModel.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper;
import com.google.java.contract.Requires;
import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
import org.broadinstitute.gatk.tools.walkers.indels.PairHMMIndelErrorModel;
import org.broadinstitute.gatk.utils.haplotype.Haplotype;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.QualityUtils;
import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
import org.broadinstitute.gatk.utils.pileup.PileupElement;
import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.VariantContext;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
/**
* Created by IntelliJ IDEA.
* User: carneiro
* Date: 7/21/11
* Time: 2:21 PM
*
* This is a site-based implementation of an Error Model. The error model is a probability
* distribution over the reference-sample observations at the site, given the phred-scaled site quality.
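*
* Concretely, for each candidate quality q in [minQualityScore, maxQualityScore] the model stores
* log10 Pr(observed mismatch count in the reference-sample pileup | site quality q); see
* log10PoissonProbabilitySiteGivenQual below.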
*/
public class ErrorModel {
private byte maxQualityScore;
private byte minQualityScore;
private byte phredScaledPrior;
private double log10minPower;
private int refDepth;
private boolean hasData = false;
private ProbabilityVector probabilityVector;
private static final boolean compressRange = false;
private static final double log10MinusE = Math.log10(Math.exp(1.0));
private static final boolean DEBUG = false;
/**
* Calculates the probability of the data (reference sample reads) given the phred scaled site quality score.
*
* @param UAC Argument Collection
* @param refSamplePileup Reference sample pileup
* @param refSampleVC VC with True alleles in reference sample pileup
*/
public ErrorModel (final UnifiedArgumentCollection UAC,
final ReadBackedPileup refSamplePileup,
VariantContext refSampleVC, final ReferenceContext refContext) {
this.maxQualityScore = UAC.maxQualityScore;
this.minQualityScore = UAC.minQualityScore;
this.phredScaledPrior = UAC.phredScaledPrior;
log10minPower = Math.log10(UAC.minPower);
PairHMMIndelErrorModel pairModel = null;
LinkedHashMap<Allele, Haplotype> haplotypeMap = null;
double[][] perReadLikelihoods = null;
double[] model = new double[maxQualityScore+1];
Arrays.fill(model,Double.NEGATIVE_INFINITY);
boolean hasCalledAlleles = false;
final PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap = new PerReadAlleleLikelihoodMap();
if (refSampleVC != null) {
for (Allele allele : refSampleVC.getAlleles()) {
if (allele.isCalled()) {
hasCalledAlleles = true;
break;
}
}
haplotypeMap = new LinkedHashMap<Allele, Haplotype>();
if (refSampleVC.isIndel()) {
pairModel = new PairHMMIndelErrorModel(UAC.INDEL_GAP_OPEN_PENALTY, UAC.INDEL_GAP_CONTINUATION_PENALTY,
UAC.OUTPUT_DEBUG_INDEL_INFO, UAC.pairHMM);
IndelGenotypeLikelihoodsCalculationModel.getHaplotypeMapFromAlleles(refSampleVC.getAlleles(), refContext, refContext.getLocus(), haplotypeMap); // will update haplotypeMap adding elements
}
}
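// flat fallback value: the log10 error probability at the widest possible quality gap, used below whenever
// the site has no usable reference-sample data (maximum uncertainty)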
double p = QualityUtils.qualToErrorProbLog10((byte)(maxQualityScore-minQualityScore));
if (refSamplePileup == null || refSampleVC == null || !hasCalledAlleles) {
for (byte q=minQualityScore; q<=maxQualityScore; q++) {
// maximum uncertainty if there's no ref data at site
model[q] = p;
}
this.refDepth = 0;
}
else {
hasData = true;
int matches = 0;
int coverage = 0;
Allele refAllele = refSampleVC.getReference();
if ( refSampleVC.isIndel()) {
//perReadLikelihoods = new double[readCounts.length][refSampleVC.getAlleles().size()];
final int eventLength = IndelGenotypeLikelihoodsCalculationModel.getEventLength(refSampleVC.getAlleles());
if (!haplotypeMap.isEmpty())
perReadLikelihoods = pairModel.computeGeneralReadHaplotypeLikelihoods(refSamplePileup,haplotypeMap,refContext, eventLength, perReadAlleleLikelihoodMap);
}
int idx = 0;
for (PileupElement refPileupElement : refSamplePileup) {
if (DEBUG)
System.out.println(refPileupElement.toString());
boolean isMatch = false;
for (Allele allele : refSampleVC.getAlleles()) {
boolean m = pileupElementMatches(refPileupElement, allele, refAllele, refContext.getBase());
if (DEBUG) System.out.println(m);
isMatch |= m;
}
if (refSampleVC.isIndel() && !haplotypeMap.isEmpty()) {
// ignore match/mismatch if reads, as determined by their likelihood, are not informative
double[] perAlleleLikelihoods = perReadLikelihoods[idx++];
if (!isInformativeElement(perAlleleLikelihoods))
matches++;
else
matches += (isMatch?1:0);
} else {
matches += (isMatch?1:0);
}
coverage++;
}
int mismatches = coverage - matches;
//System.out.format("Cov:%d match:%d mismatch:%d\n",coverage, matches, mismatches);
for (byte q=minQualityScore; q<=maxQualityScore; q++) {
if (coverage==0)
model[q] = p;
else
model[q] = log10PoissonProbabilitySiteGivenQual(q,coverage, mismatches);
}
this.refDepth = coverage;
}
// compress probability vector
this.probabilityVector = new ProbabilityVector(model, compressRange);
}
@Requires("likelihoods.length>0")
private boolean isInformativeElement(double[] likelihoods) {
// if likelihoods are the same, they're not informative
final double thresh = 0.1;
int maxIdx = MathUtils.maxElementIndex(likelihoods);
int minIdx = MathUtils.minElementIndex(likelihoods);
return likelihoods[maxIdx] - likelihoods[minIdx] >= thresh;
}
/**
* Simple constructor that just takes a given log-probability vector as error model.
* Only intended for unit testing, not general usage.
* @param pvector Given vector of log-probabilities
*
*/
public ErrorModel(double[] pvector) {
this.maxQualityScore = (byte)(pvector.length-1);
this.minQualityScore = 0;
this.probabilityVector = new ProbabilityVector(pvector, compressRange);
this.hasData = true;
}
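/**
* Decides whether a single pileup element is consistent with the given allele: for the reference allele, a
* matching base with no immediately following indel counts as a match; for same-length alternate alleles,
* the pileup base is compared to the allele's first base; for indel alleles, the length of the immediately
* following deletion, or the bases of the immediately following insertion, must match.
*/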
public static boolean pileupElementMatches(PileupElement pileupElement, Allele allele, Allele refAllele, byte refBase) {
if (DEBUG)
System.out.format("PE: base:%s isNextToDel:%b isNextToIns:%b eventBases:%s eventLength:%d Allele:%s RefAllele:%s\n",
pileupElement.getBase(), pileupElement.isBeforeDeletionStart(),
pileupElement.isBeforeInsertion(),pileupElement.getBasesOfImmediatelyFollowingInsertion(),pileupElement.getLengthOfImmediatelyFollowingIndel(), allele.toString(), refAllele.toString());
// if the test allele is the ref allele, any base mismatch or any insertion/deletion starting at this pileup position counts as a mismatch
if (allele.isReference()) {
// for a ref allele, any base mismatch or new indel is a mismatch.
if(allele.getBases().length>0)
// todo - can't check vs. allele because allele is not padded so it doesn't include the reference base at this location
// could clean up/simplify this when unpadding is removed
return (pileupElement.getBase() == refBase && !pileupElement.isBeforeInsertion() && !pileupElement.isBeforeDeletionStart());
else
// either null allele to compare, or ref/alt lengths are different (indel by definition).
// if we have an indel that we are comparing against a REF allele, any indel presence (of any length/content) is a mismatch
return (!pileupElement.isBeforeInsertion() && !pileupElement.isBeforeDeletionStart());
}
// for non-ref alleles to compare:
if (refAllele.getBases().length == allele.getBases().length)
// alleles have the same length (eg snp or mnp)
return pileupElement.getBase() == allele.getBases()[0];
// for non-ref alleles,
byte[] alleleBases = allele.getBases();
int eventLength = alleleBases.length - refAllele.getBases().length;
if (eventLength < 0 && pileupElement.isBeforeDeletionStart() && pileupElement.getLengthOfImmediatelyFollowingIndel() == -eventLength)
return true;
if (eventLength > 0 && pileupElement.isBeforeInsertion() &&
Arrays.equals(pileupElement.getBasesOfImmediatelyFollowingInsertion().getBytes(),Arrays.copyOfRange(alleleBases,1,alleleBases.length))) // allele contains ref byte, but pileupElement's event bases doesn't
return true;
return false;
}
/**
* What's the log-likelihood that a site's quality is equal to q? If we see N observations and n mismatches,
* and we assume each observation is independent, with a mismatch probability that depends only on the
* site quality, then p = 10^(-q/10).
* Since we'll normally have relatively high-Q sites and deep coverage in reference samples (i.e. p small, N large),
* we use the Poisson approximation with lambda = N*p to avoid underflows.
* Hence the log-likelihood of q is log10 Pr(Nmismatches = n | SiteQ = q) with Nmismatches ~ Poisson(lambda = p*N), p as above.
* @param q Desired q to get likelihood from
* @param coverage Total coverage
* @param mismatches Number of mismatches
* @return Likelihood of observations as a function of q
*/
@Requires({
"q >= minQualityScore",
"q <= maxQualityScore",
"coverage >= 0",
"mismatches >= 0",
"mismatches <= coverage"
})
private double log10PoissonProbabilitySiteGivenQual(byte q, int coverage, int mismatches) {
// same as log10ProbabilitySiteGivenQual but with Poisson approximation to avoid numerical underflows
double lambda = QualityUtils.qualToErrorProb(q) * (double) coverage;
// log10(e^-lambda * lambda^k / k!) = -lambda*log10(e) + k*log10(lambda) - log10(k!)
return Math.log10(lambda)*mismatches - lambda*log10MinusE - MathUtils.log10Factorial(mismatches);
}
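// Example, for intuition only: q = 20 and coverage = 100 give p = 0.01 and lambda = 1.0; with mismatches = 2
// the value above is 2*log10(1.0) - 1.0*log10(e) - log10(2!) ~= 0 - 0.434 - 0.301 = -0.735.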
@Requires({"qual-minQualityScore <= maxQualityScore"})
public double getSiteLogErrorProbabilityGivenQual (int qual) {
return probabilityVector.getLogProbabilityForIndex(qual);
}
public byte getMaxQualityScore() {
return maxQualityScore;
}
public byte getMinQualityScore() {
return minQualityScore;
}
public int getMinSignificantQualityScore() {
return new ProbabilityVector(probabilityVector,true).getMinVal();
}
public int getMaxSignificantQualityScore() {
return new ProbabilityVector(probabilityVector,true).getMaxVal();
}
public int getReferenceDepth() {
return refDepth;
}
public boolean hasData() {
return hasData;
}
public ProbabilityVector getErrorModelVector() {
return probabilityVector;
}
public String toString() {
StringBuilder result = new StringBuilder("(");
boolean skipComma = true;
for (double v : probabilityVector.getProbabilityVector()) {
if (skipComma) {
skipComma = false;
}
else {
result.append(",");
}
result.append(String.format("%.4f", v));
}
result.append(")");
return result.toString();
}
public static int getTotalReferenceDepth(HashMap<String, ErrorModel> perLaneErrorModels) {
int n=0;
for (ErrorModel e : perLaneErrorModels.values()) {
n += e.getReferenceDepth();
}
return n;
}
/*
@Requires({"maxAlleleCount >= 0"})
//todo -- memoize this function
public boolean hasPowerForMaxAC (int maxAlleleCount) {
int siteQ = (int) Math.ceil(MathUtils.probabilityToPhredScale((double) 1/maxAlleleCount));
double log10CumSum = getCumulativeSum(siteQ);
return log10CumSum < log10minPower;
} */
}
<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/variantutils/ConcordanceMetricsUnitTest.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.variantutils;
import htsjdk.samtools.reference.ReferenceSequenceFile;
import htsjdk.tribble.readers.PositionalBufferedStream;
import org.broadinstitute.gatk.utils.BaseTest;
import org.broadinstitute.gatk.utils.BaseUtils;
import org.broadinstitute.gatk.utils.GenomeLoc;
import org.broadinstitute.gatk.utils.GenomeLocParser;
import org.broadinstitute.gatk.utils.collections.Pair;
import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
import htsjdk.variant.variantcontext.*;
import htsjdk.variant.vcf.VCFCodec;
import htsjdk.variant.vcf.VCFHeader;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.StringBufferInputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class ConcordanceMetricsUnitTest extends BaseTest {
private static ReferenceSequenceFile seq;
private GenomeLocParser genomeLocParser;
@BeforeClass
public void init() throws FileNotFoundException {
// sequence
seq = new CachingIndexedFastaSequenceFile(new File(hg18Reference));
genomeLocParser = new GenomeLocParser(seq);
}
public static String HEADER_BASE = "##fileformat=VCFv4.0\n" +
"##filedate=2010-06-21\n"+
"##reference=NCBI36\n"+
"##INFO=<ID=GC, Number=0, Type=Flag, Description=\"Overlap with Gencode CCDS coding sequence\">\n"+
"##INFO=<ID=DP, Number=1, Type=Integer, Description=\"Total number of reads in haplotype window\">\n"+
"##INFO=<ID=AF, Number=1, Type=Float, Description=\"Dindel estimated population allele frequency\">\n"+
"##FILTER=<ID=NoQCALL, Description=\"Variant called by Dindel but not confirmed by QCALL\">\n"+
"##FORMAT=<ID=GT, Number=1, Type=String, Description=\"Genotype\">\n"+
"##FORMAT=<ID=HQ, Number=2, Type=Integer, Description=\"Haplotype quality\">\n"+
"##FORMAT=<ID=GQ, Number=1, Type=Integer, Description=\"Genotype quality\">\n" +
"#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\t";
public static String TEST_1_HEADER = HEADER_BASE + "test1_sample1\ttest1_sample2\ttest1_sample3\n";
public static String TEST_2_HEADER = HEADER_BASE + "test2_sample1\ttest2_sample2\n";
public static String TEST_3_HEADER_1 = HEADER_BASE + "test3_sample1\ttest3_sample2\ttest3_sample3\ttest3_sample4\ttest3_sample5\n";
public static String TEST_3_HEADER_2 = HEADER_BASE + "test3_sample6\ttest3_sample7\ttest3_sample8\ttest3_sample9\ttest3_sample10\n";
public static String TEST_3_HEADER_3 = HEADER_BASE + "test3_sample3\ttest3_sample6\ttest3_sample7\ttest3_sample8\ttest3_sample9\ttest3_sample10\n";
private Pair<VariantContext,VariantContext> getData1() {
Allele reference_A = Allele.create(BaseUtils.Base.A.base,true);
Allele alt_C = Allele.create(BaseUtils.Base.C.base);
Genotype sam_1_1_eval = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A));
Genotype sam_1_2_eval = GenotypeBuilder.create("test1_sample2", Arrays.asList(reference_A,alt_C));
Genotype sam_1_3_eval = GenotypeBuilder.create("test1_sample3", Arrays.asList(reference_A,alt_C));
Genotype sam_1_1_truth = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A));
Genotype sam_1_2_truth = GenotypeBuilder.create("test1_sample2", Arrays.asList(reference_A,reference_A));
Genotype sam_1_3_truth = GenotypeBuilder.create("test1_sample3", Arrays.asList(alt_C,alt_C));
GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 3, 3);
VariantContextBuilder eval_1_builder = new VariantContextBuilder();
VariantContextBuilder truth_1_builder = new VariantContextBuilder();
eval_1_builder.alleles(Arrays.asList(reference_A,alt_C));
truth_1_builder.alleles(Arrays.asList(reference_A,alt_C));
eval_1_builder.genotypes(Arrays.asList(sam_1_1_eval,sam_1_2_eval,sam_1_3_eval));
truth_1_builder.genotypes(Arrays.asList(sam_1_1_truth,sam_1_2_truth,sam_1_3_truth));
eval_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
truth_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
Pair<VariantContext,VariantContext> testData = new Pair<VariantContext, VariantContext>(eval_1_builder.make(),truth_1_builder.make());
return testData;
}
@Test(enabled=true)
public void testSimpleComparison() {
Pair<VariantContext,VariantContext> data = getData1();
VariantContext eval = data.getFirst();
VariantContext truth = data.getSecond();
VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader,null);
metrics.update(eval,truth);
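// Note added for clarity: the ordinals asserted below follow the htsjdk GenotypeType order,
// also spelled out in testMultiSite: 0 = NO_CALL, 1 = HOM_REF, 2 = HET, 3 = HOM_VAR,
// 4 = UNAVAILABLE, 5 = MIXED. So test1_sample2 is HET in eval but HOM_REF in truth.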
Assert.assertEquals(eval.getGenotype("test1_sample2").getType().ordinal(), 2);
Assert.assertEquals(truth.getGenotype("test1_sample2").getType().ordinal(),1);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getnMismatchingAlt(),0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getTable()[2][1],1);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample3").getTable()[2][1],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample3").getTable()[2][3],1);
Assert.assertEquals(metrics.getOverallGenotypeConcordance().getTable()[1][1],1);
}
private Pair<VariantContext,VariantContext> getData2() {
Allele reference_A = Allele.create(BaseUtils.Base.A.base,true);
Allele alt_C = Allele.create(BaseUtils.Base.C.base);
Allele alt_T = Allele.create(BaseUtils.Base.T.base);
Genotype sam_1_1_eval = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A));
Genotype sam_1_2_eval = GenotypeBuilder.create("test1_sample2", Arrays.asList(reference_A,alt_T));
Genotype sam_1_3_eval = GenotypeBuilder.create("test1_sample3", Arrays.asList(reference_A,alt_C));
Genotype sam_1_1_truth = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A));
Genotype sam_1_2_truth = GenotypeBuilder.create("test1_sample2", Arrays.asList(reference_A,alt_C));
Genotype sam_1_3_truth = GenotypeBuilder.create("test1_sample3", Arrays.asList(alt_C,alt_C));
GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 3, 3);
VariantContextBuilder eval_1_builder = new VariantContextBuilder();
VariantContextBuilder truth_1_builder = new VariantContextBuilder();
eval_1_builder.alleles(Arrays.asList(reference_A,alt_C,alt_T));
truth_1_builder.alleles(Arrays.asList(reference_A,alt_C));
eval_1_builder.genotypes(Arrays.asList(sam_1_1_eval,sam_1_2_eval,sam_1_3_eval));
truth_1_builder.genotypes(Arrays.asList(sam_1_1_truth,sam_1_2_truth,sam_1_3_truth));
eval_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
truth_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
Pair<VariantContext,VariantContext> testData = new Pair<VariantContext, VariantContext>(eval_1_builder.make(),truth_1_builder.make());
return testData;
}
@Test(enabled=true)
public void testMismatchingAlleleInAlleleSubset() {
Pair<VariantContext,VariantContext> data = getData2();
VariantContext eval = data.getFirst();
VariantContext truth = data.getSecond();
VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader,null);
metrics.update(eval,truth);
Assert.assertEquals(eval.getGenotype("test1_sample2").getType().ordinal(), 2);
Assert.assertEquals(truth.getGenotype("test1_sample2").getType().ordinal(),2);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getnMismatchingAlt(),1);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getTable()[2][1],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample3").getTable()[2][1],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample3").getTable()[2][3],1);
Assert.assertEquals(metrics.getOverallGenotypeConcordance().getTable()[1][1],1);
Assert.assertEquals(metrics.getOverallSiteConcordance().getSiteConcordance()[ConcordanceMetrics.SiteConcordanceType.EVAL_SUPERSET_TRUTH.ordinal()],1);
Assert.assertEquals(metrics.getOverallSiteConcordance().getSiteConcordance()[ConcordanceMetrics.SiteConcordanceType.ALLELES_DO_NOT_MATCH.ordinal()],0);
Assert.assertEquals(metrics.getOverallSiteConcordance().getSiteConcordance()[ConcordanceMetrics.SiteConcordanceType.ALLELES_MATCH.ordinal()],0);
// now flip them around
eval = data.getSecond();
truth = data.getFirst();
codec = new VCFCodec();
evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
metrics = new ConcordanceMetrics(evalHeader,compHeader,null);
metrics.update(eval,truth);
Assert.assertEquals(eval.getGenotype("test1_sample2").getType().ordinal(), 2);
Assert.assertEquals(truth.getGenotype("test1_sample2").getType().ordinal(),2);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getnMismatchingAlt(),1);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getTable()[1][2],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample3").getTable()[1][2],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample3").getTable()[3][2],1);
Assert.assertEquals(metrics.getOverallGenotypeConcordance().getTable()[1][1],1);
Assert.assertEquals(metrics.getOverallSiteConcordance().getSiteConcordance()[ConcordanceMetrics.SiteConcordanceType.EVAL_SUPERSET_TRUTH.ordinal()],0);
Assert.assertEquals(metrics.getOverallSiteConcordance().getSiteConcordance()[ConcordanceMetrics.SiteConcordanceType.EVAL_SUBSET_TRUTH.ordinal()],1);
Assert.assertEquals(metrics.getOverallSiteConcordance().getSiteConcordance()[ConcordanceMetrics.SiteConcordanceType.ALLELES_DO_NOT_MATCH.ordinal()],0);
Assert.assertEquals(metrics.getOverallSiteConcordance().getSiteConcordance()[ConcordanceMetrics.SiteConcordanceType.ALLELES_MATCH.ordinal()],0);
}
private Pair<VariantContext,VariantContext> getData3() {
Allele reference_ACT = Allele.create(new byte[]{BaseUtils.Base.A.base,BaseUtils.Base.C.base,BaseUtils.Base.T.base},true);
Allele alt_AC = Allele.create(new byte[]{BaseUtils.Base.A.base,BaseUtils.Base.C.base});
Allele alt_A = Allele.create(BaseUtils.Base.A.base);
Allele alt_ATT = Allele.create(new byte[]{BaseUtils.Base.A.base,BaseUtils.Base.T.base,BaseUtils.Base.T.base});
Genotype sam_1_1_eval = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_ACT,alt_ATT));
Genotype sam_1_2_eval = GenotypeBuilder.create("test1_sample2", Arrays.asList(alt_A,alt_A));
Genotype sam_1_3_eval = GenotypeBuilder.create("test1_sample3", Arrays.asList(reference_ACT,alt_A));
Genotype sam_1_1_truth = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_ACT,alt_AC));
Genotype sam_1_2_truth = GenotypeBuilder.create("test1_sample2", Arrays.asList(alt_A,alt_A));
Genotype sam_1_3_truth = GenotypeBuilder.create("test1_sample3", Arrays.asList(reference_ACT,alt_A));
GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 3, 5);
VariantContextBuilder eval_1_builder = new VariantContextBuilder();
VariantContextBuilder truth_1_builder = new VariantContextBuilder();
eval_1_builder.alleles(Arrays.asList(reference_ACT,alt_ATT,alt_A));
truth_1_builder.alleles(Arrays.asList(reference_ACT,alt_AC,alt_A));
eval_1_builder.genotypes(Arrays.asList(sam_1_1_eval,sam_1_2_eval,sam_1_3_eval));
truth_1_builder.genotypes(Arrays.asList(sam_1_1_truth,sam_1_2_truth,sam_1_3_truth));
eval_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
truth_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
Pair<VariantContext,VariantContext> testData = new Pair<VariantContext, VariantContext>(eval_1_builder.make(),truth_1_builder.make());
return testData;
}
@Test(enabled=true)
public void testComplex() {
Pair<VariantContext,VariantContext> data = getData3();
VariantContext eval = data.getFirst();
VariantContext truth = data.getSecond();
VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader,null);
metrics.update(eval,truth);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample1").getnMismatchingAlt(),1);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getTable()[2][1],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getTable()[3][3],1);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getTable()[1][1],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample3").getTable()[2][1],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample3").getTable()[2][2],1);
Assert.assertEquals(metrics.getOverallGenotypeConcordance().getTable()[3][3],1);
Assert.assertEquals(metrics.getOverallSiteConcordance().getSiteConcordance()[ConcordanceMetrics.SiteConcordanceType.EVAL_SUPERSET_TRUTH.ordinal()],0);
Assert.assertEquals(metrics.getOverallSiteConcordance().getSiteConcordance()[ConcordanceMetrics.SiteConcordanceType.ALLELES_DO_NOT_MATCH.ordinal()],1);
Assert.assertEquals(metrics.getOverallSiteConcordance().getSiteConcordance()[ConcordanceMetrics.SiteConcordanceType.ALLELES_MATCH.ordinal()],0);
}
private Pair<VariantContext,VariantContext> getData4() {
Allele reference_A = Allele.create(BaseUtils.Base.A.base,true);
Allele alt_C = Allele.create(BaseUtils.Base.C.base);
Allele alt_T = Allele.create(BaseUtils.Base.T.base);
Genotype sam_1_1_eval = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A));
Genotype sam_1_2_eval = GenotypeBuilder.create("test1_sample2", Arrays.asList(Allele.NO_CALL,Allele.NO_CALL));
Genotype sam_1_3_eval = GenotypeBuilder.create("test1_sample3", Arrays.asList(reference_A,alt_C));
Genotype sam_1_1_truth = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A));
Genotype sam_1_2_truth = GenotypeBuilder.create("test1_sample2", Arrays.asList(reference_A,alt_C));
Genotype sam_1_3_truth = GenotypeBuilder.create("test1_sample3", Arrays.asList(Allele.NO_CALL,Allele.NO_CALL));
GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 3, 3);
VariantContextBuilder eval_1_builder = new VariantContextBuilder();
VariantContextBuilder truth_1_builder = new VariantContextBuilder();
eval_1_builder.alleles(Arrays.asList(reference_A,alt_C,alt_T));
truth_1_builder.alleles(Arrays.asList(reference_A,alt_C));
eval_1_builder.genotypes(Arrays.asList(sam_1_1_eval,sam_1_2_eval,sam_1_3_eval));
truth_1_builder.genotypes(Arrays.asList(sam_1_1_truth,sam_1_2_truth,sam_1_3_truth));
eval_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
truth_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
Pair<VariantContext,VariantContext> testData = new Pair<VariantContext, VariantContext>(eval_1_builder.make(),truth_1_builder.make());
return testData;
}
@Test(enabled=true)
public void testNoCalls() {
Pair<VariantContext,VariantContext> data = getData4();
VariantContext eval = data.getFirst();
VariantContext truth = data.getSecond();
VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader,null);
metrics.update(eval,truth);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getnMismatchingAlt(),0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getTable()[2][1],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getTable()[0][2],1);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample3").getTable()[2][1],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample3").getTable()[2][3],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample3").getTable()[2][0],1);
}
private Pair<VariantContext,VariantContext> getData5() {
Allele reference_A = Allele.create(BaseUtils.Base.A.base,true);
Allele alt_C = Allele.create(BaseUtils.Base.C.base);
Allele alt_T = Allele.create(BaseUtils.Base.T.base);
Genotype sam_1_1_eval = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A));
Genotype sam_1_2_eval = GenotypeBuilder.create("test1_sample2", new ArrayList<Allele>(0));
Genotype sam_1_3_eval = GenotypeBuilder.create("test1_sample3", Arrays.asList(reference_A,alt_C));
Genotype sam_1_1_truth = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A));
Genotype sam_1_2_truth = GenotypeBuilder.create("test1_sample2", Arrays.asList(reference_A,alt_C));
Genotype sam_1_3_truth = GenotypeBuilder.create("test1_sample3", new ArrayList<Allele>(0));
GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 3, 3);
VariantContextBuilder eval_1_builder = new VariantContextBuilder();
VariantContextBuilder truth_1_builder = new VariantContextBuilder();
eval_1_builder.alleles(Arrays.asList(reference_A,alt_C,alt_T));
truth_1_builder.alleles(Arrays.asList(reference_A,alt_C));
eval_1_builder.genotypes(Arrays.asList(sam_1_1_eval,sam_1_2_eval,sam_1_3_eval));
truth_1_builder.genotypes(Arrays.asList(sam_1_1_truth,sam_1_2_truth,sam_1_3_truth));
eval_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
truth_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
Pair<VariantContext,VariantContext> testData = new Pair<VariantContext, VariantContext>(eval_1_builder.make(),truth_1_builder.make());
return testData;
}
@Test(enabled=true)
public void testMissing() {
Pair<VariantContext,VariantContext> data = getData5();
VariantContext eval = data.getFirst();
VariantContext truth = data.getSecond();
VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader,null);
metrics.update(eval,truth);
Assert.assertTrue(eval.getGenotype("test1_sample2").getType().equals(GenotypeType.UNAVAILABLE));
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getnMismatchingAlt(),0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getTable()[2][1],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getTable()[0][2],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getTable()[4][2],1);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample3").getTable()[2][1],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample3").getTable()[2][3],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample3").getTable()[2][0],0);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample3").getTable()[2][4],1);
}
private List<Pair<VariantContext,VariantContext>> getData6() {
Allele reference_A = Allele.create(BaseUtils.Base.A.base,true);
Allele alt_C = Allele.create(BaseUtils.Base.C.base);
// site 1 -
// sample 1: hom-ref/hom-ref
// sample 2: het/hom-ref
Genotype sam_2_1_1_eval = GenotypeBuilder.create("test2_sample1", Arrays.asList(reference_A,reference_A));
Genotype sam_2_2_1_eval = GenotypeBuilder.create("test2_sample2", Arrays.asList(reference_A,alt_C));
Genotype sam_2_1_1_truth = GenotypeBuilder.create("test2_sample1", Arrays.asList(reference_A,reference_A));
Genotype sam_2_2_1_truth = GenotypeBuilder.create("test2_sample2", Arrays.asList(reference_A,reference_A));
GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 3, 3);
VariantContextBuilder eval_1_builder = new VariantContextBuilder();
VariantContextBuilder truth_1_builder = new VariantContextBuilder();
eval_1_builder.alleles(Arrays.asList(reference_A,alt_C));
truth_1_builder.alleles(Arrays.asList(reference_A,alt_C));
eval_1_builder.genotypes(Arrays.asList(sam_2_1_1_eval,sam_2_2_1_eval));
truth_1_builder.genotypes(Arrays.asList(sam_2_1_1_truth,sam_2_2_1_truth));
eval_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
truth_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
Pair<VariantContext,VariantContext> testDataSite1 = new Pair<VariantContext, VariantContext>(eval_1_builder.make(),truth_1_builder.make());
reference_A = Allele.create(BaseUtils.Base.A.base,true);
Allele alt_T = Allele.create(BaseUtils.Base.T.base);
// site 2 -
// sample 1: no-call/hom-ref
// sample 2: hom-var/hom-var
Genotype sam_2_1_2_eval = GenotypeBuilder.create("test2_sample1",Arrays.asList(Allele.NO_CALL,Allele.NO_CALL));
Genotype sam_2_2_2_eval = GenotypeBuilder.create("test2_sample2",Arrays.asList(alt_T,alt_T));
Genotype sam_2_1_2_truth = GenotypeBuilder.create("test2_sample1",Arrays.asList(reference_A,reference_A));
Genotype sam_2_2_2_truth = GenotypeBuilder.create("test2_sample2",Arrays.asList(alt_T,alt_T));
loc = genomeLocParser.createGenomeLoc("chr1", 4, 4);
eval_1_builder = new VariantContextBuilder();
truth_1_builder = new VariantContextBuilder();
eval_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
truth_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
eval_1_builder.alleles(Arrays.asList(reference_A,alt_T));
truth_1_builder.alleles(Arrays.asList(reference_A,alt_T));
eval_1_builder.genotypes(Arrays.asList(sam_2_1_2_eval,sam_2_2_2_eval));
truth_1_builder.genotypes(Arrays.asList(sam_2_1_2_truth,sam_2_2_2_truth));
Pair<VariantContext,VariantContext> testDataSite2 = new Pair<VariantContext, VariantContext>(eval_1_builder.make(),truth_1_builder.make());
Allele alt_G = Allele.create(BaseUtils.Base.G.base);
// site 3 -
// sample 1: alleles do not match
// sample 2: het/het
Genotype sam_2_1_3_eval = GenotypeBuilder.create("test2_sample1",Arrays.asList(alt_G,alt_T));
Genotype sam_2_2_3_eval = GenotypeBuilder.create("test2_sample2",Arrays.asList(reference_A,alt_T));
Genotype sam_2_1_3_truth = GenotypeBuilder.create("test2_sample1",Arrays.asList(alt_T,alt_T));
Genotype sam_2_2_3_truth = GenotypeBuilder.create("test2_sample2",Arrays.asList(reference_A,alt_T));
loc = genomeLocParser.createGenomeLoc("chr1",5,5);
eval_1_builder = new VariantContextBuilder();
truth_1_builder = new VariantContextBuilder();
eval_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
truth_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
eval_1_builder.alleles(Arrays.asList(reference_A,alt_T,alt_G));
truth_1_builder.alleles(Arrays.asList(reference_A,alt_T));
eval_1_builder.genotypes(Arrays.asList(sam_2_1_3_eval,sam_2_2_3_eval));
truth_1_builder.genotypes(Arrays.asList(sam_2_1_3_truth,sam_2_2_3_truth));
Pair<VariantContext,VariantContext> testDataSite3 = new Pair<VariantContext, VariantContext>(eval_1_builder.make(),truth_1_builder.make());
// site 4 -
// sample 1: unavailable/het
// sample 2: unavailable/ref
Genotype sam_2_1_4_eval = GenotypeBuilder.create("test2_sample1",new ArrayList<Allele>(0));
Genotype sam_2_2_4_eval = GenotypeBuilder.create("test2_sample2",new ArrayList<Allele>(0));
Genotype sam_2_1_4_truth = GenotypeBuilder.create("test2_sample1",Arrays.asList(reference_A,alt_T));
Genotype sam_2_2_4_truth = GenotypeBuilder.create("test2_sample2",Arrays.asList(reference_A,reference_A));
loc = genomeLocParser.createGenomeLoc("chr1",6,6);
eval_1_builder = new VariantContextBuilder();
truth_1_builder = new VariantContextBuilder();
eval_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
truth_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
eval_1_builder.alleles(Arrays.asList(reference_A,alt_T));
truth_1_builder.alleles(Arrays.asList(reference_A,alt_T));
eval_1_builder.genotypes(Arrays.asList(sam_2_1_4_eval,sam_2_2_4_eval));
truth_1_builder.genotypes(Arrays.asList(sam_2_1_4_truth,sam_2_2_4_truth));
Pair<VariantContext,VariantContext> testDataSite4 = new Pair<VariantContext, VariantContext>(eval_1_builder.make(),truth_1_builder.make());
// site 5 -
// sample 1: hom-var/no-call
// sample 2: het/het
Genotype sam_2_1_5_eval = GenotypeBuilder.create("test2_sample1",Arrays.asList(alt_C,alt_C));
Genotype sam_2_2_5_eval = GenotypeBuilder.create("test2_sample2",Arrays.asList(reference_A,alt_C));
Genotype sam_2_1_5_truth = GenotypeBuilder.create("test2_sample1",Arrays.asList(Allele.NO_CALL,Allele.NO_CALL));
Genotype sam_2_2_5_truth = GenotypeBuilder.create("test2_sample2",Arrays.asList(reference_A,alt_C));
loc = genomeLocParser.createGenomeLoc("chr1",7,7);
eval_1_builder = new VariantContextBuilder();
truth_1_builder = new VariantContextBuilder();
eval_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
truth_1_builder.loc(loc.getContig(),loc.getStart(),loc.getStop());
eval_1_builder.alleles(Arrays.asList(reference_A,alt_C));
truth_1_builder.alleles(Arrays.asList(reference_A,alt_C));
eval_1_builder.genotypes(Arrays.asList(sam_2_1_5_eval,sam_2_2_5_eval));
truth_1_builder.genotypes(Arrays.asList(sam_2_1_5_truth,sam_2_2_5_truth));
Pair<VariantContext,VariantContext> testDataSite5 = new Pair<VariantContext, VariantContext>(eval_1_builder.make(),truth_1_builder.make());
return Arrays.asList(testDataSite1,testDataSite2,testDataSite3,testDataSite4,testDataSite5);
}
@Test(enabled=true)
public void testMultiSite() {
int[][] sample1_expected = new int[GenotypeType.values().length][GenotypeType.values().length];
int[][] sample2_expected = new int[GenotypeType.values().length][GenotypeType.values().length];
// order: no-call,ref,het,hom-var,unavailable,mixed
sample1_expected[0] = new int[]{0,1,0,0,0,0};
sample2_expected[0] = new int[]{0,0,0,0,0,0};
sample1_expected[1] = new int[]{0,1,0,0,0,0};
sample2_expected[1] = new int[]{0,0,0,0,0,0};
sample1_expected[2] = new int[]{0,0,0,0,0,0};
sample2_expected[2] = new int[]{0,1,2,0,0,0};
sample1_expected[3] = new int[]{1,0,0,0,0,0};
sample2_expected[3] = new int[]{0,0,0,1,0,0};
sample1_expected[4] = new int[]{0,0,1,0,0,0};
sample2_expected[4] = new int[]{0,1,0,0,0,0};
List<Pair<VariantContext,VariantContext>> data = getData6();
VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_2_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_2_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader,null);
for ( Pair<VariantContext,VariantContext> contextPair : data ) {
VariantContext eval = contextPair.getFirst();
VariantContext comp = contextPair.getSecond();
logger.warn(eval.toString());
logger.warn(comp.toString());
Assert.assertTrue(eval != null);
Assert.assertTrue(comp != null);
Assert.assertTrue(eval.getGenotype("test2_sample1") != null);
Assert.assertTrue(comp.getGenotype("test2_sample1") != null);
Assert.assertTrue(eval.getGenotype("test2_sample2") != null);
Assert.assertTrue(comp.getGenotype("test2_sample2") != null);
metrics.update(eval,comp);
}
int[][] sample1_observed = metrics.getGenotypeConcordance("test2_sample1").getTable();
int[][] sample2_observed = metrics.getGenotypeConcordance("test2_sample2").getTable();
for ( GenotypeType eType : GenotypeType.values() ) {
for ( GenotypeType cType : GenotypeType.values() ) {
Assert.assertEquals(sample1_expected[eType.ordinal()][cType.ordinal()],sample1_observed[eType.ordinal()][cType.ordinal()]);
Assert.assertEquals(sample2_expected[eType.ordinal()][cType.ordinal()],sample2_observed[eType.ordinal()][cType.ordinal()]);
}
}
}
@Test(enabled=true)
public void testNRD_testNRS_testMargins() {
Pair<VariantContext,VariantContext> data = getData3();
VariantContext eval = data.getFirst();
VariantContext truth = data.getSecond();
VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader,null);
int[][] table = metrics.getOverallGenotypeConcordance().getTable();
// set up the table
table[0] = new int[] {30, 12, 7, 5, 6, 0};
table[1] = new int[] {10, 100, 5, 1, 7, 1};
table[2] = new int[] {5, 7, 150, 3, 3, 1};
table[3] = new int[] {3, 2, 6, 50, 1, 0};
table[4] = new int[] {10, 6, 3, 3, 2, 0};
table[5] = new int[] {12, 0, 34, 20, 10, 0};
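// Derivation of the expected values below (comments added for clarity; the NRS formula is
// inferred from the numbers and should be read as an illustration, not a specification).
// Rows/columns follow GenotypeType order: no-call, hom-ref, het, hom-var, unavailable, mixed.
//   NRD: over the called hom-ref/het/hom-var 3x3 block the total is 324 with 100+150+50 = 300 concordant;
//        dropping the hom-ref/hom-ref cell leaves 224 comparisons, 24 of them discordant -> 24/224 = 0.1071429.
//   NRS: the comp het and hom-var columns sum to 205 + 82 = 287; excluding the eval MIXED row (34 + 20 = 54)
//        leaves 233, of which 150+3+6+50 = 209 are called het/hom-var in eval -> 209/233 = 0.8969957.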
double EXPEC_NRS = 0.8969957;
double EXPEC_NRD = 0.1071429;
double EXPEC_OGC = 0.92592592; // (100+150+50)/(100+5+1+150+7+3+50+2+6)
Assert.assertEquals(EXPEC_NRS,metrics.getOverallNRS(),1e-7);
Assert.assertEquals(EXPEC_NRD,metrics.getOverallNRD(),1e-7);
Assert.assertEquals(EXPEC_OGC,metrics.getOverallOGC(),1e-7);
int EXPEC_EVAL_REF = 124;
int EXPEC_EVAL_HET = 169;
int EXPEC_EVAL_VAR = 62;
int EXPEC_COMP_REF = 127;
int EXPEC_COMP_HET = 205;
int EXPEC_COMP_VAR = 82;
Assert.assertEquals(metrics.getOverallGenotypeConcordance().getnEvalGenotypes(GenotypeType.HOM_REF),EXPEC_EVAL_REF);
Assert.assertEquals(metrics.getOverallGenotypeConcordance().getnEvalGenotypes(GenotypeType.HET),EXPEC_EVAL_HET);
Assert.assertEquals(metrics.getOverallGenotypeConcordance().getnEvalGenotypes(GenotypeType.HOM_VAR),EXPEC_EVAL_VAR);
Assert.assertEquals(metrics.getOverallGenotypeConcordance().getnCompGenotypes(GenotypeType.HOM_REF),EXPEC_COMP_REF);
Assert.assertEquals(metrics.getOverallGenotypeConcordance().getnCompGenotypes(GenotypeType.HET),EXPEC_COMP_HET);
Assert.assertEquals(metrics.getOverallGenotypeConcordance().getnCompGenotypes(GenotypeType.HOM_VAR),EXPEC_COMP_VAR);
}
@Test(enabled=true)
public void testRobustness() {
VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_3_HEADER_1))));
VCFHeader disjointCompHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_3_HEADER_2))));
VCFHeader overlapCompHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_3_HEADER_3))));
ConcordanceMetrics disjointMetrics = new ConcordanceMetrics(evalHeader,disjointCompHeader,null);
ConcordanceMetrics overlapMetrics = new ConcordanceMetrics(evalHeader,overlapCompHeader,null);
// test what happens if you put in disjoint sets and start making requests
Assert.assertEquals(0,disjointMetrics.getPerSampleGenotypeConcordance().size());
String msg = "No Exception Thrown";
try {
disjointMetrics.getGenotypeConcordance("test3_sample4");
} catch ( Exception e) {
msg = e.getMessage();
}
Assert.assertEquals("Attempted to request the concordance table for sample test3_sample4 on which it was not calculated",msg);
// test that the overlapping sample is in the overlapping table (basically do this without throwing an exception)
overlapMetrics.getGenotypeConcordance("test3_sample3");
String msg2 = "No Exception Thrown";
try {
disjointMetrics.getGenotypeConcordance("test3_sample4");
} catch ( Exception e) {
msg2 = e.getMessage();
}
Assert.assertEquals("Attempted to request the concordance table for sample test3_sample4 on which it was not calculated",msg2);
// test what happens if you try to calculate NRS and NRD on an empty table
Assert.assertEquals(disjointMetrics.getOverallNRD(), 1.0, 1e-16);
Assert.assertEquals(disjointMetrics.getOverallNRS(), 0.0, 1e-16);
}
public List<Pair<VariantContext,VariantContext>> getData7() {
Allele ref1 = Allele.create(BaseUtils.Base.T.base,true);
Allele alt1 = Allele.create(BaseUtils.Base.C.base);
Allele alt2 = Allele.create(BaseUtils.Base.G.base);
Allele alt3 = Allele.create(BaseUtils.Base.A.base);
GenomeLoc loc1 = genomeLocParser.createGenomeLoc("chr1",1,1);
VariantContextBuilder site1Eval = new VariantContextBuilder();
VariantContextBuilder site1Comp = new VariantContextBuilder();
// site 1: eval superset comp
site1Eval.loc(loc1.getContig(),loc1.getStart(),loc1.getStop());
site1Comp.loc(loc1.getContig(),loc1.getStart(),loc1.getStop());
site1Eval.alleles(Arrays.asList(ref1,alt1,alt2));
site1Comp.alleles(Arrays.asList(ref1,alt2));
site1Eval.genotypes(GenotypeBuilder.create("test2_sample1",Arrays.asList(ref1,alt1)),GenotypeBuilder.create("test2_sample2",Arrays.asList(ref1,alt2)));
site1Comp.genotypes(GenotypeBuilder.create("test2_sample1",Arrays.asList(ref1,alt2)),GenotypeBuilder.create("test2_sample2",Arrays.asList(ref1,alt2)));
// site 2: eval subset comp
GenomeLoc loc2 = genomeLocParser.createGenomeLoc("chr1",2,2);
VariantContextBuilder site2Eval = new VariantContextBuilder();
VariantContextBuilder site2Comp = new VariantContextBuilder();
site2Eval.loc(loc2.getContig(),loc2.getStart(),loc2.getStop());
site2Comp.loc(loc2.getContig(),loc2.getStart(),loc2.getStop());
site2Eval.alleles(Arrays.asList(ref1,alt1));
site2Comp.alleles(Arrays.asList(ref1,alt1,alt3));
site2Eval.genotypes(GenotypeBuilder.create("test2_sample1",Arrays.asList(ref1,alt1)),GenotypeBuilder.create("test2_sample2",Arrays.asList(ref1,alt1)));
site2Comp.genotypes(GenotypeBuilder.create("test2_sample1",Arrays.asList(ref1,alt3)),GenotypeBuilder.create("test2_sample2",Arrays.asList(ref1,alt1)));
// site 3: eval only
GenomeLoc loc3 = genomeLocParser.createGenomeLoc("chr1",3,3);
VariantContextBuilder site3Eval = new VariantContextBuilder();
VariantContextBuilder site3Comp = new VariantContextBuilder();
site3Eval.loc(loc3.getContig(),loc3.getStart(),loc3.getStop());
site3Comp.loc(loc3.getContig(),loc3.getStart(),loc3.getStop());
site3Eval.alleles(Arrays.asList(ref1,alt1));
site3Comp.alleles(Arrays.asList(ref1,alt1));
site3Eval.genotypes(GenotypeBuilder.create("test2_sample1",Arrays.asList(ref1,alt1)),GenotypeBuilder.create("test2_sample2",Arrays.asList(ref1,alt1)));
site3Comp.genotypes(GenotypeBuilder.create("test2_sample1",new ArrayList<Allele>(0)),GenotypeBuilder.create("test2_sample2",new ArrayList<Allele>(0)));
// site 4: comp only - monomorphic
GenomeLoc loc4 = genomeLocParser.createGenomeLoc("chr1",4,4);
VariantContextBuilder site4Eval = new VariantContextBuilder();
VariantContextBuilder site4Comp = new VariantContextBuilder();
site4Eval.loc(loc4.getContig(),loc4.getStart(),loc4.getStop());
site4Comp.loc(loc4.getContig(),loc4.getStart(),loc4.getStop());
site4Eval.alleles(Arrays.asList(ref1,alt1));
site4Comp.alleles(Arrays.asList(ref1,alt1));
site4Eval.genotypes(GenotypeBuilder.create("test2_sample1",Arrays.asList(ref1,ref1)),GenotypeBuilder.create("test2_sample2",Arrays.asList(ref1,ref1)));
site4Comp.genotypes(GenotypeBuilder.create("test2_sample1",Arrays.asList(ref1,alt1)),GenotypeBuilder.create("test2_sample2",Arrays.asList(ref1,alt1)));
// site 5: overlapping
GenomeLoc loc5 = genomeLocParser.createGenomeLoc("chr1",5,5);
VariantContextBuilder site5Eval = new VariantContextBuilder();
VariantContextBuilder site5Comp = new VariantContextBuilder();
site5Eval.loc(loc5.getContig(),loc5.getStart(),loc5.getStop());
site5Comp.loc(loc5.getContig(),loc5.getStart(),loc5.getStop());
site5Eval.alleles(Arrays.asList(ref1,alt1,alt3));
site5Comp.alleles(Arrays.asList(ref1,alt1,alt3));
site5Eval.genotypes(GenotypeBuilder.create("test2_sample1",Arrays.asList(ref1,alt1)),GenotypeBuilder.create("test2_sample2",Arrays.asList(alt1,alt3)));
site5Comp.genotypes(GenotypeBuilder.create("test2_sample1",Arrays.asList(alt1,alt1)),GenotypeBuilder.create("test2_sample2",Arrays.asList(alt3,alt3)));
// site 6: some non-matching alts
GenomeLoc loc6 = genomeLocParser.createGenomeLoc("chr1",6,6);
VariantContextBuilder site6Eval = new VariantContextBuilder();
VariantContextBuilder site6Comp = new VariantContextBuilder();
site6Eval.loc(loc6.getContig(),loc6.getStart(),loc6.getStop());
site6Comp.loc(loc6.getContig(),loc6.getStart(),loc6.getStop());
site6Eval.alleles(Arrays.asList(ref1,alt1,alt2));
site6Comp.alleles(Arrays.asList(ref1,alt1,alt3));
site6Eval.genotypes(GenotypeBuilder.create("test2_sample1",Arrays.asList(ref1,alt1)),GenotypeBuilder.create("test2_sample2",Arrays.asList(ref1,alt2)));
site6Comp.genotypes(GenotypeBuilder.create("test2_sample1",Arrays.asList(ref1,alt1)),GenotypeBuilder.create("test2_sample2",Arrays.asList(ref1,alt3)));
// site 7: matching with no-calls
GenomeLoc loc7 = genomeLocParser.createGenomeLoc("chr1",7,7);
VariantContextBuilder site7Eval = new VariantContextBuilder();
VariantContextBuilder site7Comp = new VariantContextBuilder();
site7Eval.loc(loc7.getContig(),loc7.getStart(),loc7.getStop());
site7Comp.loc(loc7.getContig(),loc7.getStart(),loc7.getStop());
site7Eval.alleles(Arrays.asList(ref1,alt1));
site7Comp.alleles(Arrays.asList(ref1,alt1));
site7Eval.genotypes(GenotypeBuilder.create("test2_sample1",Arrays.asList(ref1,alt1)),GenotypeBuilder.create("test2_sample2",Arrays.asList(Allele.NO_CALL,Allele.NO_CALL)));
site7Comp.genotypes(GenotypeBuilder.create("test2_sample1",Arrays.asList(ref1,alt1)),GenotypeBuilder.create("test2_sample2",Arrays.asList(ref1,alt1)));
Pair<VariantContext,VariantContext> site1 = new Pair<VariantContext, VariantContext>(site1Eval.make(),site1Comp.make());
Pair<VariantContext,VariantContext> site2 = new Pair<VariantContext, VariantContext>(site2Eval.make(),site2Comp.make());
Pair<VariantContext,VariantContext> site3 = new Pair<VariantContext, VariantContext>(site3Eval.make(),site3Comp.make());
Pair<VariantContext,VariantContext> site4 = new Pair<VariantContext, VariantContext>(site4Eval.make(),site4Comp.make());
Pair<VariantContext,VariantContext> site5 = new Pair<VariantContext, VariantContext>(site5Eval.make(),site5Comp.make());
Pair<VariantContext,VariantContext> site6 = new Pair<VariantContext, VariantContext>(site6Eval.make(),site6Comp.make());
Pair<VariantContext,VariantContext> site7 = new Pair<VariantContext, VariantContext>(site7Eval.make(),site7Comp.make());
return Arrays.asList(site1,site2,site3,site4,site5,site6,site7);
}
@Test(enabled = true)
public void testSites() {
VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_2_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_2_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader,null);
List<Pair<VariantContext,VariantContext>> data = getData7();
int idx = 0;
int[] expecNotMatch = new int[]{0,0,0,0,0,1,1};
for ( Pair<VariantContext,VariantContext> varPair : data ) {
metrics.update(varPair.getFirst(),varPair.getSecond());
Assert.assertEquals(metrics.getOverallSiteConcordance().get(ConcordanceMetrics.SiteConcordanceType.ALLELES_DO_NOT_MATCH),expecNotMatch[idx]);
logger.info(idx);
idx++;
}
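// Summary (added for clarity, following the comments in getData7() and the assertions below):
// site 1 -> EVAL_SUPERSET_TRUTH, site 2 -> EVAL_SUBSET_TRUTH, site 3 -> EVAL_ONLY,
// site 4 -> TRUTH_ONLY, sites 5 and 7 -> ALLELES_MATCH, site 6 -> ALLELES_DO_NOT_MATCH.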
Assert.assertEquals(metrics.getOverallSiteConcordance().get(ConcordanceMetrics.SiteConcordanceType.ALLELES_DO_NOT_MATCH),1);
Assert.assertEquals(metrics.getOverallSiteConcordance().get(ConcordanceMetrics.SiteConcordanceType.ALLELES_MATCH),2);
Assert.assertEquals(metrics.getOverallSiteConcordance().get(ConcordanceMetrics.SiteConcordanceType.EVAL_ONLY),1);
Assert.assertEquals(metrics.getOverallSiteConcordance().get(ConcordanceMetrics.SiteConcordanceType.TRUTH_ONLY),1);
Assert.assertEquals(metrics.getOverallSiteConcordance().get(ConcordanceMetrics.SiteConcordanceType.EVAL_SUBSET_TRUTH),1);
Assert.assertEquals(metrics.getOverallSiteConcordance().get(ConcordanceMetrics.SiteConcordanceType.EVAL_SUPERSET_TRUTH),1);
}
private Pair<VariantContext,VariantContext> getMonoallelicData() {
final Allele ref = Allele.create(BaseUtils.Base.T.base,true);
final Allele alt = Allele.create(BaseUtils.Base.C.base);
//Site in eval is monoallelic, both samples are HOM_REF
//sample1 in comp is HOM_VAR, sample2 is NO_CALL
//None of these should trigger mismatching alleles
final GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1",1,1);
final VariantContextBuilder site1Comp = new VariantContextBuilder();
final VariantContextBuilder site1Eval = new VariantContextBuilder();
site1Comp.loc(loc.getContig(), loc.getStart(), loc.getStop());
site1Eval.loc(loc.getContig(), loc.getStart(), loc.getStop());
site1Comp.alleles(Arrays.asList(ref));
site1Eval.alleles(Arrays.asList(ref, alt));
site1Comp.genotypes(GenotypeBuilder.create("test2_sample1", Arrays.asList(ref, ref)),
GenotypeBuilder.create("test2_sample2", Arrays.asList(ref, ref)));
site1Eval.genotypes(GenotypeBuilder.create("test2_sample1",Arrays.asList(alt,alt)),
GenotypeBuilder.create("test2_sample2",Arrays.asList(Allele.NO_CALL,Allele.NO_CALL)));
return new Pair<>(site1Eval.make(), site1Comp.make());
}
@Test
public void testMonoallelicSite() {
final Pair<VariantContext,VariantContext> data = getMonoallelicData();
final VariantContext eval = data.getFirst();
final VariantContext truth = data.getSecond();
final VCFCodec codec = new VCFCodec();
final VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_2_HEADER))));
final VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_2_HEADER))));
final ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader,null);
metrics.update(eval,truth);
Assert.assertEquals(metrics.getGenotypeConcordance("test2_sample1").getnMismatchingAlt(),0);
Assert.assertEquals(metrics.getGenotypeConcordance("test2_sample2").getnMismatchingAlt(),0);
Assert.assertEquals(metrics.getGenotypeConcordance("test2_sample1").getTable()[3][1],1);
Assert.assertEquals(metrics.getGenotypeConcordance("test2_sample2").getTable()[0][1],1);
}
}<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/ActiveRegionTestDataSet.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller;
import htsjdk.samtools.GATKBin;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMSequenceDictionary;
import htsjdk.samtools.SAMSequenceRecord;
import org.apache.commons.math.distribution.ExponentialDistribution;
import org.broadinstitute.gatk.tools.walkers.haplotypecaller.readthreading.ReadThreadingGraph;
import org.broadinstitute.gatk.utils.GenomeLocParser;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.QualityUtils;
import org.broadinstitute.gatk.utils.haplotype.Haplotype;
import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Mock-up active region data used in testing.
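 * <p>
 * Haplotypes are described either as literal base strings, as extended cigar strings applied to the
 * reference (see {@code applyCigar} below), or as a single {@code "Civar:"}-prefixed descriptor that is
 * unrolled into all of its variant combinations. Reads are described as literal base strings, as
 * {@code "allele:offset:cigar"} descriptors (allele 0 being the reference), or as
 * {@code "*:readCount:readLength"} descriptors that generate reads evenly across the haplotypes.
 * </p>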
*
* @author <NAME> <<EMAIL>>
*/
public class ActiveRegionTestDataSet {
private final byte[] referenceBytes;
protected String reference;
protected String[] haplotypeCigars;
protected List<String> haplotypeStrings;
protected String[] readCigars;
protected byte[] bq;
protected byte[] dq;
protected byte[] iq;
protected int kmerSize;
private List<Haplotype> haplotypeList;
private List<GATKSAMRecord> readList;
private AssemblyResultSet assemblyResultSet;
private Map<String,GATKSAMRecord> readBySequence;
private String stringRepresentation;
private List<List<Civar.ElementOffset>> readEventOffsetList;
private GenomeLocParser genomeLocParser;
/** Create a new active region data test set */
public ActiveRegionTestDataSet(final int kmerSize, final String reference, final String[] haplotypes,
final String[] readCigars, final byte[] bq, final byte[] dq, final byte[] iq) {
this.reference = reference;
this.referenceBytes = reference.getBytes();
this.haplotypeCigars = haplotypes;
this.readCigars = readCigars;
this.bq = bq;
this.dq = dq;
this.iq = iq;
this.kmerSize = kmerSize;
this.genomeLocParser = new GenomeLocParser(ArtificialSAMUtils.createArtificialSamHeader(1,1,reference.length()).getSequenceDictionary());
}
public String getReference() {
return reference;
}
public String toString() {
if (stringRepresentation == null)
return super.toString();
else return stringRepresentation;
}
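    /**
     * Lazily builds the {@link AssemblyResultSet} by threading the reference and every haplotype
     * string into a {@link ReadThreadingGraph} with this data set's kmer size; throws if the
     * reference contains cycles at that kmer size.
     */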
public AssemblyResultSet assemblyResultSet() {
if (assemblyResultSet == null) {
final ReadThreadingGraph rtg = new ReadThreadingGraph(kmerSize);
rtg.addSequence("anonymous", this.getReference().getBytes(), true);
for (final String haplotype : this.haplotypesStrings()) {
rtg.addSequence("anonymous", haplotype.getBytes(), false);
}
rtg.buildGraphIfNecessary();
if (rtg.hasCycles())
throw new RuntimeException("there is cycles in the reference with kmer size " + kmerSize + ". Don't use this size for the benchmark or change the reference");
List<Haplotype> haplotypeList = this.haplotypeList();
assemblyResultSet = new AssemblyResultSet();
final AssemblyResult ar = new AssemblyResult((haplotypeList.size() > 1 ?
AssemblyResult.Status.ASSEMBLED_SOME_VARIATION : AssemblyResult.Status.JUST_ASSEMBLED_REFERENCE),rtg.convertToSequenceGraph());
ar.setThreadingGraph(rtg);
for (final Haplotype h : haplotypeList)
assemblyResultSet.add(h, ar);
}
return assemblyResultSet;
}
public List<String> haplotypesStrings() {
if (haplotypeStrings != null) {
return haplotypeStrings;
}
final List<String> result = new ArrayList<>(haplotypeCigars.length);
String reference = this.reference;
for (final String cigar : haplotypeCigars) {
if (cigar.matches("^Civar:.*$")) {
stringRepresentation = cigar.substring(6);
result.addAll(expandAllCombinations(cigar.substring(6),reference));
} else if (cigar.matches("^.*\\d+.*$")) {
result.add(applyCigar(reference, cigar,0,true));
} else {
result.add(cigar);
}
}
haplotypeStrings = result;
return result;
}
private List<String> expandAllCombinations(final String cigarString, final String reference) {
final Civar civar = Civar.fromCharSequence(cigarString);
final List<Civar> unrolledCivars = civar.optionalizeAll().unroll();
List<String> result = new ArrayList<>(unrolledCivars.size());
for (final Civar c : unrolledCivars) {
result.add(c.applyTo(reference));
}
return result;
}
private List<Haplotype> expandAllHaplotypeCombinations(final String civarString, final String reference) {
final Civar civar = Civar.fromCharSequence(civarString);
final List<Civar> unrolledCivars = civar.optionalizeAll().unroll();
List<Haplotype> result = new ArrayList<>(unrolledCivars.size());
for (final Civar c : unrolledCivars) {
final String baseString = c.applyTo(reference);
final Haplotype haplotype = new Haplotype(baseString.getBytes(),baseString.equals(reference));
haplotype.setGenomeLocation(genomeLocParser.createGenomeLoc("chr1",1,reference.length()));
try {
haplotype.setCigar(c.toCigar(reference.length()));
} catch (final RuntimeException ex) {
c.applyTo(reference);
c.toCigar(reference.length());
throw new RuntimeException("" + c + " " + ex.getMessage(),ex);
}
result.add(haplotype);
}
return result;
}
public List<Haplotype> haplotypeList() {
if (haplotypeList == null) {
final List<Haplotype> result = new ArrayList<>(haplotypeCigars.length);
final String reference = this.reference;
for (final String cigar : haplotypeCigars) {
if (cigar.matches("^Civar:.*$")) {
stringRepresentation = cigar.substring(6);
result.addAll(expandAllHaplotypeCombinations(cigar.substring(6), reference));
} else if (cigar.matches("^.*\\d+.*$")) {
result.add(cigarToHaplotype(reference, cigar, 0, true));
} else {
final Haplotype h = new Haplotype(cigar.getBytes());
h.setGenomeLocation(genomeLocParser.createGenomeLoc("chr1",1,reference.length()));
result.add(h);
}
}
haplotypeList = result;
}
return haplotypeList;
}
protected SAMSequenceDictionary artificialSAMSequenceDictionary() {
return new SAMSequenceDictionary(Collections.singletonList(new SAMSequenceRecord("00",reference.length())));
}
protected SAMFileHeader artificialSAMFileHeader() {
return ArtificialSAMUtils.createArtificialSamHeader(artificialSAMSequenceDictionary());
}
public List<GATKSAMRecord> readList() {
if (readList == null) {
final SAMFileHeader header = artificialSAMFileHeader();
readList = new ArrayList<>(readCigars.length);
final List<String> haplotypes = haplotypesStrings();
int count = 0;
for (final String descr : readCigars) {
String sequence;
if (descr.matches("^\\d+:\\d+:.+$")) {
final String[] parts = descr.split(":");
int allele = Integer.valueOf(parts[0]);
int offset = Integer.valueOf(parts[1]);
final String cigar = parts[2];
final String base = allele == 0 ? reference : haplotypes.get(allele - 1);
sequence = applyCigar(base, cigar, offset, false);
final GATKSAMRecord samRecord = ArtificialSAMUtils.createArtificialRead(header, "read_" + count, 0, 1, sequence.getBytes(), Arrays.copyOf(bq, sequence.length()));
readList.add(new MyGATKSAMRecord(samRecord));
} else if (descr.matches("^\\*:\\d+:\\d+$")) {
int readCount = Integer.valueOf(descr.split(":")[1]);
int readLength = Integer.valueOf(descr.split(":")[2]);
readList.addAll(generateSamRecords(haplotypes, readCount, readLength, header, count));
} else {
sequence = descr;
final GATKSAMRecord samRecord = ArtificialSAMUtils.createArtificialRead(header, "read_" + count, 0, 1, sequence.getBytes(), Arrays.copyOf(bq, sequence.length()));
readList.add(new MyGATKSAMRecord(samRecord));
}
count = readList.size();
}
}
return readList;
}
public List<List<Civar.ElementOffset>> readEventOffsetList() {
if (haplotypeCigars.length != 1 || !haplotypeCigars[0].startsWith("Civar:"))
throw new UnsupportedOperationException();
if (readEventOffsetList == null) {
final Civar civar = Civar.fromCharSequence(haplotypeCigars[0].substring(6));
final List<Civar> unrolledCivars = civar.optionalizeAll().unroll();
readEventOffsetList = new ArrayList<>(readCigars.length);
int count = 0;
for (final String descr : readCigars) {
if (descr.matches("^\\d+:\\d+:.+$")) {
throw new UnsupportedOperationException();
} else if (descr.matches("^\\*:\\d+:\\d+$")) {
int readCount = Integer.valueOf(descr.split(":")[1]);
int readLength = Integer.valueOf(descr.split(":")[2]);
readEventOffsetList.addAll(generateElementOffsetRecords(haplotypesStrings(), unrolledCivars, readCount, readLength, count));
} else {
throw new UnsupportedOperationException();
}
count = readEventOffsetList.size();
}
readEventOffsetList = Collections.unmodifiableList(readEventOffsetList);
}
return readEventOffsetList;
}
@SuppressWarnings("unused")
public String cigarToSequence(final String cigar) {
String reference = this.reference;
return applyCigar(reference, cigar,0,true);
}
@SuppressWarnings("unused")
public GATKSAMRecord readFromString(final String readSequence) {
if (readBySequence == null) {
final List<GATKSAMRecord> readList = readList();
readBySequence = new HashMap<>(readList.size());
for (final GATKSAMRecord r : readList)
readBySequence.put(r.getReadString(),r);
}
return readBySequence.get(readSequence);
}
public List<Civar> unrolledCivars() {
if (haplotypeCigars.length != 1 || !haplotypeCigars[0].startsWith("Civar:"))
throw new UnsupportedOperationException();
final Civar civar = Civar.fromCharSequence(haplotypeCigars[0].substring(6));
return civar.optionalizeAll().unroll();
}
public void introduceErrors(final Random rnd) {
final List<GATKSAMRecord> reads = readList();
final ArrayList<GATKSAMRecord> result = new ArrayList<>(reads.size());
for (final GATKSAMRecord read : reads) {
result.add(new MyGATKSAMRecord(read,rnd));
}
readList = result;
}
private class MyGATKSAMRecord extends GATKSAMRecord {
protected MyGATKSAMRecord(final GATKSAMRecord r) {
super(r);
this.setMappingQuality(100);
GATKBin.setReadIndexingBin(this, -1);
}
ExponentialDistribution indelLengthDist = MathUtils.exponentialDistribution(1.0 / 0.9);
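        /**
         * Copy constructor that injects random errors into the read: at each position an insertion of
         * bases copied from a random reference offset, a deletion (skipped read bases) or a base
         * substitution is made with probabilities derived from the insertion, deletion and base
         * qualities respectively; indel lengths are drawn from {@link #indelLengthDist}.
         */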
public MyGATKSAMRecord(final GATKSAMRecord r, final Random rnd) {
super(r);
this.setMappingQuality(100);
// setting read indexing bin last
final byte[] bases = new byte[r.getReadBases().length];
final byte[] readBases = r.getReadBases();
final byte[] bq = r.getBaseQualities();
final byte[] iq = r.getBaseInsertionQualities();
final byte[] dq = r.getBaseDeletionQualities();
int refOffset = r.getAlignmentStart() - 1;
int readOffset = 0;
for (int i = 0; i < r.getReadBases().length;) {
double p = rnd.nextDouble();
double iqp = QualityUtils.qualToErrorProb(iq[i]);
if (p < iqp) { // insertion
final int length = Math.min(generateIndelLength(rnd),r.getReadBases().length - i);
final int refStart = rnd.nextInt(reference.length() - length);
System.arraycopy(referenceBytes,refStart,bases,i,length);
i += length;
continue;
}
p -= iqp;
double dqp = QualityUtils.qualToErrorProb(dq[i]);
if (p < dqp) {
final int length = generateIndelLength(rnd);
refOffset += length;
refOffset = refOffset % referenceBytes.length;
readOffset += length;
continue;
}
p -= dqp;
double bqp = QualityUtils.qualToErrorProb(bq[i]);
byte b = readOffset < readBases.length ? readBases[readOffset] : referenceBytes[refOffset];
byte nb;
if (p < bqp) {
switch (b) {
case 'A': nb = 'C'; break;
case 'T': nb = 'A'; break;
case 'C': nb = 'G'; break;
                        case 'G': nb = 'T'; break;
default: nb = 'A';
}
} else
nb = b;
bases[i++] = nb;
refOffset++;
refOffset = refOffset % referenceBytes.length;
readOffset++;
}
this.setReadBases(bases);
this.setBaseQualities(r.getBaseQualities());
this.setReadName(r.getReadName());
GATKBin.setReadIndexingBin(this, -1);
}
private int generateIndelLength(final Random rnd) {
final int length;
try {
length = (int) Math.round(indelLengthDist.inverseCumulativeProbability(rnd.nextDouble()) + 1);
} catch (Exception e) {
throw new RuntimeException(e);
}
return length;
}
@Override
public byte[] getBaseDeletionQualities() {
return Arrays.copyOf(dq,getReadLength());
}
@Override
public byte[] getBaseInsertionQualities() {
return Arrays.copyOf(iq,getReadLength());
}
@Override
public int getMappingQuality() {
return 100;
}
@Override
public int hashCode() {
return getReadName().hashCode();
}
@Override
public boolean equals(Object o) {
if (o instanceof GATKSAMRecord) {
return getReadName().equals(((GATKSAMRecord)o).getReadName());
} else {
return false;
}
}
public String toString() {
return super.toString() + " " + this.getReadString();
}
}
public List<String> readStrings() {
final List<String> result = new ArrayList<>(readCigars.length);
final List<String> haplotypes = haplotypesStrings();
for (final String descr : readCigars) {
String sequence;
if (descr.matches("^\\d+:\\d+:.+$")) {
final String[] parts = descr.split(":");
int allele = Integer.valueOf(parts[0]);
int offset = Integer.valueOf(parts[1]);
final String cigar = parts[2];
final String base = allele == 0 ? reference : haplotypes.get(allele - 1);
sequence = applyCigar(base, cigar, offset, false);
result.add(sequence);
} else if (descr.matches("\\*:^\\d+:\\d+")) {
int readCount = Integer.valueOf(descr.split(":")[1]);
int readLength = Integer.valueOf(descr.split(":")[2]);
result.addAll(generateReads(haplotypes, readCount, readLength));
} else {
sequence = descr;
result.add(sequence);
}
}
return result;
}
private List<String> generateReads(final List<String> haplotypes, final int readCount, final int readLength) {
final List<String> result = new ArrayList<>(readCount);
for (int i = 0; i < readCount; i++) {
int hi = i % haplotypes.size();
final String h = haplotypes.get(hi);
            int offset = h.length() <= readLength ? 0 : i % (h.length() - readLength);
result.add(h.substring(offset,offset + readLength));
}
return result;
}
private List<MyGATKSAMRecord> generateSamRecords(final List<String> haplotypes, final int readCount, final int readLength, final SAMFileHeader header, final int idStart) {
int id = idStart;
final List<MyGATKSAMRecord> result = new ArrayList<>(readCount);
for (int i = 0; i < readCount; i++) {
int hi = i % haplotypes.size();
final String h = haplotypes.get(hi);
int offset = h.length() <= readLength ? 0 : i % (h.length() - readLength);
int to = Math.min(h.length(),offset + readLength);
byte[] bases = h.substring(offset,to).getBytes();
byte[] quals = Arrays.copyOf(bq,to - offset);
final GATKSAMRecord samRecord = ArtificialSAMUtils.createArtificialRead(header,"read_" + id++,0,offset + 1,bases, quals);
result.add(new MyGATKSAMRecord(samRecord));
}
return result;
}
private List<List<Civar.ElementOffset>> generateElementOffsetRecords(final List<String> haplotypes, final List<Civar> unrolledCivars, final int readCount, final int readLength, final int count) {
final List<List<Civar.ElementOffset>> result = new ArrayList<>(readCount);
for (int i = 0; i < readCount; i++) {
int hi = i % unrolledCivars.size();
final Civar c = unrolledCivars.get(hi);
final String h = haplotypes.get(hi);
int offset = h.length() <= readLength ? 0 : i % (h.length() - readLength);
int to = Math.min(h.length(),offset + readLength);
result.add(c.eventOffsets(reference,offset,to));
}
return result;
}
private static final Pattern cigarPattern = Pattern.compile("(\\d+)([=A-Z])");
private Haplotype cigarToHaplotype(final String reference, final String cigar, final int offset, final boolean global) {
final String sequence = applyCigar(reference,cigar,offset,global);
final Haplotype haplotype = new Haplotype(sequence.getBytes(),reference.equals(sequence));
haplotype.setGenomeLocation(genomeLocParser.createGenomeLoc("chr1",1,reference.length()));
haplotype.setCigar(Civar.fromCharSequence(cigar).toCigar(reference.length()));
return haplotype;
}
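    /**
     * Applies an extended cigar to a sequence: '=' copies bases, 'D' skips bases, 'I' inserts the bases
     * spelled right after the operator in the cigar string, and 'V', 'W' and 'T' substitute the current
     * base with the corresponding transversion or transition (see the helper methods below).
     */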
private String applyCigar(final String reference, final String cigar, final int offset, final boolean global) {
final Matcher pm = cigarPattern.matcher(cigar);
StringBuffer sb = new StringBuffer();
int index = offset;
while (pm.find()) {
int length = Integer.valueOf(pm.group(1));
char operator = pm.group(2).charAt(0);
switch (operator) {
case '=' :
try {
sb.append(reference.substring(index, index + length));
} catch (Exception e) {
throw new RuntimeException(" " + index + " " + (index + length) + " " + reference.length() + " " + cigar,e);
}
index += length; break;
case 'D' :
index += length; break;
case 'I' :
String insert = cigar.substring(pm.end(),pm.end() + length).toUpperCase();
sb.append(insert); break;
case 'V' :
sb.append(transversionV(reference.charAt(index))); index++; break;
case 'W' :
sb.append(transversionW(reference.charAt(index))); index++; break;
case 'T' :
sb.append(transition(reference.charAt(index))); index++; break;
default:
throw new UnsupportedOperationException("cigar operator " + operator + " not supported.");
}
}
if (global && index != reference.length()) {
throw new RuntimeException(" haplotype cigar does not explain reference length (" + index + " != " + reference.length() + ") on cigar " + cigar);
} else if (index > reference.length()) {
throw new RuntimeException(" index beyond end ");
}
return sb.toString();
}
protected int kmerSize() {
return kmerSize;
}
private char transversionV(final char c) {
switch (Character.toUpperCase(c)) {
case 'A': return 'C';
case 'G': return 'T';
case 'C': return 'A';
case 'T': return 'G';
default:
return c;
}
}
private char transversionW(final char c) {
switch (Character.toUpperCase(c)) {
case 'A': return 'T';
case 'G': return 'C';
case 'T': return 'A';
case 'C': return 'G';
default:
return c;
}
}
private char transition(final char c) {
switch (Character.toUpperCase(c)) {
case 'A': return 'G';
case 'G': return 'A';
case 'T': return 'C';
case 'C': return 'T';
default:
return c;
}
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/HaplotypeResolver.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller;
import org.broadinstitute.gatk.utils.commandline.Argument;
import org.broadinstitute.gatk.utils.commandline.Input;
import org.broadinstitute.gatk.utils.commandline.Output;
import org.broadinstitute.gatk.utils.commandline.RodBinding;
import org.broadinstitute.gatk.engine.CommandLineGATK;
import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
import org.broadinstitute.gatk.engine.walkers.Reference;
import org.broadinstitute.gatk.engine.walkers.RodWalker;
import org.broadinstitute.gatk.engine.walkers.Window;
import org.broadinstitute.gatk.utils.GenomeLoc;
import org.broadinstitute.gatk.utils.haplotype.Haplotype;
import org.broadinstitute.gatk.utils.smithwaterman.SWPairwiseAlignment;
import org.broadinstitute.gatk.utils.help.HelpConstants;
import htsjdk.variant.vcf.VCFHeader;
import htsjdk.variant.vcf.VCFHeaderLine;
import htsjdk.variant.vcf.VCFHeaderLineType;
import htsjdk.variant.vcf.VCFInfoHeaderLine;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextBuilder;
import htsjdk.variant.variantcontext.VariantContextUtils;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.variantcontext.writer.SortingVariantContextWriter;
import java.util.*;
/**
* Haplotype-based resolution of variants in separate callsets.
*
* <p>
* HaplotypeResolver is a tool that takes two VCF files and constructs haplotypes based on the variants inside them.
* From that, it can resolve potential differences in variant calls that are inherently the same (or similar) variants.
* Records are annotated with the set and status attributes.
* </p>
*
* <h3>Input</h3>
* <p>
* Two variant files to resolve.
* </p>
*
* <h3>Output</h3>
* <p>
* A single consensus VCF.
* </p>
*
* <h3>Usage example</h3>
* <pre>
* java -jar GenomeAnalysisTK.jar \
* -T HaplotypeResolver \
* -R reference.fasta \
* -V:v1 input1.vcf \
* -V:v2 input2.vcf \
* -o output.vcf
* </pre>
*
*/
@DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} )
@Reference(window=@Window(start=-HaplotypeResolver.ACTIVE_WINDOW,stop= HaplotypeResolver.ACTIVE_WINDOW))
public class HaplotypeResolver extends RodWalker<Integer, Integer> {
protected static final String INTERSECTION_SET = "intersection";
protected static final String SAME_STATUS = "same";
protected static final String SOME_ALLELES_MATCH_STATUS = "someAllelesMatch";
protected static final String SAME_START_DIFFERENT_ALLELES_STATUS = "sameStartDifferentAlleles";
protected static final String SAME_BY_HAPLOTYPE_STATUS = "sameByHaplotype";
protected static final String ONE_ALLELE_SUBSET_OF_OTHER_STATUS = "OneAlleleSubsetOfOther";
protected static final String OVERLAPPING_EVENTS_STATUS = "overlappingEvents";
protected final static int MAX_DISTANCE_BETWEEN_MERGED_RECORDS = 50;
protected final static int MAX_HAPLOTYPE_TO_CONSIDER = 1000;
protected final static int MAX_VARIANT_SIZE_TO_CONSIDER = 100;
protected final static int ACTIVE_WINDOW = MAX_HAPLOTYPE_TO_CONSIDER + MAX_VARIANT_SIZE_TO_CONSIDER;
@Input(fullName="variant", shortName = "V", doc="Input VCF file", required=true)
public List<RodBinding<VariantContext>> variants;
@Output(doc="File to which variants should be written")
protected VariantContextWriter baseWriter = null;
private VariantContextWriter writer;
/**
* Set to 'null' if you don't want the set field emitted.
*/
@Argument(fullName="setKey", shortName="setKey", doc="Key used in the INFO key=value tag emitted describing which set the combined VCF record came from", required=false)
protected String SET_KEY = "set";
/**
* Set to 'null' if you don't want the status field emitted.
*/
@Argument(fullName="statusKey", shortName="statusKey", doc="Key used in the INFO key=value tag emitted describing the extent to which records match", required=false)
protected String STATUS_KEY = "status";
private final LinkedList<VCcontext> queue = new LinkedList<VCcontext>();
private String source1, source2;
private final List<VariantContext> sourceVCs1 = new ArrayList<VariantContext>();
private final List<VariantContext> sourceVCs2 = new ArrayList<VariantContext>();
private class VCcontext {
public final Collection<VariantContext> vcs;
public final GenomeLoc loc;
public final ReferenceContext ref;
public VCcontext(final Collection<VariantContext> vcs, final ReferenceContext ref) {
this.vcs = vcs;
this.loc = getToolkit().getGenomeLocParser().createGenomeLoc(vcs.iterator().next());
this.ref = ref;
}
}
public void initialize() {
if ( variants.size() != 2 ) {
throw new UserException.BadArgumentValue("variant", "this tool requires exactly 2 input variant files");
}
source1 = variants.get(0).getName();
source2 = variants.get(1).getName();
if ( SET_KEY.toLowerCase().equals("null") )
SET_KEY = null;
if ( STATUS_KEY.toLowerCase().equals("null") )
STATUS_KEY = null;
// for now, INFO and FORMAT fields are not propagated to the output VCF (so they aren't put into the header)
Set<VCFHeaderLine> headerLines = new HashSet<VCFHeaderLine>();
if ( SET_KEY != null )
headerLines.add(new VCFInfoHeaderLine(SET_KEY, 1, VCFHeaderLineType.String, "Source VCF for the merged record"));
if ( STATUS_KEY != null )
headerLines.add(new VCFInfoHeaderLine(STATUS_KEY, 1, VCFHeaderLineType.String, "Extent to which records match"));
final VCFHeader vcfHeader = new VCFHeader(headerLines, Collections.<String>emptySet());
baseWriter.writeHeader(vcfHeader);
writer = new SortingVariantContextWriter(baseWriter, ACTIVE_WINDOW);
}
public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
if ( tracker == null )
return 0;
final Collection<VariantContext> VCs = tracker.getValues(variants, context.getLocation());
if ( VCs.size() == 0 )
return 0;
final VCcontext vc = new VCcontext(VariantContextUtils.sitesOnlyVariantContexts(VCs), ref);
// TODO -- what should we do about filtered records?
if ( !queue.isEmpty() ) {
final VCcontext previous = queue.getLast();
if ( !previous.loc.onSameContig(vc.loc) ||
previous.loc.distance(vc.loc) > MAX_DISTANCE_BETWEEN_MERGED_RECORDS ||
queue.getFirst().loc.distance(vc.loc) > MAX_HAPLOTYPE_TO_CONSIDER ) {
purgeQueue();
}
}
queue.addLast(vc);
return 0;
}
public Integer reduceInit() { return 0; }
public Integer reduce(Integer value, Integer sum) {
return sum + value;
}
public void onTraversalDone(Integer result) {
if ( !queue.isEmpty() )
purgeQueue();
writer.close();
}
private void purgeQueue() {
final ReferenceContext refContext = queue.getFirst().ref;
// divide them up by source
while ( !queue.isEmpty() ) {
VCcontext context = queue.removeFirst();
for ( final VariantContext vc: context.vcs ) {
if ( vc.getSource().equals(source1) )
sourceVCs1.add(vc);
else
sourceVCs2.add(vc);
}
}
writeAndPurgeAllEqualVariants(sourceVCs1, sourceVCs2, SAME_STATUS);
if ( sourceVCs1.isEmpty() ) {
writeAll(sourceVCs2, source2, null);
} else if ( sourceVCs2.isEmpty() ) {
writeAll(sourceVCs1, source1, null);
} else {
resolveByHaplotype(refContext);
}
// allow for GC of the data
sourceVCs1.clear();
sourceVCs2.clear();
}
private void writeAll(final List<VariantContext> sourceVCs, final String set, final String status) {
for ( final VariantContext vc : sourceVCs ) {
writeOne(vc, set, status);
}
}
private void writeOne(final VariantContext vc, final String set, final String status) {
final Map<String, Object> attrs = new HashMap<>();
if ( SET_KEY != null && set != null )
attrs.put(SET_KEY, set);
if ( STATUS_KEY != null && status != null )
attrs.put(STATUS_KEY, status);
writer.add(new VariantContextBuilder(vc).attributes(attrs).make());
}
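    /**
     * Walks the two position-sorted variant lists in parallel; records starting at the same locus are
     * compared allele by allele, and those that overlap are written once to the output and removed
     * from both lists, leaving only the unresolved differences behind.
     */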
private void writeAndPurgeAllEqualVariants(final List<VariantContext> sourceVCs1, final List<VariantContext> sourceVCs2, final String status) {
int currentIndex1 = 0, currentIndex2 = 0;
int size1 = sourceVCs1.size(), size2 = sourceVCs2.size();
VariantContext current1 = (currentIndex1 < size1 ? sourceVCs1.get(currentIndex1): null);
VariantContext current2 = (currentIndex2 < size2 ? sourceVCs2.get(currentIndex2): null);
while ( current1 != null && current2 != null ) {
final GenomeLoc loc1 = getToolkit().getGenomeLocParser().createGenomeLoc(current1);
final GenomeLoc loc2 = getToolkit().getGenomeLocParser().createGenomeLoc(current2);
if ( loc1.equals(loc2) ||
(loc1.getStart() == loc2.getStart() && (current1.getAlternateAlleles().size() > 1 || current2.getAlternateAlleles().size() > 1)) ) {
// test the alleles
if ( determineAndWriteOverlap(current1, current2, status) ) {
sourceVCs1.remove(currentIndex1);
sourceVCs2.remove(currentIndex2);
size1--;
size2--;
} else {
currentIndex1++;
currentIndex2++;
}
current1 = (currentIndex1 < size1 ? sourceVCs1.get(currentIndex1): null);
current2 = (currentIndex2 < size2 ? sourceVCs2.get(currentIndex2): null);
} else if ( loc1.isBefore(loc2) ) {
currentIndex1++;
current1 = (currentIndex1 < size1 ? sourceVCs1.get(currentIndex1): null);
} else {
currentIndex2++;
current2 = (currentIndex2 < size2 ? sourceVCs2.get(currentIndex2): null);
}
}
}
private boolean determineAndWriteOverlap(final VariantContext vc1, final VariantContext vc2, final String status) {
final int allelesFrom1In2 = findOverlap(vc1, vc2);
final int allelesFrom2In1 = findOverlap(vc2, vc1);
final int totalAllelesIn1 = vc1.getAlternateAlleles().size();
final int totalAllelesIn2 = vc2.getAlternateAlleles().size();
final boolean allAllelesFrom1Overlap = allelesFrom1In2 == totalAllelesIn1;
final boolean allAllelesFrom2Overlap = allelesFrom2In1 == totalAllelesIn2;
boolean thereIsOverlap = true;
if ( allAllelesFrom1Overlap && allAllelesFrom2Overlap ) {
writeOne(vc1, INTERSECTION_SET, status);
} else if ( allAllelesFrom1Overlap ) {
writeOne(vc2, INTERSECTION_SET, source1 + "IsSubsetOf" + source2);
} else if ( allAllelesFrom2Overlap ) {
writeOne(vc1, INTERSECTION_SET, source2 + "IsSubsetOf" + source1);
} else if ( allelesFrom1In2 > 0 ) {
writeOne(vc1, INTERSECTION_SET, SOME_ALLELES_MATCH_STATUS);
} else if ( totalAllelesIn1 > 1 || totalAllelesIn2 > 1 ) { // we don't handle multi-allelics in the haplotype-based reconstruction
writeOne(vc1, INTERSECTION_SET, SAME_START_DIFFERENT_ALLELES_STATUS);
} else {
thereIsOverlap = false;
}
return thereIsOverlap;
}
private static int findOverlap(final VariantContext target, final VariantContext comparison) {
int overlap = 0;
for ( final Allele allele : target.getAlternateAlleles() ) {
if ( comparison.hasAlternateAllele(allele) )
overlap++;
}
return overlap;
}
private static final int SW_MATCH = 40;
private static final int SW_MISMATCH = -100;
private static final int SW_GAP = -250;
private static final int SW_GAP_EXTEND = -13;
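    /**
     * Resolves the remaining differences by building one synthetic haplotype per callset, aligning each
     * haplotype back to the reference with Smith-Waterman, re-deriving variants from those alignments,
     * and then comparing the two derived variant sets.
     */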
private void resolveByHaplotype(final ReferenceContext refContext) {
final byte[] source1Haplotype = generateHaplotype(sourceVCs1, refContext);
final byte[] source2Haplotype = generateHaplotype(sourceVCs2, refContext);
final SWPairwiseAlignment swConsensus1 = new SWPairwiseAlignment( refContext.getBases(), source1Haplotype, SW_MATCH, SW_MISMATCH, SW_GAP, SW_GAP_EXTEND );
final SWPairwiseAlignment swConsensus2 = new SWPairwiseAlignment( refContext.getBases(), source2Haplotype, SW_MATCH, SW_MISMATCH, SW_GAP, SW_GAP_EXTEND );
// protect against SW failures
if( swConsensus1.getCigar().toString().contains("S") || swConsensus1.getCigar().getReferenceLength() < 20 ||
swConsensus2.getCigar().toString().contains("S") || swConsensus2.getCigar().getReferenceLength() < 20 ) {
// TODO -- handle errors appropriately
logger.debug("Bad SW alignment; aborting at " + refContext.getLocus());
return;
}
// order results by start position
final TreeMap<Integer, VariantContext> source1Map = new TreeMap<Integer, VariantContext>(HaplotypeCallerGenotypingEngine.generateVCsFromAlignment(new Haplotype(source1Haplotype, false, 0, swConsensus1.getCigar()), refContext.getBases(), refContext.getWindow(), source1));
final TreeMap<Integer, VariantContext> source2Map = new TreeMap<Integer, VariantContext>(HaplotypeCallerGenotypingEngine.generateVCsFromAlignment(new Haplotype(source2Haplotype, false, 0, swConsensus2.getCigar()), refContext.getBases(), refContext.getWindow(), source2));
if ( source1Map.size() == 0 || source2Map.size() == 0 ) {
// TODO -- handle errors appropriately
logger.debug("No source alleles; aborting at " + refContext.getLocus());
return;
}
// create lists and test for equality
final List<VariantContext> source1Alleles = new ArrayList<VariantContext>(source1Map.values());
final List<VariantContext> source2Alleles = new ArrayList<VariantContext>(source2Map.values());
writeAndPurgeAllEqualVariants(source1Alleles, source2Alleles, SAME_BY_HAPLOTYPE_STATUS);
if ( source1Alleles.isEmpty() ) {
writeAll(source2Alleles, source2, null);
} else if ( source2Alleles.isEmpty() ) {
writeAll(source1Alleles, source1, null);
} else {
writeDifferences(source1Alleles, source2Alleles);
}
}
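    /**
     * Builds a haplotype for one callset by walking the reference window and splicing in the first
     * alternate allele of each variant in place of its reference allele.
     */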
private byte[] generateHaplotype(final List<VariantContext> sourceVCs, final ReferenceContext refContext) {
final StringBuilder sb = new StringBuilder();
final int startPos = refContext.getWindow().getStart();
int currentPos = startPos;
final byte[] reference = refContext.getBases();
for ( final VariantContext vc : sourceVCs ) {
// add any missing reference context
int vcStart = vc.getStart();
final int refAlleleLength = vc.getReference().length();
if ( refAlleleLength == vc.getEnd() - vc.getStart() ) // this is a deletion (whereas for other events the padding base isn't part of the position)
vcStart++;
while ( currentPos < vcStart )
sb.append((char)reference[currentPos++ - startPos]);
// add the alt allele
sb.append(vc.getAlternateAllele(0).getBaseString());
// skip the reference allele
currentPos += refAlleleLength;
}
// add any missing reference context
final int stopPos = refContext.getWindow().getStop();
while ( currentPos < stopPos )
sb.append((char)reference[currentPos++ - startPos]);
return sb.toString().getBytes();
}
private void writeDifferences(final List<VariantContext> source1Alleles, final List<VariantContext> source2Alleles) {
int currentIndex1 = 0, currentIndex2 = 0;
final int size1 = source1Alleles.size(), size2 = source2Alleles.size();
VariantContext current1 = source1Alleles.get(0);
VariantContext current2 = source2Alleles.get(0);
while ( currentIndex1 < size1 || currentIndex2 < size2 ) {
if ( current1 == null ) {
writeOne(current2, source2, null);
currentIndex2++;
current2 = (currentIndex2 < size2 ? source2Alleles.get(currentIndex2): null);
} else if ( current2 == null ) {
writeOne(current1, source1, null);
currentIndex1++;
current1 = (currentIndex1 < size1 ? source1Alleles.get(currentIndex1): null);
} else {
final GenomeLoc loc1 = getToolkit().getGenomeLocParser().createGenomeLoc(current1);
final GenomeLoc loc2 = getToolkit().getGenomeLocParser().createGenomeLoc(current2);
if ( loc1.getStart() == loc2.getStart() || loc1.overlapsP(loc2) ) {
String status;
if ( loc1.getStart() == loc2.getStart() ) {
final String allele1 = current1.getAlternateAllele(0).getBaseString();
final String allele2 = current2.getAlternateAllele(0).getBaseString();
if ( allele1.indexOf(allele2) != -1 || allele2.indexOf(allele1) != -1 )
status = ONE_ALLELE_SUBSET_OF_OTHER_STATUS;
else
status = SAME_START_DIFFERENT_ALLELES_STATUS;
} else {
status = OVERLAPPING_EVENTS_STATUS;
}
writeOne(current1, INTERSECTION_SET, status);
currentIndex1++;
currentIndex2++;
current1 = (currentIndex1 < size1 ? source1Alleles.get(currentIndex1): null);
current2 = (currentIndex2 < size2 ? source2Alleles.get(currentIndex2): null);
} else if ( loc1.isBefore(loc2) ) {
writeOne(current1, source1, null);
currentIndex1++;
current1 = (currentIndex1 < size1 ? source1Alleles.get(currentIndex1): null);
} else {
writeOne(current2, source2, null);
currentIndex2++;
current2 = (currentIndex2 < size2 ? source2Alleles.get(currentIndex2): null);
}
}
}
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/cancer/m2/M2_Contamination_Evaluation.md
# M2 Contamination Correction Evaluation
In order to evaluate the efficacy of the contamination correction in M2 (adapted from HaplotypeCaller), we created synthetic data consisting of the four intra-run CRSP NA12878 replicates, each contaminated with 1-5% of the HCC-1143 normal.
### Creating Artificial Contamination Data
```
QUEUE_JAR=<your-queue-jar>
GSA_UNSTABLE_HOME=<path-to-your-gsa-unstable-checkout>
BASELINE_BAM=/crsp/picard_aggregation/000007820918/SM-612V3/current/SM-612V3.bam
CONTAMINANT_BAM=/seq/tier3b/picard_aggregation/C970/HCC1143_BL/v1/HCC1143_BL.bam
```
```
java -jar $QUEUE_JAR \
-S $GSA_UNSTABLE_HOME/private/gatk-queue-extensions-internal/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/dev/CreateSyntheticContaminationScript.scala \
 -b2 $CONTAMINANT_BAM -b1 $BASELINE_BAM \
-f 0.01 -f 0.02 -f 0.03 -f 0.04 -f 0.05
```
Repeat for the other three NA12878 replicates:
```
BASELINE_BAM=/crsp/picard_aggregation/000007820818/SM-612V4/current/SM-612V4.bam
BASELINE_BAM=/crsp/picard_aggregation/000007820718/SM-612V5/current/SM-612V5.bam
BASELINE_BAM=/crsp/picard_aggregation/000007820618/SM-612V6/current/SM-612V6.bam
```
Use ContEst to get the contamination estimate in the data to be passed into M2. (Note that for these data, the ContEst estimate is on the order of 1% higher than the value used to generate the contaminated data.)
```
TEMPDIR=/broad/hptmp/$USER
BAM1=HCC1143_BL.small.0.04.contaminated.with.SM-612V3.small.0.96.bam
BAM2=/crsp/picard_aggregation/000007820818/SM-612V4/current/SM-612V4.bam
OUT_TXT=ContEst_0.04HCC1143inNA12878.txt
java -Djava.io.tmpdir=$TEMPDIR \
 -Xmx512m -jar /xchip/tcga/gdac_prod/applications/process_mgmt/firehose_task_registry/cga/ContaminationAnalysis/broadinstitute.org/cancer.genome.analysis/00262/107//Queue-1.4-437-g6b8a9e1-svn-35362.jar \
 -S /xchip/tcga/gdac_prod/applications/process_mgmt/firehose_task_registry/cga/ContaminationAnalysis/broadinstitute.org/cancer.genome.analysis/00262/107//ContaminationPipeline.scala \
 -reference /seq/references/Homo_sapiens_assembly19/v1/Homo_sapiens_assembly19.fasta -interval /xchip/cga/reference/hg19/whole_exome_agilent_1.1_refseq_plus_3_boosters_plus_10bp_padding_minus_mito.Homo_sapiens_assembly19.targets.interval_list \
 -out $OUT_TXT \
 -bam $BAM1 -nbam $BAM2 \
 -array /xchip/gcc_data/results2/production/TEENY/BySample/hg19.vcf.txt/TEENY_p_TCGA_302_303_N_GenomeWideSNP_6_F04_1344608.hg19.vcf.txt.store/verstore.00000.TEENY_p_TCGA_302_303_N_GenomeWideSNP_6_F04_1344608.hg19.vcf.txt \
 -pop /xchip/cga/reference/hg19/hg19_population_stratified_af_hapmap_3.3.fixed.vcf -faf true -run -array_interval /xchip/cga/reference/hg19/SNP6.hg19.interval_list
```
```
BAM1=HCC1143_BL.small.0.03.contaminated.with.SM-612V3.small.0.97.bam
BAM2=/crsp/picard_aggregation/000007820818/SM-612V4/current/SM-612V4.bam
OUT_TXT=ContEst_0.03HCC1143inNA12878.txt
BAM1=HCC1143_BL.small.0.02.contaminated.with.SM-612V3.small.0.98.bam
BAM2=/crsp/picard_aggregation/000007820818/SM-612V4/current/SM-612V4.bam
OUT_TXT=ContEst_0.02HCC1143inNA12878.txt
BAM1=HCC1143_BL.small.0.01.contaminated.with.SM-612V3.small.0.99.bam
BAM2=/crsp/picard_aggregation/000007820818/SM-612V4/current/SM-612V4.bam
OUT_TXT=ContEst_0.01HCC1143inNA12878.txt
```
And so on for the other replicates.
ContEst estimates for the four replicates at the five contamination levels are as follows:
|Sample|Input Contamination Level|ContEst Estimate|
|------|-------------------------|----------------|
|SM-612V3|0.01|0.016|
|SM-612V3|0.02|0.030|
|SM-612V3|0.03|0.042|
|SM-612V3|0.04|0.055|
|SM-612V3|0.05|0.067|
|SM-612V4|0.01|0.015|
|SM-612V4|0.02|0.028|
|SM-612V4|0.03|0.039|
|SM-612V4|0.04|0.051|
|SM-612V4|0.05|0.063|
|SM-612V5|0.01|0.016|
|SM-612V5|0.02|0.030|
|SM-612V5|0.03|0.042|
|SM-612V5|0.04|0.054|
|SM-612V5|0.05|0.066|
|SM-612V6|0.01|0.015|
|SM-612V6|0.02|0.027|
|SM-612V6|0.03|0.040|
|SM-612V6|0.04|0.051|
|SM-612V6|0.05|0.062|
### Prepare the inputs for the normal-normal calling script
Create a list of all contamination levels for each replicate:
```
ls -1 HCC*contam*V3*.bam > HCC1143withNA12878_3.bams.list
ls -1 HCC*contam*V4*.bam > HCC1143withNA12878_4.bams.list
ls -1 HCC*contam*V5*.bam > HCC1143withNA12878_5.bams.list
ls -1 HCC*contam*V6*.bam > HCC1143withNA12878_6.bams.list
```
Create a list of the other, uncontaminated normals to call against:
```
ls -1 /humgen/gsa-hpprojects/NA12878Collection/bams/crsp_ice_validation/SM-612V[^37D].bam > /dsde/working/mutect/laura/contamination/NA12878_not3.list
ls -1 /humgen/gsa-hpprojects/NA12878Collection/bams/crsp_ice_validation/SM-612V[^47D].bam > /dsde/working/mutect/laura/contamination/NA12878_not4.list
ls -1 /humgen/gsa-hpprojects/NA12878Collection/bams/crsp_ice_validation/SM-612V[^57D].bam > /dsde/working/mutect/laura/contamination/NA12878_not5.list
ls -1 /humgen/gsa-hpprojects/NA12878Collection/bams/crsp_ice_validation/SM-612V[^67D].bam > /dsde/working/mutect/laura/contamination/NA12878_not6.list
```
### Run the Caller
Run M2 on each contaminated bam versus all other replicates. Run one loop for each contaminated replicate, passing in the contamination estimates given above:
```
java -jar $QUEUE_JAR -S /dsde/working/mutect/laura/contamination/Qscript_M2_normalNormalLoop.scala -normal /dsde/working/mutect/laura/contamination/NA12878_not3.list -tumor /dsde/working/mutect/laura/contamination/HCC1143withNA12878_3.bams.list -o M2_NA12878run3_ -f 0.016 -f 0.03 -f 0.042 -f 0.055 -f 0.067
java -jar $QUEUE_JAR -S /dsde/working/mutect/laura/contamination/Qscript_M2_normalNormalLoop.scala -normal /dsde/working/mutect/laura/contamination/NA12878_not4.list -tumor /dsde/working/mutect/laura/contamination/HCC1143withNA12878_4.bams.list -o M2_NA12878run4_ -f 0.015 -f 0.028 -f 0.039 -f 0.051 -f 0.063
java -jar $QUEUE_JAR -S /dsde/working/mutect/laura/contamination/Qscript_M2_normalNormalLoop.scala -normal /dsde/working/mutect/laura/contamination/NA12878_not5.list -tumor /dsde/working/mutect/laura/contamination/HCC1143withNA12878_5.bams.list -o M2_NA12878run5_ -f 0.016 -f 0.030 -f 0.042 -f 0.054 -f 0.066
java -jar $QUEUE_JAR -S /dsde/working/mutect/laura/contamination/Qscript_M2_normalNormalLoop.scala -normal /dsde/working/mutect/laura/contamination/NA12878_not6.list -tumor /dsde/working/mutect/laura/contamination/HCC1143withNA12878_6.bams.list -o M2_NA12878run6_ -f 0.015 -f 0.027 -f 0.040 -f 0.051 -f 0.062
```
### Count the False Positives
Pull out passing SNPs not in PON for each contamination level:
```
for vcf in M2_NA12878run[0-9]_HCC1143_BL.small.0.01.contaminated.with.SM-612V*.bam.vcf
do
bedtools intersect -a $vcf -b ICE.corrected.bed | grep PASS | awk '{ if ( length($4) + length($5) == 2) print $0 }' | wc -l
done
for vcf in M2_NA12878run[0-9]_HCC1143_BL.small.0.02.contaminated.with.SM-612V*.bam.vcf
do
bedtools intersect -a $vcf -b ICE.corrected.bed | grep PASS | awk '{ if ( length($4) + length($5) == 2) print $0 }' | wc -l
done
for vcf in M2_NA12878run[0-9]_HCC1143_BL.small.0.03.contaminated.with.SM-612V*.bam.vcf
do
bedtools intersect -a $vcf -b ICE.corrected.bed | grep PASS | awk '{ if ( length($4) + length($5) == 2) print $0 }' | wc -l
done
for vcf in M2_NA12878run[0-9]_HCC1143_BL.small.0.04.contaminated.with.SM-612V*.bam.vcf
do
bedtools intersect -a $vcf -b ICE.corrected.bed | grep PASS | awk '{ if ( length($4) + length($5) == 2) print $0 }' | wc -l
done
for vcf in M2_NA12878run[0-9]_HCC1143_BL.small.0.05.contaminated.with.SM-612V*.bam.vcf
do
bedtools intersect -a $vcf -b ICE.corrected.bed | grep PASS | awk '{ if ( length($4) + length($5) == 2) print $0 }' | wc -l
done
```
(I pasted the results from the terminal into Excel because it's just so easy.)
### Comparison Without Downsampling
To run normal-normal contaminated calling without downsampling, the above /dsde/working/mutect/laura/contamination/Qscript_M2_normalNormalLoop.scala commands can be used, passing in -f 0 for each contamination level instead, e.g.:
```
java -jar $QUEUE_JAR -S /dsde/working/mutect/laura/contamination/Qscript_M2_normalNormalLoop.scala -normal /dsde/working/mutect/laura/contamination/NA12878_not3.list -tumor /dsde/working/mutect/laura/contamination/HCC1143withNA12878_3.bams.list -o M2_NA12878run3_noContam_ -f 0.0 -f 0.0 -f 0.0 -f 0.0 -f 0.0
```
### Comparison to M1
To run normal-normal contaminated calling using M1, run the above Queue commands using a Queue jar containing MuTect and passing in /dsde/working/mutect/laura/contamination/Qscript_M1_normalNormalLoop.scala instead of Qscript_M2_normalNormalLoop.scala, e.g.:
```
java -jar $QUEUE_JAR_WITH_M1 -S /dsde/working/mutect/laura/contamination/Qscript_M1_normalNormalLoop.scala -normal /dsde/working/mutect/laura/contamination/NA12878_not3.list -tumor /dsde/working/mutect/laura/contamination/HCC1143withNA12878_3.bams.list -o M1_NA12878run3_ -f 0.016 -f 0.03 -f 0.042 -f 0.055 -f 0.067
```
(The MuTect-containing Queue jar can be built from the gsa-unstable branch ldg_MuTect1.)
### Latest Results
|Contamination|M2 SNPs no correction|M2 SNPs with correction|M1 SNPs no correction|M1 SNPs with correction|M2 INDELs no correction|M2 INDELs with correction|
|-------------|---------------------|-----------------------|---------------------|-----------------------|-----------------------|-------------------------|
|0%|93|93|181|181|25|25|
|1%|938|258|854|317|68|30|
|2%|2550|464|1941|385|92|21|
|3%|4171|596|3061|515|134|18|
|4%|5513|707|4002|589|162|21|
|5%|6475|794|4854|624|188|29|<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/cancer/m2/NA12878_Evaluations.md
# CRSP ICE NA12878 Specificity Evaluation
In order to evaluate the specificity of M2, we sequenced replicates of NA12878 using ICE (Illumina Content Exomes) and called all pairwise combinations as tumor-normal pairs. By definition, everything called is a false positive.
The target territory is ```/dsde/working/mutect/crsp_nn/whole_exome_illumina_coding_v1.Homo_sapiens_assembly19.targets.no_empty.interval_list```
All scripts referenced here are relative to the current working directory of ```/dsde/working/mutect/crsp_nn```
### Current M2 Performance
(gsa-unstable 7/13/15, commit:9e93a70)
| type | # of false positives |
|------|----------------------|
|SNP|99|
|INDEL|15|
TODO: write a simple tool to do this more easily
To calculate per-pair counts, run:
```
# for SNPs
for vcf in *.vcf
do
cat $vcf | grep PASS | awk '{ if ( length($4) + length($5) == 2) print $0 }' | wc -l
done > snp-fps.txt
cat snp-fps.txt | awk '{ sum += $1 } END { print sum }'
# for INDELs
for vcf in *.vcf
do
cat $vcf | grep PASS | awk '{ if ( length($4) + length($5) != 2) print $0 }' | wc -l
done > indel-fps.txt
cat indel-fps.txt | awk '{ sum += $1 } END { print sum }'
```
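As a stopgap for the TODO above, both counts can be produced in a single pass per VCF (a sketch; it assumes the per-pair VCFs sit in the current directory, as in the loops above):
```
for vcf in *.vcf
do
    grep -v "^#" $vcf | grep PASS | awk -v name=$vcf \
        '{ if ( length($4) + length($5) == 2 ) snp++; else indel++ }
         END { printf "%s\t%d\t%d\n", name, snp, indel }'
done | awk '{ print; snp += $2; indel += $3 } END { printf "TOTAL\t%d\t%d\n", snp, indel }'
```
This prints one row per pair (VCF, SNP false positives, INDEL false positives) followed by a TOTAL row.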
### Current M1 and Indelocator Performance
For comparison, the M1 & Indelocator calls have been made on this same data set in the Firehose workspace ```CRSP_ICE_NA1878_Production_Analysis``` in the pair set ```NA12878_Replicate_Pairs``` which contains 4 samples and 12 pairwise combinations.
| type | # of false positives |
|------|----------------------|
|SNP|181|
|INDEL|106|
These results can be obtained as follows (from an LSF / CGA node running the FuSE daemon):
```
SNP:
cat /local/cga-fh/cga/CRSP_ICE_NA1878_Production_Analysis/Pair_Set/NA12878_Replicate_Pairs/Pair/*/jobs/capture/mut/calls/latest/*.call_stats.txt | grep KEEP | wc -l
INDEL (need to restrict to target territory):
reuse BEDTools
cat /dsde/working/mutect/crsp_nn/whole_exome_illumina_coding_v1.Homo_sapiens_assembly19.targets.no_empty.interval_list | grep -v "@" | awk '{ print $1 "\t" $2-1 "\t" $3 }' > /tmp/ice.bed
cat /local/cga-fh/cga/CRSP_ICE_NA1878_Production_Analysis/Pair_Set/NA12878_Replicate_Pairs/Pair/*/jobs/capture/indel/maflite/latest/*.full.maf | grep KEEP | cut -f2-4 | awk '{ print $1 "\t" $2-1 "\t" $3 }' > /tmp/indels.bed
bedtools intersect -wa -a /tmp/ice.bed -b /tmp/indels.bed | wc -l
```
### How To Run
The SCALA script for running M2 can be found in the gsa-unstable repository under ```private/gatk-tools-private/src/main/java/org/broadinstitute/gatk/tools/walkers/cancer/m2```
First, choose the appropriate settings (set here as environment variables)
```
QUEUE_JAR=<your-queue-jar>
OUT_VCF=<your-output-vcf>
GSA_UNSTABLE_HOME=<path-to-your-gsa-unstable-checkout>
TEMPDIR=/broad/hptmp/$USER
```
and then run the following Queue command
```
java \
-Djava.io.tmpdir=$TEMPDIR \
-jar $QUEUE_JAR \
-S $GSA_UNSTABLE_HOME/private/gatk-queue-extensions-internal/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/m2/run_M2_ICE_NN.scala \
-sc 50 \
--job_queue gsa -qsub -jobResReq virtual_free=5G -startFromScratch \
--allbams /humgen/gsa-hpprojects/NA12878Collection/bams/crsp_ice_validation//NA12878.intra.flowcell.replicate.bam_list \
-o <your-output-prefix> \
-run
```
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/bqsr/ReadRecalibrationInfo.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.bqsr;
import com.google.java.contract.Ensures;
import com.google.java.contract.Requires;
import org.broadinstitute.gatk.utils.QualityUtils;
import org.broadinstitute.gatk.utils.recalibration.EventType;
import org.broadinstitute.gatk.engine.recalibration.ReadCovariates;
import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
/**
* Created with IntelliJ IDEA.
* User: depristo
* Date: 12/18/12
* Time: 3:50 PM
*
* TODO -- merge in ReadCovariates?
*/
public final class ReadRecalibrationInfo {
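    // All per-base arrays below (quality scores, skips, and error fractions) are parallel to the read's bases:
    // index i describes base i of the read, and their lengths are checked against the read length in the constructor.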
private final GATKSAMRecord read;
private final int length;
private final ReadCovariates covariates;
private final boolean[] skips;
private final byte[] baseQuals, insertionQuals, deletionQuals;
private final double[] snpErrors, insertionErrors, deletionErrors;
public ReadRecalibrationInfo(final GATKSAMRecord read,
final ReadCovariates covariates,
final boolean[] skips,
final double[] snpErrors,
final double[] insertionErrors,
final double[] deletionErrors) {
if ( read == null ) throw new IllegalArgumentException("read cannot be null");
if ( covariates == null ) throw new IllegalArgumentException("covariates cannot be null");
if ( skips == null ) throw new IllegalArgumentException("skips cannot be null");
if ( snpErrors == null ) throw new IllegalArgumentException("snpErrors cannot be null");
if ( insertionErrors == null ) throw new IllegalArgumentException("insertionErrors cannot be null");
if ( deletionErrors == null ) throw new IllegalArgumentException("deletionErrors cannot be null");
this.read = read;
this.baseQuals = read.getBaseQualities();
this.length = baseQuals.length;
this.covariates = covariates;
this.skips = skips;
this.insertionQuals = read.getExistingBaseInsertionQualities();
this.deletionQuals = read.getExistingBaseDeletionQualities();
this.snpErrors = snpErrors;
this.insertionErrors = insertionErrors;
this.deletionErrors = deletionErrors;
        if ( skips.length != length ) throw new IllegalArgumentException("skips.length " + skips.length + " != length " + length);
        if ( snpErrors.length != length ) throw new IllegalArgumentException("snpErrors.length " + snpErrors.length + " != length " + length);
        if ( insertionErrors.length != length ) throw new IllegalArgumentException("insertionErrors.length " + insertionErrors.length + " != length " + length);
        if ( deletionErrors.length != length ) throw new IllegalArgumentException("deletionErrors.length " + deletionErrors.length + " != length " + length);
}
/**
* Get the qual score for event type at offset
*
* @param eventType the type of event we want the qual for
* @param offset the offset into this read for the qual
* @return a valid quality score for event at offset
*/
@Requires("validOffset(offset)")
@Ensures("validQual(result)")
public byte getQual(final EventType eventType, final int offset) {
switch ( eventType ) {
case BASE_SUBSTITUTION: return baseQuals[offset];
// note optimization here -- if we don't have ins/del quals we just return the default byte directly
case BASE_INSERTION: return insertionQuals == null ? GATKSAMRecord.DEFAULT_INSERTION_DELETION_QUAL : insertionQuals[offset];
case BASE_DELETION: return deletionQuals == null ? GATKSAMRecord.DEFAULT_INSERTION_DELETION_QUAL : deletionQuals[offset];
default: throw new IllegalStateException("Unknown event type " + eventType);
}
}
/**
* Get the error fraction for event type at offset
*
* The error fraction is a value between 0 and 1 that indicates how much certainty we have
* in the error occurring at offset. A value of 1 means that the error definitely occurs at this
* site, a value of 0.0 means it definitely doesn't happen here. 0.5 means that half the weight
* of the error belongs here
*
* @param eventType the type of event we want the qual for
* @param offset the offset into this read for the qual
* @return a fractional weight for an error at this offset
*/
@Requires("validOffset(offset)")
@Ensures("result >= 0.0 && result <= 1.0")
public double getErrorFraction(final EventType eventType, final int offset) {
switch ( eventType ) {
case BASE_SUBSTITUTION: return snpErrors[offset];
case BASE_INSERTION: return insertionErrors[offset];
case BASE_DELETION: return deletionErrors[offset];
default: throw new IllegalStateException("Unknown event type " + eventType);
}
}
/**
* Get the read involved in this recalibration info
* @return a non-null GATKSAMRecord
*/
@Ensures("result != null")
public GATKSAMRecord getRead() {
return read;
}
/**
* Should offset in this read be skipped (because it's covered by a known variation site?)
* @param offset a valid offset into this info
* @return true if offset should be skipped, false otherwise
*/
@Requires("validOffset(offset)")
public boolean skip(final int offset) {
return skips[offset];
}
/**
* Get the ReadCovariates object carrying the mapping from offsets -> covariate key sets
* @return a non-null ReadCovariates object
*/
@Ensures("result != null")
public ReadCovariates getCovariatesValues() {
return covariates;
}
/**
* Ensures an offset is valid. Used in contracts
* @param offset a proposed offset
* @return true if offset is valid w.r.t. the data in this object, false otherwise
*/
private boolean validOffset(final int offset) {
return offset >= 0 && offset < baseQuals.length;
}
private boolean validQual(final byte result) {
return result >= 0 && result <= QualityUtils.MAX_SAM_QUAL_SCORE;
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/variantrecalibration/ApplyRecalibration.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.variantrecalibration;
import htsjdk.variant.variantcontext.Allele;
import org.broadinstitute.gatk.tools.walkers.annotator.AnnotationUtils;
import org.broadinstitute.gatk.utils.commandline.*;
import org.broadinstitute.gatk.engine.CommandLineGATK;
import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
import org.broadinstitute.gatk.engine.walkers.PartitionBy;
import org.broadinstitute.gatk.engine.walkers.PartitionType;
import org.broadinstitute.gatk.engine.walkers.RodWalker;
import org.broadinstitute.gatk.engine.walkers.TreeReducible;
import org.broadinstitute.gatk.engine.SampleUtils;
import org.broadinstitute.gatk.utils.help.HelpConstants;
import org.broadinstitute.gatk.engine.GATKVCFUtils;
import htsjdk.variant.vcf.*;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextBuilder;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import org.broadinstitute.gatk.utils.variant.GATKVCFHeaderLines;
import java.io.File;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Apply a score cutoff to filter variants based on a recalibration table
*
* <p>
* This tool performs the second pass in a two-stage process called VQSR; the first pass is performed by the
* <a href='https://www.broadinstitute.org/gatk/guide/tooldocs/org_broadinstitute_gatk_tools_walkers_variantrecalibration_VariantRecalibrator.php'>VariantRecalibrator</a> tool.
* In brief, the first pass consists of creating a Gaussian mixture model by looking at the distribution of annotation
* values over a high quality subset of the input call set, and then scoring all input variants according to the model.
* The second pass consists of filtering variants based on score cutoffs identified in the first pass.
*</p>
*
* <p>
* Using the tranche file and recalibration table generated by the previous step, the ApplyRecalibration tool looks at each variant's VQSLOD value
* and decides which tranche it falls in. Variants in tranches that fall below the specified truth sensitivity filter level
* have their FILTER field annotated with the corresponding tranche level. This will result in a call set that is filtered
* to the desired level but retains the information necessary to increase sensitivity if needed.</p>
*
* <p>To be clear, please note that by "filtered", we mean that variants failing the requested tranche cutoff are <b>marked
* as filtered</b> in the output VCF; they are <b>not discarded</b>.</p>
*
* <p>VQSR is probably the hardest part of the Best Practices to get right, so be sure to read the
* <a href='https://www.broadinstitute.org/gatk/guide/article?id=39'>method documentation</a>,
* <a href='https://www.broadinstitute.org/gatk/guide/article?id=1259'>parameter recommendations</a> and
* <a href='https://www.broadinstitute.org/gatk/guide/article?id=2805'>tutorial</a> to really understand what these
 * tools do and how to use them for best results on your own data.</p>
*
* <h3>Input</h3>
* <ul>
* <li>The raw input variants to be filtered.</li>
* <li>The recalibration table file that was generated by the VariantRecalibrator tool.</li>
* <li>The tranches file that was generated by the VariantRecalibrator tool.</li>
* </ul>
*
* <h3>Output</h3>
* <ul>
* <li>A recalibrated VCF file in which each variant of the requested type is annotated with its VQSLOD and marked as filtered if the score is below the desired quality level.</li>
* </ul>
*
* <h3>Usage example for filtering SNPs</h3>
* <pre>
* java -jar GenomeAnalysisTK.jar \
* -T ApplyRecalibration \
* -R reference.fasta \
* -input raw_variants.vcf \
* --ts_filter_level 99.0 \
* -tranchesFile output.tranches \
* -recalFile output.recal \
* -mode SNP \
* -o path/to/output.recalibrated.filtered.vcf
* </pre>
*
* <h3>Allele-specific usage</h3>
* <pre>
* java -jar GenomeAnalysisTK.jar \
* -T ApplyRecalibration \
* -R reference.fasta \
* -input raw_variants.withASannotations.vcf \
* -AS \
* --ts_filter_level 99.0 \
* -tranchesFile output.AS.tranches \
* -recalFile output.AS.recal \
* -mode SNP \
* -o path/to/output.recalibrated.ASfiltered.vcf
* </pre>
* Each allele will be annotated by its corresponding entry in the AS_FilterStatus INFO field annotation. Allele-specific VQSLOD and culprit are also carried through from VariantRecalibrator and stored in the AS_VQSLOD and AS_culprit INFO fields, respectively.
* The site-level filter is set to the most lenient of any of the allele filters. That is, if one allele passes, the whole site will be PASS. If no alleles pass, the site-level filter will be set to the lowest sensitivity tranche among all the alleles.
*
* Note that the .tranches and .recal files should be derived from an allele-specific run of VariantRecalibrator
* Also note that the AS_culprit, AS_FilterStatus, and AS_VQSLOD fields will have placeholder values (NA or NaN) for alleles of a type that have not yet been processed by ApplyRecalibration
 * The spanning deletion allele (*) will not be recalibrated because it represents missing data. Its VQSLOD will remain NaN and its culprit and FilterStatus will be NA.
*
* <h3>Caveats</h3>
*
* <ul>
* <li>The tranche values used in the example above are only meant to be a general example. You should determine the level of sensitivity
* that is appropriate for your specific project. Remember that higher sensitivity (more power to detect variants, yay!) comes
* at the cost of specificity (more false negatives, boo!). You have to choose at what point you want to set the tradeoff.</li>
* <li>In order to create the tranche reporting plots (which are only generated for SNPs, not indels!) Rscript needs to be
* in your environment PATH (this is the scripting version of R, not the interactive version).</li>
* </ul>
*/
@DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARDISC, extraDocs = {CommandLineGATK.class} )
@PartitionBy(PartitionType.LOCUS)
public class ApplyRecalibration extends RodWalker<Integer, Integer> implements TreeReducible<Integer> {
public static final String LOW_VQSLOD_FILTER_NAME = "LOW_VQSLOD";
private final double DEFAULT_VQSLOD_CUTOFF = 0.0;
boolean foundSNPTranches = false;
boolean foundINDELTranches = false;
/////////////////////////////
// Inputs
/////////////////////////////
/**
     * These calls should be unfiltered and annotated with the error covariates that are intended to be used for modeling.
*/
@Input(fullName="input", shortName = "input", doc="The raw input variants to be recalibrated", required=true)
public List<RodBinding<VariantContext>> input;
@Input(fullName="recal_file", shortName="recalFile", doc="The input recal file used by ApplyRecalibration", required=true)
protected RodBinding<VariantContext> recal;
@Input(fullName="tranches_file", shortName="tranchesFile", doc="The input tranches file describing where to cut the data", required=false)
protected File TRANCHES_FILE;
/////////////////////////////
// Outputs
/////////////////////////////
@Output( doc="The output filtered and recalibrated VCF file in which each variant is annotated with its VQSLOD value")
private VariantContextWriter vcfWriter = null;
/////////////////////////////
// Command Line Arguments
/////////////////////////////
@Argument(fullName="ts_filter_level", shortName="ts_filter_level", doc="The truth sensitivity level at which to start filtering", required=false)
protected Double TS_FILTER_LEVEL = null;
/**
* Filter the input file based on allele-specific recalibration data. See tool docs for site-level and allele-level filtering details.
* Requires a .recal file produced using an allele-specific run of VariantRecalibrator
*/
@Argument(fullName="useAlleleSpecificAnnotations", shortName="AS", doc="If specified, the tool will attempt to apply a filter to each allele based on the input tranches and allele-specific .recal file.", required=false)
public boolean useASannotations = false;
@Advanced
@Argument(fullName="lodCutoff", shortName="lodCutoff", doc="The VQSLOD score below which to start filtering", required=false)
protected Double VQSLOD_CUTOFF = null;
/**
* For this to work properly, the -ignoreFilter argument should also be applied to the VariantRecalibration command.
*/
@Argument(fullName="ignore_filter", shortName="ignoreFilter", doc="If specified, the recalibration will be applied to variants marked as filtered by the specified filter name in the input VCF file", required=false)
private String[] IGNORE_INPUT_FILTERS = null;
@Argument(fullName="ignore_all_filters", shortName="ignoreAllFilters", doc="If specified, the variant recalibrator will ignore all input filters. Useful to rerun the VQSR from a filtered output file.", required=false)
private boolean IGNORE_ALL_FILTERS = false;
@Argument(fullName="excludeFiltered", shortName="ef", doc="Don't output filtered loci after applying the recalibration", required=false)
protected boolean EXCLUDE_FILTERED = false;
@Argument(fullName = "mode", shortName = "mode", doc = "Recalibration mode to employ: 1.) SNP for recalibrating only SNPs (emitting indels untouched in the output VCF); 2.) INDEL for indels; and 3.) BOTH for recalibrating both SNPs and indels simultaneously.", required = false)
public VariantRecalibratorArgumentCollection.Mode MODE = VariantRecalibratorArgumentCollection.Mode.SNP;
/////////////////////////////
// Private Member Variables
/////////////////////////////
final private List<Tranche> tranches = new ArrayList<>();
final private Set<String> inputNames = new HashSet<>();
final private Set<String> ignoreInputFilterSet = new TreeSet<>();
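    // String constants used when parsing tranche filter names and when (de)serializing allele-specific annotation lists,
    // plus the placeholder values written for alleles that have not been recalibrated yet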
final static private String listPrintSeparator = ",";
final static private String trancheFilterString = "VQSRTranche";
final static private String arrayParseRegex = "[\\[\\]\\s]";
final static private String emptyStringValue = "NA";
final static private String emptyFloatValue = "NaN";
//---------------------------------------------------------------------------------------------------------------
//
// initialize
//
//---------------------------------------------------------------------------------------------------------------
public void initialize() {
if( TS_FILTER_LEVEL != null ) {
for ( final Tranche t : Tranche.readTranches(TRANCHES_FILE) ) {
if ( t.ts >= TS_FILTER_LEVEL ) {
tranches.add(t);
}
logger.info(String.format("Read tranche " + t));
}
Collections.reverse(tranches); // this algorithm wants the tranches ordered from best (lowest truth sensitivity) to worst (highest truth sensitivity)
}
for( final RodBinding rod : input ) {
inputNames.add( rod.getName() );
}
if( IGNORE_INPUT_FILTERS != null ) {
ignoreInputFilterSet.addAll( Arrays.asList(IGNORE_INPUT_FILTERS) );
}
// setup the header fields
final Set<VCFHeaderLine> hInfo = new HashSet<>();
final Set<VCFHeaderLine> inputHeaders = GATKVCFUtils.getHeaderFields(getToolkit(), inputNames);
hInfo.addAll(inputHeaders);
addVQSRStandardHeaderLines(hInfo);
if (useASannotations)
addAlleleSpecificVQSRHeaderLines(hInfo);
checkForPreviousApplyRecalRun(Collections.unmodifiableSet(inputHeaders));
final TreeSet<String> samples = new TreeSet<>();
samples.addAll(SampleUtils.getUniqueSamplesFromRods(getToolkit(), inputNames));
//generate headers from tranches file
//TODO: throw away old tranche headers if we're ignoring filters
if( TS_FILTER_LEVEL != null ) {
// if the user specifies both ts_filter_level and lodCutoff then throw a user error
if( VQSLOD_CUTOFF != null ) {
throw new UserException("Arguments --ts_filter_level and --lodCutoff are mutually exclusive. Please only specify one option.");
}
if( tranches.size() >= 2 ) {
for( int iii = 0; iii < tranches.size() - 1; iii++ ) {
final Tranche t = tranches.get(iii);
hInfo.add(new VCFFilterHeaderLine(t.name, String.format("Truth sensitivity tranche level for " + t.model.toString() + " model at VQS Lod: " + t.minVQSLod + " <= x < " + tranches.get(iii+1).minVQSLod)));
}
}
if( tranches.size() >= 1 ) {
hInfo.add(new VCFFilterHeaderLine(tranches.get(0).name + "+", String.format("Truth sensitivity tranche level for " + tranches.get(0).model.toString() + " model at VQS Lod < " + tranches.get(0).minVQSLod)));
} else {
throw new UserException("No tranches were found in the file or were above the truth sensitivity filter level " + TS_FILTER_LEVEL);
}
logger.info("Keeping all variants in tranche " + tranches.get(tranches.size()-1));
} else {
if( VQSLOD_CUTOFF == null ) {
VQSLOD_CUTOFF = DEFAULT_VQSLOD_CUTOFF;
}
hInfo.add(new VCFFilterHeaderLine(LOW_VQSLOD_FILTER_NAME, "VQSLOD < " + VQSLOD_CUTOFF));
logger.info("Keeping all variants with VQSLOD >= " + VQSLOD_CUTOFF);
}
final VCFHeader vcfHeader = new VCFHeader(hInfo, samples);
vcfWriter.writeHeader(vcfHeader);
}
private boolean trancheIntervalIsValid(final String sensitivityLimits) {
final String[] vals = sensitivityLimits.split("to");
if(vals.length != 2)
return false;
try {
double lowerLimit = Double.parseDouble(vals[0]);
double upperLimit = Double.parseDouble(vals[1].replace("+","")); //why does our last tranche end with 100+? Is there anything greater than 100 percent? Really???
}
catch(NumberFormatException e) {
throw new UserException("Poorly formatted tranche filter name does not contain two sensitivity interval end points.");
}
return true;
}
public static void addVQSRStandardHeaderLines(final Set<VCFHeaderLine> hInfo) {
hInfo.add(VCFStandardHeaderLines.getInfoLine(VCFConstants.END_KEY));
hInfo.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.VQS_LOD_KEY));
hInfo.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.CULPRIT_KEY));
hInfo.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.POSITIVE_LABEL_KEY));
hInfo.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.NEGATIVE_LABEL_KEY));
}
public static void addAlleleSpecificVQSRHeaderLines(final Set<VCFHeaderLine> hInfo) {
hInfo.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.AS_FILTER_STATUS_KEY));
hInfo.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.AS_CULPRIT_KEY));
hInfo.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.AS_VQS_LOD_KEY));
}
/**
* Check the filter declarations in the input VCF header to see if any ApplyRecalibration mode has been run
* Here we assume that the tranches are named with a specific format: VQSRTranche[SNP|INDEL][lowerLimit]to[upperLimit]
* @param inputHeaders
*/
private void checkForPreviousApplyRecalRun(final Set<VCFHeaderLine> inputHeaders) {
for(final VCFHeaderLine header : inputHeaders) {
if(header instanceof VCFFilterHeaderLine) {
final String filterName = ((VCFFilterHeaderLine)header).getID();
if(filterName.length() < 12 || !filterName.substring(0, 11).equalsIgnoreCase(trancheFilterString)) {
continue;
}
if(filterName.charAt(11) == 'S') {
//for SNP tranches, get sensitivity limit
final String sensitivityLimits = filterName.substring(14);
if(trancheIntervalIsValid(sensitivityLimits))
foundSNPTranches = true;
}
else if(filterName.charAt(11) == 'I') {
//for INDEL tranches, get sensitivity limit
final String sensitivityLimits = filterName.substring(16);
if(trancheIntervalIsValid(sensitivityLimits))
foundINDELTranches = true;
}
}
}
}
//---------------------------------------------------------------------------------------------------------------
//
// map
//
//---------------------------------------------------------------------------------------------------------------
public Integer map( RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context ) {
if( tracker == null ) { // For some reason RodWalkers get map calls with null trackers
return 1;
}
final List<VariantContext> VCs = tracker.getValues(input, context.getLocation());
final List<VariantContext> recals = tracker.getValues(recal, context.getLocation());
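            // the recal rod holds the records written by VariantRecalibrator (VQSLOD, culprit, etc.) at this position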
for( final VariantContext vc : VCs ) {
final boolean evaluateThisVariant = useASannotations || VariantDataManager.checkVariationClass( vc, MODE );
final boolean variantIsNotFiltered = IGNORE_ALL_FILTERS || vc.isNotFiltered() || (!ignoreInputFilterSet.isEmpty() && ignoreInputFilterSet.containsAll(vc.getFilters())); //vc.isNotFiltered is true for PASS; vc.filtersHaveBeenApplied covers PASS and filters
if( evaluateThisVariant && variantIsNotFiltered) {
String filterString;
final VariantContextBuilder builder = new VariantContextBuilder(vc);
if (!useASannotations) {
filterString = doSiteSpecificFiltering(vc, recals, builder);
}
else { //allele-specific mode
filterString = doAlleleSpecificFiltering(vc, recals, builder);
}
//for both non-AS and AS modes:
if( filterString.equals(VCFConstants.PASSES_FILTERS_v4) ) {
builder.passFilters();
} else if(filterString.equals(VCFConstants.UNFILTERED)) {
builder.unfiltered();
} else {
builder.filters(filterString);
}
final VariantContext outputVC = builder.make();
if( !EXCLUDE_FILTERED || outputVC.isNotFiltered() ) {
vcfWriter.add( outputVC );
}
} else { // valid VC but not compatible with this mode, so just emit the variant untouched
vcfWriter.add( vc );
}
}
return 1; // This value isn't used for anything
}
public double parseFilterLowerLimit(final String trancheFilter) {
        final Pattern pattern = Pattern.compile("VQSRTranche\\S+?(\\d+\\.\\d+)to(\\d+\\.\\d+)"); // reluctant quantifier so group(1) captures the full lower sensitivity limit
final Matcher m = pattern.matcher(trancheFilter);
return m.find() ? Double.parseDouble(m.group(1)) : -1;
}
/**
* Generate the VCF filter string for this record based on the ApplyRecalibration modes run so far
* @param vc the input VariantContext (with at least one ApplyRecalibration mode already run)
* @param bestLod best LOD from the alleles we've seen in this recalibration mode
* @return the String to use as the VCF filter field
*/
protected String generateFilterStringFromAlleles(final VariantContext vc, final double bestLod) {
String filterString = ".";
final boolean bothModesWereRun = (MODE == VariantRecalibratorArgumentCollection.Mode.SNP && foundINDELTranches) || (MODE == VariantRecalibratorArgumentCollection.Mode.INDEL && foundSNPTranches);
final boolean onlyOneModeNeeded = !vc.isMixed() && VariantDataManager.checkVariationClass( vc, MODE );
//if both SNP and INDEL modes have not yet been run (and need to be), leave this variant as unfiltered and add the filters for the alleles in this mode to the INFO field
if (!bothModesWereRun && !onlyOneModeNeeded) {
return VCFConstants.UNFILTERED;
}
//if both SNP and INDEL modes have been run or the site is not mixed, generate a filter string for this site based on both models
//pull out the allele filter status from the info field (there may be more than one entry in the list if there were multiple snp/indel alleles assessed in the other mode)
final String prevFilterStatus = vc.getAttributeAsString(GATKVCFConstants.AS_FILTER_STATUS_KEY, null);
        //if a previous ApplyRecalibration run (the other mode) already assigned per-allele filter statuses
        if (prevFilterStatus != null && !prevFilterStatus.equals(VCFConstants.UNFILTERED)) {
final String prevAllelesFilterStatusString = vc.getAttributeAsString(GATKVCFConstants.AS_FILTER_STATUS_KEY, null);
final String[] prevAllelesFilterStatusList = prevAllelesFilterStatusString.split(listPrintSeparator);
//start with the current best allele filter as the most lenient filter across all modes and all alleles
String mostLenientFilterName = generateFilterString(bestLod);
//if the current mode's best allele passes the tranche filter, then let the whole site pass
if (mostLenientFilterName.equals(VCFConstants.PASSES_FILTERS_v4)) {
filterString = mostLenientFilterName;
}
//if the current mode's best allele does not pass the tranche filter, compare the most lenient filter of this mode with those from the previous mode
else {
double mostLenientSensitivityLowerLimit = parseFilterLowerLimit(mostLenientFilterName);
for (int i = 0; i < prevAllelesFilterStatusList.length; i++) {
final String alleleFilterString = prevAllelesFilterStatusList[i].replaceAll(arrayParseRegex, "").trim();
//if any allele from the previous mode passed the tranche filter, then let the whole site pass
if (alleleFilterString.equals(VCFConstants.PASSES_FILTERS_v4)) { //this allele is PASS
mostLenientFilterName = alleleFilterString;
break;
}
//if there's no PASS, then we need to parse the filters to find out how lenient they are
else {
final double alleleLowerLimit = parseFilterLowerLimit(alleleFilterString);
if (alleleLowerLimit == -1)
continue;
if (alleleLowerLimit < mostLenientSensitivityLowerLimit) {
mostLenientSensitivityLowerLimit = alleleLowerLimit;
mostLenientFilterName = alleleFilterString;
}
}
}
filterString = mostLenientFilterName;
}
}
//if both modes have been run, but the previous mode didn't apply a filter, use the current mode's best allele VQSLOD filter (shouldn't get run, but just in case)
else {
filterString = generateFilterString(bestLod);
}
return filterString;
}
/**
* Generate the VCF filter string for this record based on the provided lod score
* @param lod non-null double
* @return the String to use as the VCF filter field
*/
protected String generateFilterString( final double lod ) {
String filterString = null;
if( TS_FILTER_LEVEL != null ) {
for( int i = tranches.size() - 1; i >= 0; i-- ) {
final Tranche tranche = tranches.get(i);
if( lod >= tranche.minVQSLod ) {
if( i == tranches.size() - 1 ) {
filterString = VCFConstants.PASSES_FILTERS_v4;
} else {
filterString = tranche.name;
}
break;
}
}
if( filterString == null ) {
filterString = tranches.get(0).name+"+";
}
} else {
filterString = ( lod < VQSLOD_CUTOFF ? LOW_VQSLOD_FILTER_NAME : VCFConstants.PASSES_FILTERS_v4 );
}
return filterString;
}
private VariantContext getMatchingRecalVC(final VariantContext target, final List<VariantContext> recalVCs, final Allele allele) {
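        // in allele-specific mode the recal file holds one record per alt allele, so match on both position and allele;
        // otherwise any recal record ending at the same position is a match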
for( final VariantContext recalVC : recalVCs ) {
if ( target.getEnd() == recalVC.getEnd() ) {
if (!useASannotations)
return recalVC;
else if (allele.equals(recalVC.getAlternateAllele(0)))
return recalVC;
}
}
return null;
}
/**
*
* @param altIndex current alt allele
* @param prevCulpritList culprits from previous ApplyRecalibration run
* @param prevLodList lods from previous ApplyRecalibration run
* @param prevASfiltersList AS_filters from previous ApplyRecalibration run
     * @param culpritString list to which the allele's culprit annotation is appended
     * @param lodString list to which the allele's VQSLOD annotation is appended
     * @param AS_filterString list to which the allele's filter status is appended
*/
private void updateAnnotationsWithoutRecalibrating(final int altIndex, final String[] prevCulpritList, final String[] prevLodList, final String[] prevASfiltersList,
final List<String> culpritString, final List<String> lodString, final List<String> AS_filterString) {
if (foundINDELTranches || foundSNPTranches) {
if (altIndex < prevCulpritList.length) {
culpritString.add(prevCulpritList[altIndex].replaceAll(arrayParseRegex, "").trim());
lodString.add(prevLodList[altIndex].replaceAll(arrayParseRegex, "").trim());
AS_filterString.add(prevASfiltersList[altIndex].replaceAll(arrayParseRegex, "").trim());
}
} else { //if the other allele type hasn't been processed yet, make sure there are enough entries
culpritString.add(emptyStringValue);
lodString.add(emptyFloatValue);
AS_filterString.add(emptyStringValue);
}
}
/**
* Calculate the allele-specific filter status of vc
     * @param vc the input VariantContext being filtered
     * @param recals the recal file records overlapping this site
* @param builder is modified by adding attributes
* @return a String with the filter status for this site
*/
private String doAlleleSpecificFiltering(final VariantContext vc, final List<VariantContext> recals, final VariantContextBuilder builder) {
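        // track the best (highest) VQSLOD across the alleles recalibrated in this mode; it drives the site-level filter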
double bestLod = VariantRecalibratorEngine.MIN_ACCEPTABLE_LOD_SCORE;
final List<String> culpritStrings = new ArrayList<>();
final List<String> lodStrings = new ArrayList<>();
final List<String> AS_filterStrings = new ArrayList<>();
String[] prevCulpritList = null;
String[] prevLodList = null;
String[] prevASfiltersList = null;
//get VQSR annotations from previous run of ApplyRecalibration, if applicable
if(foundINDELTranches || foundSNPTranches) {
final String prevCulprits = vc.getAttributeAsString(GATKVCFConstants.AS_CULPRIT_KEY,"");
prevCulpritList = prevCulprits.isEmpty()? new String[0] : prevCulprits.split(listPrintSeparator);
final String prevLodString = vc.getAttributeAsString(GATKVCFConstants.AS_VQS_LOD_KEY,"");
prevLodList = prevLodString.isEmpty()? new String[0] : prevLodString.split(listPrintSeparator);
final String prevASfilters = vc.getAttributeAsString(GATKVCFConstants.AS_FILTER_STATUS_KEY,"");
prevASfiltersList = prevASfilters.isEmpty()? new String[0] : prevASfilters.split(listPrintSeparator);
}
//for each allele in the current VariantContext
for (int altIndex = 0; altIndex < vc.getNAlleles()-1; altIndex++) {
final Allele allele = vc.getAlternateAllele(altIndex);
//if the current allele is not part of this recalibration mode, add its annotations to the list and go to the next allele
if (!VariantDataManager.checkVariationClass(vc, allele, MODE)) {
updateAnnotationsWithoutRecalibrating(altIndex, prevCulpritList, prevLodList, prevASfiltersList, culpritStrings, lodStrings, AS_filterStrings);
continue;
}
//if the current allele does need to have recalibration applied...
//initialize allele-specific VQSR annotation data with values for spanning deletion
String alleleLodString = emptyFloatValue;
String alleleFilterString = emptyStringValue;
String alleleCulpritString = emptyStringValue;
//if it's not a spanning deletion, replace those allele strings with the real values
if (!allele.equals(Allele.SPAN_DEL)) {
VariantContext recalDatum = getMatchingRecalVC(vc, recals, allele);
if (recalDatum == null) {
throw new UserException("Encountered input allele which isn't found in the input recal file. Please make sure VariantRecalibrator and ApplyRecalibration were run on the same set of input variants with flag -AS. First seen at: " + vc);
}
//compare VQSLODs for all alleles in the current mode for filtering later
final double lod = recalDatum.getAttributeAsDouble(GATKVCFConstants.VQS_LOD_KEY, VariantRecalibratorEngine.MIN_ACCEPTABLE_LOD_SCORE);
if (lod > bestLod)
bestLod = lod;
alleleLodString = String.format("%.4f", lod);
alleleFilterString = generateFilterString(lod);
alleleCulpritString = recalDatum.getAttributeAsString(GATKVCFConstants.CULPRIT_KEY, ".");
if(recalDatum != null) {
if (recalDatum.hasAttribute(GATKVCFConstants.POSITIVE_LABEL_KEY))
builder.attribute(GATKVCFConstants.POSITIVE_LABEL_KEY, true);
if (recalDatum.hasAttribute(GATKVCFConstants.NEGATIVE_LABEL_KEY))
builder.attribute(GATKVCFConstants.NEGATIVE_LABEL_KEY, true);
}
}
//append per-allele VQSR annotations
lodStrings.add(alleleLodString);
AS_filterStrings.add(alleleFilterString);
culpritStrings.add(alleleCulpritString);
}
// Annotate the new record with its VQSLOD, AS_FilterStatus, and the worst performing annotation
if(!AS_filterStrings.isEmpty() )
builder.attribute(GATKVCFConstants.AS_FILTER_STATUS_KEY, AnnotationUtils.encodeStringList(AS_filterStrings));
if(!lodStrings.isEmpty())
builder.attribute(GATKVCFConstants.AS_VQS_LOD_KEY, AnnotationUtils.encodeStringList(lodStrings));
if(!culpritStrings.isEmpty())
builder.attribute(GATKVCFConstants.AS_CULPRIT_KEY, AnnotationUtils.encodeStringList(culpritStrings));
return generateFilterStringFromAlleles(vc, bestLod);
}
/**
* Calculate the filter status for a given VariantContext using the combined data from all alleles at a site
     * @param vc the input VariantContext being filtered
     * @param recals the recal file records overlapping this site
* @param builder is modified by adding attributes
* @return a String with the filter status for this site
*/
private String doSiteSpecificFiltering(final VariantContext vc, final List<VariantContext> recals, final VariantContextBuilder builder) {
VariantContext recalDatum = getMatchingRecalVC(vc, recals, null);
if( recalDatum == null ) {
throw new UserException("Encountered input variant which isn't found in the input recal file. Please make sure VariantRecalibrator and ApplyRecalibration were run on the same set of input variants. First seen at: " + vc );
}
final String lodString = recalDatum.getAttributeAsString(GATKVCFConstants.VQS_LOD_KEY, null);
if( lodString == null ) {
throw new UserException("Encountered a malformed record in the input recal file. There is no lod for the record at: " + vc );
}
final double lod;
try {
lod = Double.valueOf(lodString);
} catch (NumberFormatException e) {
throw new UserException("Encountered a malformed record in the input recal file. The lod is unreadable for the record at: " + vc );
}
builder.attribute(GATKVCFConstants.VQS_LOD_KEY, lod);
builder.attribute(GATKVCFConstants.CULPRIT_KEY, recalDatum.getAttribute(GATKVCFConstants.CULPRIT_KEY));
if(recalDatum != null) {
if (recalDatum.hasAttribute(GATKVCFConstants.POSITIVE_LABEL_KEY))
builder.attribute(GATKVCFConstants.POSITIVE_LABEL_KEY, true);
if (recalDatum.hasAttribute(GATKVCFConstants.NEGATIVE_LABEL_KEY))
builder.attribute(GATKVCFConstants.NEGATIVE_LABEL_KEY, true);
}
return generateFilterString(lod);
}
//---------------------------------------------------------------------------------------------------------------
//
// reduce
//
//---------------------------------------------------------------------------------------------------------------
public Integer reduceInit() {
return 1; // This value isn't used for anything
}
public Integer reduce( final Integer mapValue, final Integer reduceSum ) {
return 1; // This value isn't used for anything
}
public Integer treeReduce( final Integer lhs, final Integer rhs ) {
return 1; // This value isn't used for anything
}
public void onTraversalDone( final Integer reduceSum ) {
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/cancer/m2/M2_HapMapSensitivity.md
# CRSP HapMap Sensitivity Evaluation
###Current M2 Performance
(gsa-unstable 9/1/15, commit:a08903d)
| Mixture | Type | Sensitivity |
|---------|------|-------------|
| 5-plex | SNP | 0.9691274 |
| 5-plex | INDEL | 0.87466127 |
| 10-plex | SNP | 0.97179496 |
| 10-plex | INDEL | 0.8888889 |
| 20-plex | SNP | 0.9537307 |
| 20-plex | INDEL | 0.83281654 |
###Run Procedure
Run the script separately for each HapMap mixture bam:
```
inputDir=/dsde/working/mutect/laura/hapmapSensitivity/inputs/
Queue_Jar=<Queue jar of interest>
java -jar $Queue_Jar -S Qscript_HapMapPlex.scala \
-intervals $inputDir/agilent_5plex_intervalFiles.list \
-tumors $inputDir/agilent_5plex_bams.list \
-truthVCF $inputDir/agilent_5plex_truth_intervals.vcf \
-snpCounts $inputDir/agilent_5plex_truth_intervals.snpCounts.list \
-indelCounts $inputDir/agilent_5plex_truth_intervals.indelCounts.list \
-o <output.5plex.sensitivity.report> \
-qsub -jobQueue gsa -jobResReq virtual_free=5G -sc 50
```
The HapMap bams get run as tumors without normals because we're not interested in specificity here, so we don't need the normals to filter out noise
###Inputs
Bam lists:
5- and 10-plex have 3 replicates, 20-plex has 9
Interval files:
If we're only interested in sensitivity, then we only need to run the caller around known true positive sites, which we take from the truth VCFs
This workaround repeats the truth filename for the number of bams -- in theory each could have a separate truth VCF, but they are the same titration mixture
SNP/INDEL counts:
This is the number of events in the truth VCFs so we can find the sensitivity across all samples
TODO: this could be generalized
###Outputs
Each run outputs its own SNP and INDEL sensitivity combined across all samples:
```
Sensitivity across all samples:
SNPs: 0.95156
INDELs: 0.7328859
```
Note that these are not filtered for depth as described in the CRSP documentation
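For reference, the combined sensitivity reported above is just the total number of true-positive calls recovered across all replicates divided by the total number of truth events (the per-mixture truth count times the number of replicates, since every replicate is scored against the same truth VCF). A minimal Java sketch of that arithmetic -- the class name and the counts are illustrative only, not taken from the Qscript:
```
// Illustrative only: combined sensitivity across replicates of one mixture
public final class CombinedSensitivityExample {
    static double combinedSensitivity(final int[] truePositivesPerReplicate, final int truthEventCount) {
        long recovered = 0;
        for (final int tp : truePositivesPerReplicate) {
            recovered += tp;                      // true positives found in each replicate
        }
        // every replicate is measured against the same truth set
        return recovered / (double) ((long) truthEventCount * truePositivesPerReplicate.length);
    }

    public static void main(final String[] args) {
        // hypothetical counts for 3 replicates of a 5-plex mixture with 1000 truth events
        System.out.println(combinedSensitivity(new int[]{950, 948, 955}, 1000)); // ~0.951
    }
}
```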
###Resources
Truth file preparation for 5-plex:
Start with /cga/tcga-gsc/benchmark/data/crsp-truth/1kg_5plex_wgs_hc_calls.codingIndelSnp.db135.recode.vcf
Select out allele fraction greater than 20% using "vc.isBiallelic() ? AF >= 0.2 : vc.hasGenotypes() && vc.getCalledChrCount(vc.getAltAlleleWithHighestAlleleCount())/(1.0*vc.getCalledChrCount()) >= 0.2"
Similar for 10-plex source:
/cga/tcga-gsc/benchmark/data/crsp-truth/1kg_10plex_wgs_hc_calls.codingIndelSnp.db135.recode.vcf
And 20-plex source:
/cga/tcga-gsc/benchmark/data/crsp-truth/1kg_20plex_wgs_hc_calls.codingIndelSnp.db135.recode.vcf
both also using AF filter of 0.2<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/DiploidSNPGenotypeLikelihoods.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper;
import htsjdk.samtools.SAMUtils;
import org.broadinstitute.gatk.utils.BaseUtils;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.QualityUtils;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import org.broadinstitute.gatk.utils.fragments.FragmentCollection;
import org.broadinstitute.gatk.utils.fragments.FragmentUtils;
import org.broadinstitute.gatk.utils.genotyper.DiploidGenotype;
import org.broadinstitute.gatk.utils.pileup.PileupElement;
import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
import java.util.List;
import static java.lang.Math.log10;
import static java.lang.Math.pow;
/**
* Stable, error checking version of the Bayesian genotyper. Useful for calculating the likelihoods, priors,
* and posteriors given a pile of bases and quality scores
*
 * Suppose we have bases b1, b2, ..., bN with quality scores q1, q2, ..., qN. This object
* calculates:
*
* P(G | D) = P(G) * P(D | G)
*
* where
*
 * log10 P(D | G) = sum_i log10 P(bi | G)
*
* and
*
 * P(bi | G) = 1 - P(error | qi)                            if bi is in G
 *           = P(error | qi) / 3                            if bi is not in G
 *
 * for homozygous genotypes, and for heterozygous genotypes:
 *
 * P(bi | G) = (1 - P(error | qi)) / 2 + P(error | qi) / 6  if bi is in G
 *           = P(error | qi) / 3                            if bi is not in G
*
* for each of the 10 unique diploid genotypes AA, AC, AG, .., TT
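 *
 * For example (ignoring the PCR error term, which is applied per-fragment below), a single
 * observed base A with Q20 (P(error) = 0.01) contributes log10(0.99) to AA,
 * log10(0.99/2 + 0.01/6) to each of AC, AG and AT, and log10(0.01/3) to every genotype
 * not containing A.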
*
* Everything is stored as arrays indexed by DiploidGenotype.ordinal() values in log10 space.
*
* The priors contain the relative probabilities of each genotype, and must be provided at object creation.
* From then on, you can call any of the add() routines to update the likelihoods and posteriors in the above
* model.
*/
public class DiploidSNPGenotypeLikelihoods implements Cloneable {
public final static double DEFAULT_PCR_ERROR_RATE = FragmentUtils.DEFAULT_PCR_ERROR_RATE;
protected final static int FIXED_PLOIDY = 2;
protected final static int MAX_PLOIDY = FIXED_PLOIDY + 1;
protected final static double ploidyAdjustment = log10(FIXED_PLOIDY);
protected final static double log10_3 = log10(3.0);
protected boolean VERBOSE = false;
//
// The fundamental data arrays associated with a Genotype Likelihoods object
//
protected double[] log10Likelihoods = null;
// TODO: don't calculate this each time through
protected double log10_PCR_error_3;
protected double log10_1_minus_PCR_error;
/**
 * Create a new GenotypeLikelihoods object with the given PCR error rate for each diploid genotype
*
* @param PCR_error_rate the PCR error rate
*/
public DiploidSNPGenotypeLikelihoods(double PCR_error_rate) {
log10_PCR_error_3 = log10(PCR_error_rate) - log10_3;
log10_1_minus_PCR_error = log10(1.0 - PCR_error_rate);
setToZero();
}
/**
* Cloning of the object
* @return clone
* @throws CloneNotSupportedException
*/
protected Object clone() throws CloneNotSupportedException {
DiploidSNPGenotypeLikelihoods c = (DiploidSNPGenotypeLikelihoods)super.clone();
c.log10Likelihoods = log10Likelihoods.clone();
return c;
}
protected void setToZero() {
log10Likelihoods = genotypeZeros.clone(); // likelihoods are all zeros
}
/**
* Returns an array of log10 likelihoods for each genotype, indexed by DiploidGenotype.ordinal values()
* @return likelihoods array
*/
public double[] getLikelihoods() {
return log10Likelihoods;
}
// -------------------------------------------------------------------------------------
//
// add() routines. These are the workhorse routines for calculating the overall genotype
// likelihoods given observed bases and reads. Includes high-level operators all the
// way down to single base and qual functions.
//
// -------------------------------------------------------------------------------------
/**
* Updates likelihoods and posteriors to reflect the additional observations contained within the
 * read-based pileup by calling add(observedBase, qualityScore) for each base / qual in the
* pileup
*
* @param pileup read pileup
* @param ignoreBadBases should we ignore bad bases?
* @param capBaseQualsAtMappingQual should we cap a base's quality by its read's mapping quality?
* @param minBaseQual the minimum base quality at which to consider a base valid
* @return the number of good bases found in the pileup
*/
public int add(ReadBackedPileup pileup, boolean ignoreBadBases, boolean capBaseQualsAtMappingQual, int minBaseQual) {
int n = 0;
// for each fragment, add to the likelihoods
FragmentCollection<PileupElement> fpile = pileup.toFragments();
for ( PileupElement p : fpile.getSingletonReads() )
n += add(p, ignoreBadBases, capBaseQualsAtMappingQual, minBaseQual);
for ( List<PileupElement> overlappingPair : fpile.getOverlappingPairs() )
n += add(overlappingPair, ignoreBadBases, capBaseQualsAtMappingQual, minBaseQual);
return n;
}
public int add(PileupElement elt, boolean ignoreBadBases, boolean capBaseQualsAtMappingQual, int minBaseQual) {
byte obsBase = elt.getBase();
byte qual = qualToUse(elt, ignoreBadBases, capBaseQualsAtMappingQual, minBaseQual);
if ( qual == 0 )
return 0;
return add(obsBase, qual, (byte)0, (byte)0, 1);
}
public int add(List<PileupElement> overlappingPair, boolean ignoreBadBases, boolean capBaseQualsAtMappingQual, int minBaseQual) {
final PileupElement p1 = overlappingPair.get(0);
final PileupElement p2 = overlappingPair.get(1);
final byte observedBase1 = p1.getBase();
final byte qualityScore1 = qualToUse(p1, ignoreBadBases, capBaseQualsAtMappingQual, minBaseQual);
final byte observedBase2 = p2.getBase();
final byte qualityScore2 = qualToUse(p2, ignoreBadBases, capBaseQualsAtMappingQual, minBaseQual);
if ( qualityScore1 == 0 ) {
if ( qualityScore2 == 0 ) // abort early if we didn't see any good bases
return 0;
else {
return add(observedBase2, qualityScore2, (byte)0, (byte)0);
}
} else {
return add(observedBase1, qualityScore1, observedBase2, qualityScore2);
}
}
/**
*
* @param obsBase1 first observed base
* @param qual1 base qual of first observed base
* @param obsBase2 second observed base
* @param qual2 base qual of second observed base; can be 0, indicating no second base was observed for this fragment
* @param nObs the number of times this quad of values was seen. Generally 1, but reduced reads can have nObs > 1 for synthetic reads
* @return 0 if the base is bad, 1 otherwise
*/
private int add(byte obsBase1, byte qual1, byte obsBase2, byte qual2, int nObs) {
// TODO-- Right now we assume that there are at most 2 reads per fragment. This assumption is fine
// TODO-- given the current state of next-gen sequencing, but may need to be fixed in the future.
// TODO-- However, when that happens, we'll need to be a lot smarter about the caching we do here.
// Just look up the cached result if it's available, or compute and store it
DiploidSNPGenotypeLikelihoods gl;
if ( ! inCache(obsBase1, qual1, obsBase2, qual2, FIXED_PLOIDY) ) {
gl = calculateCachedGenotypeLikelihoods(obsBase1, qual1, obsBase2, qual2, FIXED_PLOIDY);
} else {
gl = getCachedGenotypeLikelihoods(obsBase1, qual1, obsBase2, qual2, FIXED_PLOIDY);
}
// for bad bases, there are no likelihoods
if ( gl == null )
return 0;
double[] likelihoods = gl.getLikelihoods();
for ( DiploidGenotype g : DiploidGenotype.values() ) {
double likelihood = likelihoods[g.ordinal()];
log10Likelihoods[g.ordinal()] += likelihood * nObs;
}
return 1;
}
private int add(byte obsBase1, byte qual1, byte obsBase2, byte qual2) {
return add(obsBase1, qual1, obsBase2, qual2, 1);
}
// -------------------------------------------------------------------------------------
//
// Dealing with the cache routines
//
// -------------------------------------------------------------------------------------
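    // Cache of per-fragment genotype likelihoods, indexed as
    // [base1 index][qual1][base2 index, or BASES.length if there is no second base][qual2][ploidy];
    // a null entry means "not computed yet" (see setCache() / getCache() below)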
static DiploidSNPGenotypeLikelihoods[][][][][] CACHE = new DiploidSNPGenotypeLikelihoods[BaseUtils.BASES.length][QualityUtils.MAX_SAM_QUAL_SCORE +1][BaseUtils.BASES.length+1][QualityUtils.MAX_SAM_QUAL_SCORE +1][MAX_PLOIDY];
protected boolean inCache(byte observedBase1, byte qualityScore1, byte observedBase2, byte qualityScore2, int ploidy) {
return getCache(CACHE, observedBase1, qualityScore1, observedBase2, qualityScore2, ploidy) != null;
}
protected DiploidSNPGenotypeLikelihoods getCachedGenotypeLikelihoods(byte observedBase1, byte qualityScore1, byte observedBase2, byte qualityScore2, int ploidy) {
DiploidSNPGenotypeLikelihoods gl = getCache(CACHE, observedBase1, qualityScore1, observedBase2, qualityScore2, ploidy);
if ( gl == null )
throw new RuntimeException(String.format("BUG: trying to fetch an unset cached genotype likelihood at base1=%c, qual1=%d, base2=%c, qual2=%d, ploidy=%d",
observedBase1, qualityScore1, observedBase2, qualityScore2, ploidy));
return gl;
}
protected DiploidSNPGenotypeLikelihoods calculateCachedGenotypeLikelihoods(byte observedBase1, byte qualityScore1, byte observedBase2, byte qualityScore2, int ploidy) {
DiploidSNPGenotypeLikelihoods gl = calculateGenotypeLikelihoods(observedBase1, qualityScore1, observedBase2, qualityScore2);
setCache(CACHE, observedBase1, qualityScore1, observedBase2, qualityScore2, ploidy, gl);
return gl;
}
protected void setCache( DiploidSNPGenotypeLikelihoods[][][][][] cache,
byte observedBase1, byte qualityScore1, byte observedBase2, byte qualityScore2, int ploidy,
DiploidSNPGenotypeLikelihoods val ) {
int i = BaseUtils.simpleBaseToBaseIndex(observedBase1);
int j = qualityScore1;
int k = qualityScore2 != 0 ? BaseUtils.simpleBaseToBaseIndex(observedBase2) : BaseUtils.BASES.length;
int l = qualityScore2;
int m = ploidy;
cache[i][j][k][l][m] = val;
}
protected DiploidSNPGenotypeLikelihoods getCache(DiploidSNPGenotypeLikelihoods[][][][][] cache,
byte observedBase1, byte qualityScore1, byte observedBase2, byte qualityScore2, int ploidy) {
int i = BaseUtils.simpleBaseToBaseIndex(observedBase1);
int j = qualityScore1;
int k = qualityScore2 != 0 ? BaseUtils.simpleBaseToBaseIndex(observedBase2) : BaseUtils.BASES.length;
int l = qualityScore2;
int m = ploidy;
return cache[i][j][k][l][m];
}
protected DiploidSNPGenotypeLikelihoods calculateGenotypeLikelihoods(byte observedBase1, byte qualityScore1, byte observedBase2, byte qualityScore2) {
double[] log10FourBaseLikelihoods = computeLog10Likelihoods(observedBase1, qualityScore1, observedBase2, qualityScore2);
try {
DiploidSNPGenotypeLikelihoods gl = (DiploidSNPGenotypeLikelihoods)this.clone();
gl.setToZero();
// we need to adjust for ploidy. We take the raw p(obs | chrom) / ploidy, which is -log10(ploidy) in log space
for ( DiploidGenotype g : DiploidGenotype.values() ) {
// todo assumes ploidy is 2 -- should be generalized. Obviously the below code can be turned into a loop
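                // e.g. for genotype AC: P(obs | AC) = ( P(obs | chrom=A) + P(obs | chrom=C) ) / 2,
                // computed below in linear space and then converted back to log10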
double p_base = 0.0;
p_base += pow(10, log10FourBaseLikelihoods[BaseUtils.simpleBaseToBaseIndex(g.base1)] - ploidyAdjustment);
p_base += pow(10, log10FourBaseLikelihoods[BaseUtils.simpleBaseToBaseIndex(g.base2)] - ploidyAdjustment);
final double likelihood = log10(p_base);
gl.log10Likelihoods[g.ordinal()] += likelihood;
}
if ( VERBOSE ) {
for ( DiploidGenotype g : DiploidGenotype.values() ) { System.out.printf("%s\t", g); }
System.out.println();
for ( DiploidGenotype g : DiploidGenotype.values() ) { System.out.printf("%.2f\t", gl.log10Likelihoods[g.ordinal()]); }
System.out.println();
}
return gl;
} catch ( CloneNotSupportedException e ) {
throw new RuntimeException(e);
}
}
/**
* Updates likelihoods and posteriors to reflect an additional observation of observedBase with
* qualityScore.
*
* @param observedBase1 the base observed on the 1st read of the fragment
* @param qualityScore1 the qual of the base on the 1st read of the fragment, or zero if NA
* @param observedBase2 the base observed on the 2nd read of the fragment
* @param qualityScore2 the qual of the base on the 2nd read of the fragment, or zero if NA
* @return likelihoods for this observation or null if the base was not considered good enough to add to the likelihoods (Q0 or 'N', for example)
*/
protected double[] computeLog10Likelihoods(byte observedBase1, byte qualityScore1, byte observedBase2, byte qualityScore2) {
double[] log10FourBaseLikelihoods = baseZeros.clone();
for ( byte trueBase : BaseUtils.BASES ) {
double likelihood = 0.0;
for ( byte fragmentBase : BaseUtils.BASES ) {
double log10FragmentLikelihood = (trueBase == fragmentBase ? log10_1_minus_PCR_error : log10_PCR_error_3);
if ( qualityScore1 != 0 ) {
log10FragmentLikelihood += log10PofObservingBaseGivenChromosome(observedBase1, fragmentBase, qualityScore1);
}
if ( qualityScore2 != 0 ) {
log10FragmentLikelihood += log10PofObservingBaseGivenChromosome(observedBase2, fragmentBase, qualityScore2);
}
//if ( VERBOSE ) {
// System.out.printf(" L(%c | b=%s, Q=%d) = %f / %f%n",
// observedBase, trueBase, qualityScore, pow(10,likelihood) * 100, likelihood);
//}
likelihood += pow(10, log10FragmentLikelihood);
}
log10FourBaseLikelihoods[BaseUtils.simpleBaseToBaseIndex(trueBase)] = log10(likelihood);
}
return log10FourBaseLikelihoods;
}
/**
*
* @param observedBase observed base
* @param chromBase target base
* @param qual base quality
* @return log10 likelihood
*/
protected double log10PofObservingBaseGivenChromosome(byte observedBase, byte chromBase, byte qual) {
double logP;
if ( observedBase == chromBase ) {
// the base is consistent with the chromosome -- it's 1 - e
//logP = oneMinusData[qual];
double e = pow(10, (qual / -10.0));
logP = log10(1.0 - e);
} else {
// the base is inconsistent with the chromosome -- it's e * P(chromBase | observedBase is an error)
logP = qual / -10.0 + (-log10_3);
}
//System.out.printf("%c %c %d => %f%n", observedBase, chromBase, qual, logP);
return logP;
}
/**
* Helper function that returns the phred-scaled base quality score we should use for calculating
* likelihoods for a pileup element. May return 0 to indicate that the observation is bad, and may
* cap the quality score by the mapping quality of the read itself.
*
* @param p Pileup element
* @param ignoreBadBases Should we ignore bad bases?
* @param capBaseQualsAtMappingQual Should we cap the base qualities at the mapping quality of the read?
* @param minBaseQual Minimum allowed base quality
* @return the actual base quality to use
*/
private static byte qualToUse(PileupElement p, boolean ignoreBadBases, boolean capBaseQualsAtMappingQual, int minBaseQual) {
if ( ignoreBadBases && !BaseUtils.isRegularBase( p.getBase() ) )
return 0;
byte qual = p.getQual();
if ( qual > SAMUtils.MAX_PHRED_SCORE )
throw new UserException.MisencodedBAM(p.getRead(), "we encountered an extremely high quality score (" + (int)qual + ")");
if ( capBaseQualsAtMappingQual )
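            // 0xff & qual treats the signed byte as an unsigned value before taking the min with the read's mapping quality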
qual = (byte) Math.min( 0xff & qual, p.getMappingQual());
if ( (int)qual < minBaseQual )
qual = (byte)0;
return qual;
}
// -----------------------------------------------------------------------------------------------------------------
//
//
// helper routines
//
//
// -----------------------------------------------------------------------------------------------------------------
/**
* Return a string representation of this object in a moderately usable form
*
* @return string representation
*/
public String toString() {
double sum = 0;
StringBuilder s = new StringBuilder();
for (DiploidGenotype g : DiploidGenotype.values()) {
s.append(String.format("%s %.10f ", g, log10Likelihoods[g.ordinal()]));
sum += Math.pow(10,log10Likelihoods[g.ordinal()]);
}
s.append(String.format(" %f", sum));
return s.toString();
}
// -----------------------------------------------------------------------------------------------------------------
//
//
// Validation routines
//
//
// -----------------------------------------------------------------------------------------------------------------
public boolean validate() {
return validate(true);
}
public boolean validate(boolean throwException) {
try {
for ( DiploidGenotype g : DiploidGenotype.values() ) {
String bad = null;
int i = g.ordinal();
if ( ! MathUtils.wellFormedDouble(log10Likelihoods[i]) || ! MathUtils.isNegativeOrZero(log10Likelihoods[i]) ) {
bad = String.format("Likelihood %f is badly formed", log10Likelihoods[i]);
}
if ( bad != null ) {
throw new IllegalStateException(String.format("At %s: %s", g.toString(), bad));
}
}
} catch ( IllegalStateException e ) {
if ( throwException )
throw new RuntimeException(e);
else
return false;
}
return true;
}
//
// Constant static data
//
private final static double[] genotypeZeros = new double[DiploidGenotype.values().length];
private final static double[] baseZeros = new double[BaseUtils.BASES.length];
static {
for ( DiploidGenotype g : DiploidGenotype.values() ) {
genotypeZeros[g.ordinal()] = 0.0;
}
for ( byte base : BaseUtils.BASES ) {
baseZeros[BaseUtils.simpleBaseToBaseIndex(base)] = 0.0;
}
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/afcalc/IndependentAllelesDiploidExactAFCalculator.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper.afcalc;
import com.google.java.contract.Ensures;
import com.google.java.contract.Requires;
import htsjdk.variant.variantcontext.*;
import org.broadinstitute.gatk.tools.walkers.genotyper.GenotypeLikelihoodCalculators;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import java.util.*;
/**
* Computes the conditional bi-allelic exact results
*
 * Suppose vc contains 2 alt alleles: A* with C and T. This function first computes:
*
* (1) P(D | AF_c > 0 && AF_t == *) [i.e., T can be anything]
*
* it then computes the conditional probability on AF_c == 0:
*
* (2) P(D | AF_t > 0 && AF_c == 0)
*
* Thinking about this visually, we have the following likelihood matrix where each cell is
* the P(D | AF_c == i && AF_t == j):
*
* 0 AF_c > 0
* -----------------
* 0 | |
* |--|-------------
* a | |
* f | |
* _ | |
* t | |
* > | |
* 0 | |
*
 * What we really want to know is how
*
* (3) P(D | AF_c == 0 & AF_t == 0)
*
* compares with
*
* (4) P(D | AF_c > 0 || AF_t > 0)
*
* This is effectively asking for the value in the upper left vs. the sum of all cells.
*
* This class implements the conditional likelihoods summation for any number of alt
* alleles, where each alt allele has its EXACT probability of segregating calculated by
* reducing each alt B into the case XB and computing P(D | AF_b > 0 ) as follows:
*
* Suppose we have for a A/B/C site the following GLs:
*
* AA AB BB AC BC CC
*
* and we want to get the bi-allelic GLs for X/B, where X is everything not B
*
* XX = AA + AC + CC (since X = A or C)
* XB = AB + BC
* BB = BB
*
* The posterior of the site being a variant site is calculated using
 * the likelihood of the AF when all alternatives are collapsed to be zero.
*/
public class IndependentAllelesDiploidExactAFCalculator extends DiploidExactAFCalculator {
private final static int[] BIALLELIC_NON_INFORMATIVE_PLS = new int[]{0,0,0};
private final static List<Allele> BIALLELIC_NOCALL = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL);
/**
     * Sorts AFCalcResults by their posteriors of AF > 0, so that the most likely allele comes first
*/
private final static class CompareAFCalculatorResultsByPNonRef implements Comparator<AFCalculationResult> {
@Override
public int compare(AFCalculationResult o1, AFCalculationResult o2) {
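            // negate so that results sort in descending order of P(AF > 0), i.e. most likely allele first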
return -1 * Double.compare(o1.getLog10PosteriorOfAFGT0(), o2.getLog10PosteriorOfAFGT0());
}
}
private final static CompareAFCalculatorResultsByPNonRef compareAFCalcResultsByPNonRef = new CompareAFCalculatorResultsByPNonRef();
/**
* The AFCalc model we are using to do the bi-allelic computation
*/
final AFCalculator biAlleleExactModel;
protected IndependentAllelesDiploidExactAFCalculator() {
super();
biAlleleExactModel = new ReferenceDiploidExactAFCalculator();
}
@Override
public AFCalculationResult computeLog10PNonRef(final VariantContext vc, final int defaultPloidy,
final double[] log10AlleleFrequencyPriors, final StateTracker stateTracker) {
final List<AFCalculationResult> independentResultTrackers = computeAlleleIndependentExact(vc, defaultPloidy, log10AlleleFrequencyPriors);
if ( independentResultTrackers.size() == 0 )
throw new IllegalStateException("Independent alleles model returned an empty list of results at VC " + vc);
if ( independentResultTrackers.size() == 1 ) {
// fast path for the very common bi-allelic use case
return independentResultTrackers.get(0);
} else {
final AFCalculationResult combinedAltAllelesResult = combineAltAlleleIndependentExact(vc,defaultPloidy,log10AlleleFrequencyPriors);
// we are a multi-allelic, so we need to actually combine the results
final List<AFCalculationResult> withMultiAllelicPriors = applyMultiAllelicPriors(independentResultTrackers);
return combineIndependentPNonRefs(vc, withMultiAllelicPriors, combinedAltAllelesResult);
}
}
private AFCalculationResult combineAltAlleleIndependentExact(final VariantContext vc, int defaultPloidy, double[] log10AlleleFrequencyPriors) {
final VariantContext combinedAltAllelesVariantContext = makeCombinedAltAllelesVariantContext(vc);
final AFCalculationResult resultTracker = biAlleleExactModel.getLog10PNonRef(combinedAltAllelesVariantContext, defaultPloidy, vc.getNAlleles() - 1, log10AlleleFrequencyPriors);
return resultTracker;
}
private VariantContext makeCombinedAltAllelesVariantContext(final VariantContext vc) {
final int nAltAlleles = vc.getNAlleles() - 1;
if ( nAltAlleles == 1 )
return vc;
else {
final VariantContextBuilder vcb = new VariantContextBuilder(vc);
final Allele reference = vcb.getAlleles().get(0);
vcb.alleles(Arrays.asList(reference, GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE));
final int genotypeCount = GenotypeLikelihoodCalculators.genotypeCount(2, vc.getNAlleles());
final double[] hetLikelihoods = new double[vc.getNAlleles() - 1];
final double[] homAltLikelihoods = new double[genotypeCount - hetLikelihoods.length - 1];
final double[] newLikelihoods = new double[3];
final List<Genotype> newGenotypes = new ArrayList<>(vc.getNSamples());
for (final Genotype oldGenotype : vc.getGenotypes()) {
final GenotypeBuilder gb = new GenotypeBuilder(oldGenotype);
final List<Allele> oldAlleles = oldGenotype.getAlleles();
if (oldAlleles != null) {
final List<Allele> newAlleles = new ArrayList<>(oldAlleles.size());
for (int i = 0; i < oldAlleles.size(); i++) {
final Allele oldAllele = oldAlleles.get(i);
if (oldAllele.isReference())
newAlleles.add(reference);
else if (oldAllele.isNoCall())
newAlleles.add(Allele.NO_CALL);
else
newAlleles.add(GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE);
}
gb.alleles(newAlleles);
}
if (oldGenotype.isNonInformative())
gb.PL(BIALLELIC_NON_INFORMATIVE_PLS);
else if (combineAltAlleleLikelihoods(oldGenotype, genotypeCount, newLikelihoods, hetLikelihoods, homAltLikelihoods))
gb.PL(newLikelihoods);
newGenotypes.add(gb.make());
}
return vcb.genotypes(newGenotypes).make();
}
}
/**
* Compute the conditional exact AFCalcResult for each allele in vc independently, returning
* the result of each, in order of the alt alleles in VC
*
* @param vc the VariantContext we want to analyze, with at least 1 alt allele
* @param log10AlleleFrequencyPriors the priors
* @return a list of the AFCalcResults for each bi-allelic sub context of vc
*/
@Requires({"vc != null", "log10AlleleFrequencyPriors != null"})
@Ensures("goodIndependentResult(vc, result)")
protected final List<AFCalculationResult> computeAlleleIndependentExact(final VariantContext vc, final int defaultPloidy,
final double[] log10AlleleFrequencyPriors) {
final List<AFCalculationResult> results = new LinkedList<AFCalculationResult>();
for ( final VariantContext subvc : makeAlleleConditionalContexts(vc) ) {
final AFCalculationResult resultTracker = biAlleleExactModel.getLog10PNonRef(subvc, defaultPloidy, vc.getNAlleles() - 1, log10AlleleFrequencyPriors);
results.add(resultTracker);
}
return results;
}
/**
* Returns the bi-allelic variant context for each alt allele in vc with bi-allelic likelihoods, in order
*
* @param vc the variant context to split. Must have n.alt.alleles > 1
* @return a bi-allelic variant context for each alt allele in vc
*/
@Requires({"vc != null", "vc.getNAlleles() > 1"})
@Ensures("result.size() == vc.getNAlleles() - 1")
protected final List<VariantContext> makeAlleleConditionalContexts(final VariantContext vc) {
final int nAltAlleles = vc.getNAlleles() - 1;
if ( nAltAlleles == 1 ) {
// fast path for bi-allelic case.
return Collections.singletonList(vc);
} else {
// go through the work of ripping up the VC into its biallelic components
final List<VariantContext> vcs = new LinkedList<VariantContext>();
for ( int altI = 0; altI < nAltAlleles; altI++ ) {
vcs.add(biallelicCombinedGLs(vc, altI + 1));
}
return vcs;
}
}
/**
* Create a single bi-allelic variant context from rootVC with alt allele with index altAlleleIndex
*
* @param rootVC the root (potentially multi-allelic) variant context
     * @param altAlleleIndex index of the alt allele among all alleles (reference == 0), so 1 == first alt allele
* @return a bi-allelic variant context based on rootVC
*/
@Requires({"rootVC.getNAlleles() > 1", "altAlleleIndex < rootVC.getNAlleles()"})
@Ensures({"result.isBiallelic()"})
protected final VariantContext biallelicCombinedGLs(final VariantContext rootVC, final int altAlleleIndex) {
if ( rootVC.isBiallelic() ) {
return rootVC;
} else {
final int nAlts = rootVC.getNAlleles() - 1;
final List<Genotype> biallelicGenotypes = new ArrayList<Genotype>(rootVC.getNSamples());
for ( final Genotype g : rootVC.getGenotypes() )
biallelicGenotypes.add(combineGLsPrecise(g, altAlleleIndex, nAlts));
final VariantContextBuilder vcb = new VariantContextBuilder(rootVC);
final Allele altAllele = rootVC.getAlternateAllele(altAlleleIndex - 1);
vcb.alleles(Arrays.asList(rootVC.getReference(), altAllele));
vcb.genotypes(biallelicGenotypes);
return vcb.make();
}
}
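    // Converts a phred-scaled PL into a log10 likelihood: log10(L) = -PL / 10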
private static final double PHRED_2_LOG10_COEFF = -.1;
/**
* Returns a new Genotype with the PLs of the multi-allelic original reduced to a bi-allelic case.
*
* <p>Uses the log-sum-exp trick in order to work well with very low PLs</p>
*
* <p>This is handled in the following way:</p>
*
* <p>Suppose we have for a A/B/C site the following GLs:</p>
*
* <p>AA AB BB AC BC CC</p>
*
* <p>and we want to get the bi-allelic GLs for X/B, where X is everything not B</p>
*
* <p>XX = AA + AC + CC (since X = A or C)<br/>
* XB = AB + BC <br/>
* BB = BB <br/>
* </p>
* <p>
     * This implementation uses the log sum trick in order to avoid numeric instability.
* </p>
*
* @param original the original multi-allelic genotype
     * @param altIndex the index of the alt allele we wish to keep in the biallelic case -- with ref == 0
* @param nAlts the total number of alt alleles
* @return a new biallelic genotype with appropriate PLs
*/
@Requires({"original.hasLikelihoods()"})
@Ensures({"result.hasLikelihoods()", "result.getPL().length == 3"})
protected Genotype combineGLsPrecise(final Genotype original, final int altIndex, final int nAlts ) {
if ( original.isNonInformative() )
return new GenotypeBuilder(original).PL(BIALLELIC_NON_INFORMATIVE_PLS).alleles(BIALLELIC_NOCALL).make();
if ( altIndex < 1 || altIndex > nAlts ) throw new IllegalStateException("altIndex must be between 1 and nAlts " + nAlts);
final int[] pls = original.getPL();
final int nAlleles = nAlts + 1;
final int plCount = pls.length;
double BB = 0;
final double[] XBvalues = new double[nAlleles - 1];
final double[] XXvalues = new double[plCount - nAlleles];
int xbOffset = 0;
int xxOffset = 0;
for ( int index = 0; index < plCount; index++ ) {
final GenotypeLikelihoods.GenotypeLikelihoodsAllelePair pair = GenotypeLikelihoods.getAllelePair(index);
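            // pair maps the flat PL index back to its two allele indices (reference allele == 0)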
int i = pair.alleleIndex1;
int j = pair.alleleIndex2;
if (i == j) {
if (i == altIndex) BB = PHRED_2_LOG10_COEFF * pls[index]; else XXvalues[xxOffset++] = PHRED_2_LOG10_COEFF * pls[index];
} else if (i == altIndex || j == altIndex)
XBvalues[xbOffset++] = PHRED_2_LOG10_COEFF * pls[index];
else
XXvalues[xxOffset++] = PHRED_2_LOG10_COEFF * pls[index];
}
final double XB = MathUtils.log10sumLog10(XBvalues);
final double XX = MathUtils.log10sumLog10(XXvalues);
final double[] GLs = new double[] { XX, XB, BB};
return new GenotypeBuilder(original).PL(GLs).alleles(BIALLELIC_NOCALL).make();
}
protected final List<AFCalculationResult> applyMultiAllelicPriors(final List<AFCalculationResult> conditionalPNonRefResults) {
final ArrayList<AFCalculationResult> sorted = new ArrayList<AFCalculationResult>(conditionalPNonRefResults);
// sort the results, so the most likely allele is first
Collections.sort(sorted, compareAFCalcResultsByPNonRef);
double lastPosteriorGt0 = sorted.get(0).getLog10PosteriorOfAFGT0();
final double log10SingleAllelePriorOfAFGt0 = conditionalPNonRefResults.get(0).getLog10PriorOfAFGT0();
for ( int i = 0; i < sorted.size(); i++ ) {
if ( sorted.get(i).getLog10PosteriorOfAFGT0() > lastPosteriorGt0 )
throw new IllegalStateException("pNonRefResults not sorted: lastPosteriorGt0 " + lastPosteriorGt0 + " but current is " + sorted.get(i).getLog10PosteriorOfAFGT0());
final double log10PriorAFGt0 = (i + 1) * log10SingleAllelePriorOfAFGt0;
final double log10PriorAFEq0 = Math.log10(1 - Math.pow(10, log10PriorAFGt0));
final double[] thetaTONPriors = new double[] { log10PriorAFEq0, log10PriorAFGt0 };
// bind pNonRef for allele to the posterior value of the AF > 0 with the new adjusted prior
sorted.set(i, sorted.get(i).withNewPriors(MathUtils.normalizeFromLog10(thetaTONPriors, true)));
}
return sorted;
}
/**
* Take the independent estimates of pNonRef for each alt allele and combine them into a single result
*
* Given n independent calculations for each of n alternate alleles create a single
* combined AFCalcResult with:
*
* priors for AF == 0 equal to theta^N for the nth least likely allele
* posteriors that reflect the combined chance that any alleles are segregating and corresponding
* likelihoods
* combined MLEs in the order of the alt alleles in vc
*
* @param sortedResultsWithThetaNPriors the pNonRef result for each allele independently
*/
protected AFCalculationResult combineIndependentPNonRefs(final VariantContext vc,
final List<AFCalculationResult> sortedResultsWithThetaNPriors,
final AFCalculationResult combinedAltAllelesResult) {
int nEvaluations = 0;
final int nAltAlleles = sortedResultsWithThetaNPriors.size();
final int[] alleleCountsOfMLE = new int[nAltAlleles];
final Map<Allele, Double> log10pRefByAllele = new HashMap<Allele, Double>(nAltAlleles);
// the sum of the log10 posteriors for AF == 0 and AF > 0 to determine joint probs
for ( final AFCalculationResult sortedResultWithThetaNPriors : sortedResultsWithThetaNPriors ) {
final Allele altAllele = sortedResultWithThetaNPriors.getAllelesUsedInGenotyping().get(1);
final int altI = vc.getAlleles().indexOf(altAllele) - 1;
// MLE of altI allele is simply the MLE of this allele in altAlleles
alleleCountsOfMLE[altI] = sortedResultWithThetaNPriors.getAlleleCountAtMLE(altAllele);
// bind pNonRef for allele to the posterior value of the AF > 0 with the new adjusted prior
log10pRefByAllele.put(altAllele, sortedResultWithThetaNPriors.getLog10PosteriorOfAFEq0());
// trivial -- update the number of evaluations
nEvaluations += sortedResultWithThetaNPriors.nEvaluations;
}
return new IndependentAlleleAFCalculationResult(alleleCountsOfMLE, nEvaluations, vc.getAlleles(),
// necessary to ensure all values < 0
MathUtils.normalizeFromLog10(new double[] { combinedAltAllelesResult.getLog10LikelihoodOfAFEq0(), combinedAltAllelesResult.getLog10LikelihoodOfAFGT0() }, true),
// priors incorporate multiple alt alleles, must be normalized
MathUtils.normalizeFromLog10(new double[] { combinedAltAllelesResult.getLog10PriorOfAFEq0(), combinedAltAllelesResult.getLog10PriorOfAFGT0() }, true),
log10pRefByAllele, sortedResultsWithThetaNPriors);
}
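    /**
     * Collapses a genotype's multi-allelic PLs into the three bi-allelic likelihoods used for the
     * combined "all alts as one" calculation: dest[0] = hom-ref, dest[1] = log10 sum over all
     * genotypes containing the reference and an alt allele, dest[2] = log10 sum over all genotypes
     * with no reference allele.
     *
     * @return false if the genotype has no PLs to combine, true once dest has been filled in
     */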
private boolean combineAltAlleleLikelihoods(final Genotype g, final int plMaxIndex, final double[] dest,
final double[] hetLikelihoods, final double[] homAltLikelihoods) {
final int[] pls = g.getPL();
if (pls == null)
return false;
int hetNextIndex = 0;
int homAltNextIndex = 0;
for (int plIndex = 1; plIndex < plMaxIndex; plIndex++) {
final GenotypeLikelihoods.GenotypeLikelihoodsAllelePair alleles = GenotypeLikelihoods.getAllelePair(plIndex);
if (alleles.alleleIndex1 == 0 || alleles.alleleIndex2 == 0)
hetLikelihoods[hetNextIndex++] = pls[plIndex] * PHRED_2_LOG10_COEFF;
else
homAltLikelihoods[homAltNextIndex++] = pls[plIndex] * PHRED_2_LOG10_COEFF;
}
dest[0] = pls[0] * PHRED_2_LOG10_COEFF;
dest[1] = MathUtils.approximateLog10SumLog10(hetLikelihoods);
dest[2] = MathUtils.approximateLog10SumLog10(homAltLikelihoods);
return true;
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/afcalc/OriginalDiploidExactAFCalculator.java
/*
 * Copyright 2012-2016 Broad Institute, Inc.
 *
 * Licensed under the Broad Institute Software License Agreement for academic
 * non-commercial research purposes only.
 */
package org.broadinstitute.gatk.tools.walkers.genotyper.afcalc;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.collections.Pair;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.VariantContext;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Map;
/**
* Original bi-allelic ~O(N) implementation. Kept here for posterity and reference
*/
class OriginalDiploidExactAFCalculator extends DiploidExactAFCalculator {
protected OriginalDiploidExactAFCalculator() {
}
@Override
protected AFCalculationResult computeLog10PNonRef(final VariantContext vc,
@SuppressWarnings("unused")
final int defaultPloidy,
final double[] log10AlleleFrequencyPriors,
final StateTracker stateTracker) {
final double[] log10AlleleFrequencyLikelihoods = new double[log10AlleleFrequencyPriors.length];
final double[] log10AlleleFrequencyPosteriors = new double[log10AlleleFrequencyPriors.length];
final Pair<Integer, Integer> result = linearExact(vc, log10AlleleFrequencyPriors, log10AlleleFrequencyLikelihoods, log10AlleleFrequencyPosteriors);
final int lastK = result.getFirst();
final int mleK = result.getSecond();
final double log10LikelihoodAFGt0 = lastK == 0 ? MathUtils.LOG10_P_OF_ZERO : MathUtils.log10sumLog10(log10AlleleFrequencyLikelihoods, 1, lastK+1);
final double[] log10Likelihoods = new double[]{log10AlleleFrequencyLikelihoods[0], log10LikelihoodAFGt0};
final double[] log10Priors = new double[]{log10AlleleFrequencyPriors[0], MathUtils.log10sumLog10(log10AlleleFrequencyPriors, 1)};
final double[] log10Posteriors = MathUtils.vectorSum(log10Likelihoods, log10Priors);
final double log10PRef = log10Posteriors[1] > log10Posteriors[0] ? MathUtils.LOG10_P_OF_ZERO : 0.0;
final Map<Allele, Double> log10pRefByAllele = Collections.singletonMap(vc.getAlternateAllele(0), log10PRef);
return new AFCalculationResult(new int[]{mleK}, 0, vc.getAlleles(),
MathUtils.normalizeFromLog10(log10Likelihoods, true),
MathUtils.normalizeFromLog10(log10Priors, true),
log10pRefByAllele);
}
/**
* A simple data structure that holds the current, prev, and prev->prev likelihoods vectors
* for the exact model calculation
*/
private final static class ExactACCache {
double[] kMinus2, kMinus1, kMinus0;
private static double[] create(int n) {
return new double[n];
}
public ExactACCache(int n) {
kMinus2 = create(n);
kMinus1 = create(n);
kMinus0 = create(n);
}
final public void rotate() {
double[] tmp = kMinus2;
kMinus2 = kMinus1;
kMinus1 = kMinus0;
kMinus0 = tmp;
}
final public double[] getkMinus2() {
return kMinus2;
}
final public double[] getkMinus1() {
return kMinus1;
}
final public double[] getkMinus0() {
return kMinus0;
}
}
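    // Illustrative sketch (not part of the original class): how the three rolling rows in ExactACCache
    // are intended to be consumed and recycled while sweeping over allele counts k. The body only
    // demonstrates the access/rotate pattern; the real per-sample update lives in linearExact() below.
    @SuppressWarnings("unused")
    private static void exactACCacheUsageSketch(final int numSamples) {
        final ExactACCache cache = new ExactACCache(numSamples + 1);
        cache.getkMinus0()[0] = 0.0; // log10(1) base case: zero samples at allele count 0
        final int numChr = 2 * numSamples;
        for (int k = 0; k <= numChr; k++) {
            final double[] rowK = cache.getkMinus0();        // row being filled for allele count k
            final double[] rowKminus1 = cache.getkMinus1();  // previously filled row for k-1
            final double[] rowKminus2 = cache.getkMinus2();  // previously filled row for k-2
            // ... fill rowK[j] for j = 1..numSamples from rowK[j-1], rowKminus1[j-1] and rowKminus2[j-1] ...
            cache.rotate(); // rowK becomes the k-1 row, k-1 becomes k-2, and the stale k-2 buffer is reused
        }
    }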
private Pair<Integer, Integer> linearExact(final VariantContext vc,
double[] log10AlleleFrequencyPriors,
double[] log10AlleleFrequencyLikelihoods,
double[] log10AlleleFrequencyPosteriors) {
final ArrayList<double[]> genotypeLikelihoods = getGLs(vc.getGenotypes(), true);
final int numSamples = genotypeLikelihoods.size()-1;
final int numChr = 2*numSamples;
final ExactACCache logY = new ExactACCache(numSamples+1);
logY.getkMinus0()[0] = 0.0; // the zero case
double maxLog10L = Double.NEGATIVE_INFINITY;
boolean done = false;
int lastK = -1, mleK = -1;
for (int k=0; k <= numChr && ! done; k++ ) {
final double[] kMinus0 = logY.getkMinus0();
if ( k == 0 ) { // special case for k = 0
for ( int j=1; j <= numSamples; j++ ) {
kMinus0[j] = kMinus0[j-1] + genotypeLikelihoods.get(j)[0];
}
} else { // k > 0
final double[] kMinus1 = logY.getkMinus1();
final double[] kMinus2 = logY.getkMinus2();
for ( int j=1; j <= numSamples; j++ ) {
final double[] gl = genotypeLikelihoods.get(j);
final double logDenominator = MathUtils.Log10Cache.get(2*j) + MathUtils.Log10Cache.get(2*j-1);
double aa = Double.NEGATIVE_INFINITY;
double ab = Double.NEGATIVE_INFINITY;
if (k < 2*j-1)
aa = MathUtils.Log10Cache.get(2*j-k) + MathUtils.Log10Cache.get(2*j-k-1) + kMinus0[j-1] + gl[0];
if (k < 2*j)
ab = MathUtils.Log10Cache.get(2*k) + MathUtils.Log10Cache.get(2*j-k)+ kMinus1[j-1] + gl[1];
double log10Max;
if (k > 1) {
final double bb = MathUtils.Log10Cache.get(k) + MathUtils.Log10Cache.get(k-1) + kMinus2[j-1] + gl[2];
log10Max = MathUtils.approximateLog10SumLog10(aa, ab, bb);
} else {
// we know we aren't considering the BB case, so we can use an optimized log10 function
log10Max = MathUtils.approximateLog10SumLog10(aa, ab);
}
// finally, update the L(j,k) value
kMinus0[j] = log10Max - logDenominator;
}
}
// update the posteriors vector
final double log10LofK = kMinus0[numSamples];
log10AlleleFrequencyLikelihoods[k] = log10LofK;
log10AlleleFrequencyPosteriors[k] = log10LofK + log10AlleleFrequencyPriors[k];
// can we abort early?
lastK = k;
if ( log10LofK > maxLog10L ) {
maxLog10L = log10LofK;
mleK = k;
}
if ( log10LofK < maxLog10L - StateTracker.MAX_LOG10_ERROR_TO_STOP_EARLY ) {
//if ( DEBUG ) System.out.printf(" *** breaking early k=%d log10L=%.2f maxLog10L=%.2f%n", k, log10LofK, maxLog10L);
done = true;
}
logY.rotate();
}
return new Pair<>(lastK, mleK);
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/variantrecalibration/VariantDataManager.java
/*
 * Copyright 2012-2016 Broad Institute, Inc.
 *
 * Licensed under the Broad Institute Software License Agreement for academic
 * non-commercial research purposes only.
 */
package org.broadinstitute.gatk.tools.walkers.variantrecalibration;
import org.apache.commons.lang.ArrayUtils;
import org.apache.log4j.Logger;
import org.broadinstitute.gatk.utils.Utils;
import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
import org.broadinstitute.gatk.utils.GenomeLoc;
import org.broadinstitute.gatk.utils.MathUtils;
import htsjdk.variant.vcf.VCFConstants;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import org.broadinstitute.gatk.utils.help.HelpConstants;
import org.broadinstitute.gatk.utils.collections.ExpandingArrayList;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextBuilder;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import java.util.*;
/**
* Created by IntelliJ IDEA.
* User: rpoplin
* Date: Mar 4, 2011
*/
public class VariantDataManager {
private List<VariantDatum> data = Collections.emptyList();
private double[] meanVector;
private double[] varianceVector; // this is really the standard deviation
public List<String> annotationKeys;
private final VariantRecalibratorArgumentCollection VRAC;
protected final static Logger logger = Logger.getLogger(VariantDataManager.class);
protected final List<TrainingSet> trainingSets;
private static final double SAFETY_OFFSET = 0.01; //To use for example as 1/(X + SAFETY_OFFSET) to protect against dividing or taking log of X=0.
private static final double PRECISION = 0.01; //To use mainly with MathUtils.compareDoubles(a,b,PRECISION)
public VariantDataManager( final List<String> annotationKeys, final VariantRecalibratorArgumentCollection VRAC ) {
this.data = Collections.emptyList();
this.annotationKeys = new ArrayList<>( annotationKeys );
this.VRAC = VRAC;
meanVector = new double[this.annotationKeys.size()];
varianceVector = new double[this.annotationKeys.size()];
trainingSets = new ArrayList<>();
}
public void setData( final List<VariantDatum> data ) {
this.data = data;
}
public void setNormalization(final Map<String, Double> anMeans, final Map<String, Double> anStdDevs) {
for (int i = 0; i < this.annotationKeys.size(); i++) {
meanVector[i] = anMeans.get(annotationKeys.get(i));
varianceVector[i] = anStdDevs.get(annotationKeys.get(i));
}
}
public List<VariantDatum> getData() {
return data;
}
/**
     * Normalize each annotation to mean 0 and standard deviation 1, and then reorder the annotations.
     *
     * @param calculateMeans if true, compute the means and standard deviations from the training data;
     *                       if false, use the values previously supplied via {@link #setNormalization}
     * @param theOrder a list of integers specifying the desired annotation order. If this is null,
     *                 the order computed by {@link #calculateSortOrder(double[])} is used instead.
*/
public void normalizeData(final boolean calculateMeans, List<Integer> theOrder) {
boolean foundZeroVarianceAnnotation = false;
for( int iii = 0; iii < meanVector.length; iii++ ) {
final double theMean, theSTD;
if (calculateMeans) {
theMean = mean(iii, true);
theSTD = standardDeviation(theMean, iii, true);
if (Double.isNaN(theMean)) {
throw new UserException.BadInput("Values for " + annotationKeys.get(iii) + " annotation not detected for ANY training variant in the input callset. VariantAnnotator may be used to add these annotations.");
}
foundZeroVarianceAnnotation = foundZeroVarianceAnnotation || (theSTD < 1E-5);
meanVector[iii] = theMean;
varianceVector[iii] = theSTD;
}
else {
theMean = meanVector[iii];
theSTD = varianceVector[iii];
}
logger.info(annotationKeys.get(iii) + String.format(": \t mean = %.2f\t standard deviation = %.2f", theMean, theSTD));
for( final VariantDatum datum : data ) {
// Transform each data point via: (x - mean) / standard deviation
datum.annotations[iii] = ( datum.isNull[iii] ? 0.1 * Utils.getRandomGenerator().nextGaussian() : ( datum.annotations[iii] - theMean ) / theSTD );
}
}
if( foundZeroVarianceAnnotation ) {
throw new UserException.BadInput( "Found annotations with zero variance. They must be excluded before proceeding." );
}
// trim data by standard deviation threshold and mark failing data for exclusion later
for( final VariantDatum datum : data ) {
boolean remove = false;
for( final double val : datum.annotations ) {
remove = remove || (Math.abs(val) > VRAC.STD_THRESHOLD);
}
datum.failingSTDThreshold = remove;
}
        // re-order the annotations so that the results don't depend on the order in which they were specified on the command line;
        // the deviation of each annotation's mean (over the training data) from its mean over the remaining data is used as a
        // simple proxy for information content (perhaps there is a better criterion), unless the serialized report supplies an
        // explicit order via the argument theOrder
if (theOrder == null){
theOrder = calculateSortOrder(meanVector);
}
annotationKeys = reorderList(annotationKeys, theOrder);
varianceVector = ArrayUtils.toPrimitive(reorderArray(ArrayUtils.toObject(varianceVector), theOrder));
meanVector = ArrayUtils.toPrimitive(reorderArray(ArrayUtils.toObject(meanVector), theOrder));
for( final VariantDatum datum : data ) {
datum.annotations = ArrayUtils.toPrimitive(reorderArray(ArrayUtils.toObject(datum.annotations), theOrder));
datum.isNull = ArrayUtils.toPrimitive(reorderArray(ArrayUtils.toObject(datum.isNull), theOrder));
}
logger.info("Annotation order is: " + annotationKeys.toString());
}
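    // Illustrative sketch (hypothetical helper, not part of the original class): the per-annotation
    // z-score transform applied in normalizeData() above, shown on a plain array. Non-missing values
    // are standardized as (x - mean) / standardDeviation, which is what happens to datum.annotations[iii].
    @SuppressWarnings("unused")
    private static double[] zScoreSketch(final double[] values, final double mean, final double standardDeviation) {
        final double[] standardized = new double[values.length];
        for (int i = 0; i < values.length; i++) {
            standardized[i] = (values[i] - mean) / standardDeviation;
        }
        return standardized; // e.g. values {35, 40, 45} with mean 40 and std 5 become {-1, 0, 1}
    }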
public double[] getMeanVector() {
return meanVector;
}
public double[] getVarianceVector() {
return varianceVector;
}
/**
     * Get a list of annotation indices ordered by decreasing distance between each input value and the
     * mean of the corresponding annotation over the non-training data
     * @param inputVector the data to consider (one value per annotation; typically the vector of training means)
* @return a non-null list of integers with length matching the length of the input array
*/
protected List<Integer> calculateSortOrder(final double[] inputVector) {
final List<Integer> theOrder = new ArrayList<>(inputVector.length);
final List<MyDoubleForSorting> toBeSorted = new ArrayList<>(inputVector.length);
int count = 0;
for( int iii = 0; iii < inputVector.length; iii++ ) {
toBeSorted.add(new MyDoubleForSorting(-1.0 * Math.abs(inputVector[iii] - mean(iii, false)), count++));
}
Collections.sort(toBeSorted);
for( final MyDoubleForSorting d : toBeSorted ) {
theOrder.add(d.originalIndex); // read off the sort order by looking at the index field
}
return theOrder;
}
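    // Illustrative sketch (hypothetical helper, not part of the original class): reproduces the ordering
    // rule used by calculateSortOrder() on a plain array of absolute deviations, largest magnitude first.
    @SuppressWarnings("unused")
    private static List<Integer> sortOrderSketch(final double[] absoluteDeviations) {
        final List<Integer> order = new ArrayList<>(absoluteDeviations.length);
        for (int i = 0; i < absoluteDeviations.length; i++) {
            order.add(i);
        }
        Collections.sort(order, new Comparator<Integer>() {
            @Override
            public int compare(final Integer a, final Integer b) {
                return Double.compare(absoluteDeviations[b], absoluteDeviations[a]); // descending by magnitude
            }
        });
        return order; // e.g. deviations {0.2, 1.5, 0.7} yield the order {1, 2, 0}
    }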
// small private class to assist in reading off the new ordering of the annotation array
private class MyDoubleForSorting implements Comparable<MyDoubleForSorting> {
final Double myData;
final int originalIndex;
public MyDoubleForSorting(final double myData, final int originalIndex) {
this.myData = myData;
this.originalIndex = originalIndex;
}
@Override
public int compareTo(final MyDoubleForSorting other) {
return myData.compareTo(other.myData);
}
}
/**
     * Convenience connector method to work with arrays instead of lists. See {@link #reorderList(List, List)}.
*/
private <T> T[] reorderArray(final T[] data, final List<Integer> order) {
return reorderList(Arrays.asList(data), order).toArray(data);
}
/**
* Reorder the given data list to be in the specified order
* @param data the data to reorder
* @param order the new order to use
* @return a reordered list of data
*/
private <T> List<T> reorderList(final List<T> data, final List<Integer> order) {
final List<T> returnList = new ArrayList<>(data.size());
for( final int index : order ) {
returnList.add( data.get(index) );
}
return returnList;
}
/**
     * Convert a normalized point to its original annotation value
*
* norm = (orig - mu) / sigma
* orig = norm * sigma + mu
*
* @param normalizedValue the normalized value of the ith annotation
* @param annI the index of the annotation value
* @return the denormalized value for the annotation
*/
public double denormalizeDatum(final double normalizedValue, final int annI) {
final double mu = meanVector[annI];
final double sigma = varianceVector[annI];
return normalizedValue * sigma + mu;
}
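    // Worked example for the normalize/denormalize formulas above (hypothetical values): with mu = 40.0 and
    // sigma = 5.0, an original annotation value of 50.0 normalizes to (50 - 40) / 5 = 2.0, and
    // denormalizeDatum(2.0, annI) maps it back to 2.0 * 5 + 40 = 50.0.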
public void addTrainingSet( final TrainingSet trainingSet ) {
trainingSets.add( trainingSet );
}
public List<String> getAnnotationKeys() {
return annotationKeys;
}
public boolean checkHasTrainingSet() {
for( final TrainingSet trainingSet : trainingSets ) {
if( trainingSet.isTraining ) { return true; }
}
return false;
}
public boolean checkHasTruthSet() {
for( final TrainingSet trainingSet : trainingSets ) {
if( trainingSet.isTruth ) { return true; }
}
return false;
}
public List<VariantDatum> getTrainingData() {
final List<VariantDatum> trainingData = new ExpandingArrayList<>();
for( final VariantDatum datum : data ) {
if( datum.atTrainingSite && !datum.failingSTDThreshold ) {
trainingData.add( datum );
}
}
logger.info( "Training with " + trainingData.size() + " variants after standard deviation thresholding." );
if( trainingData.size() < VRAC.MIN_NUM_BAD_VARIANTS ) {
logger.warn( "WARNING: Training with very few variant sites! Please check the model reporting PDF to ensure the quality of the model is reliable." );
} else if( trainingData.size() > VRAC.MAX_NUM_TRAINING_DATA ) {
logger.warn( "WARNING: Very large training set detected. Downsampling to " + VRAC.MAX_NUM_TRAINING_DATA + " training variants." );
Collections.shuffle(trainingData, Utils.getRandomGenerator());
return trainingData.subList(0, VRAC.MAX_NUM_TRAINING_DATA);
}
return trainingData;
}
public List<VariantDatum> selectWorstVariants() {
final List<VariantDatum> trainingData = new ExpandingArrayList<>();
for( final VariantDatum datum : data ) {
if( datum != null && !datum.failingSTDThreshold && !Double.isInfinite(datum.lod) && datum.lod < VRAC.BAD_LOD_CUTOFF ) {
datum.atAntiTrainingSite = true;
trainingData.add( datum );
}
}
logger.info( "Selected worst " + trainingData.size() + " scoring variants --> variants with LOD <= " + String.format("%.4f", VRAC.BAD_LOD_CUTOFF) + "." );
return trainingData;
}
public List<VariantDatum> getEvaluationData() {
final List<VariantDatum> evaluationData = new ExpandingArrayList<>();
for( final VariantDatum datum : data ) {
if( datum != null && !datum.failingSTDThreshold && !datum.atTrainingSite && !datum.atAntiTrainingSite ) {
evaluationData.add( datum );
}
}
return evaluationData;
}
/**
* Remove all VariantDatum's from the data list which are marked as aggregate data
*/
public void dropAggregateData() {
final Iterator<VariantDatum> iter = data.iterator();
while (iter.hasNext()) {
final VariantDatum datum = iter.next();
if( datum.isAggregate ) {
iter.remove();
}
}
}
public List<VariantDatum> getRandomDataForPlotting( final int numToAdd, final List<VariantDatum> trainingData, final List<VariantDatum> antiTrainingData, final List<VariantDatum> evaluationData ) {
final List<VariantDatum> returnData = new ExpandingArrayList<>();
Collections.shuffle(trainingData, Utils.getRandomGenerator());
Collections.shuffle(antiTrainingData, Utils.getRandomGenerator());
Collections.shuffle(evaluationData, Utils.getRandomGenerator());
returnData.addAll(trainingData.subList(0, Math.min(numToAdd, trainingData.size())));
returnData.addAll(antiTrainingData.subList(0, Math.min(numToAdd, antiTrainingData.size())));
returnData.addAll(evaluationData.subList(0, Math.min(numToAdd, evaluationData.size())));
Collections.shuffle(returnData, Utils.getRandomGenerator());
return returnData;
}
protected double mean( final int index, final boolean trainingData ) {
double sum = 0.0;
int numNonNull = 0;
for( final VariantDatum datum : data ) {
if( (trainingData == datum.atTrainingSite) && !datum.isNull[index] ) { sum += datum.annotations[index]; numNonNull++; }
}
return sum / ((double) numNonNull);
}
protected double standardDeviation( final double mean, final int index, final boolean trainingData ) {
double sum = 0.0;
int numNonNull = 0;
for( final VariantDatum datum : data ) {
if( (trainingData == datum.atTrainingSite) && !datum.isNull[index] ) { sum += ((datum.annotations[index] - mean)*(datum.annotations[index] - mean)); numNonNull++; }
}
return Math.sqrt( sum / ((double) numNonNull) );
}
public void decodeAnnotations( final VariantDatum datum, final VariantContext vc, final boolean jitter ) {
final double[] annotations = new double[annotationKeys.size()];
final boolean[] isNull = new boolean[annotationKeys.size()];
int iii = 0;
for( final String key : annotationKeys ) {
isNull[iii] = false;
annotations[iii] = decodeAnnotation( key, vc, jitter, VRAC, datum );
if( Double.isNaN(annotations[iii]) ) { isNull[iii] = true; }
iii++;
}
datum.annotations = annotations;
datum.isNull = isNull;
}
/** Transforms an interval [xmin, xmax] to (-inf, +inf) **/
private static double logitTransform( final double x, final double xmin, final double xmax) {
return Math.log((x - xmin)/(xmax - x));
}
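    // Worked example for logitTransform above (hypothetical bounds): with xmin = -0.01 and xmax = 60.01
    // (an MQ cap of 60 padded by SAFETY_OFFSET), the midpoint x = 30 maps to ln(30.01 / 30.01) = 0, while
    // x = 60 maps to ln(60.01 / 0.01) = ln(6001) ~= 8.70, stretching values near the cap out toward +infinity.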
private static double decodeAnnotation( final String annotationKey, final VariantContext vc, final boolean jitter, final VariantRecalibratorArgumentCollection vrac, final VariantDatum datum ) {
double value;
        final double LOG_OF_TWO = 0.6931472; // natural log of 2: the minimum SOR value of 2.0 becomes ln(2) after the log transform, hence the jitter check below
try {
//if we're in allele-specific mode and an allele-specific annotation has been requested, parse the appropriate value from the list
if(vrac.useASannotations && annotationKey.startsWith(GATKVCFConstants.ALLELE_SPECIFIC_PREFIX)) {
final List<Object> valueList = vc.getAttributeAsList(annotationKey);
if (vc.hasAllele(datum.alternateAllele)) {
final int altIndex = vc.getAlleleIndex(datum.alternateAllele)-1; //-1 is to convert the index from all alleles (including reference) to just alternate alleles
value = Double.parseDouble((String)valueList.get(altIndex));
}
//if somehow our alleles got mixed up
else
throw new IllegalStateException("VariantDatum allele " + datum.alternateAllele + " is not contained in the input VariantContext.");
}
else
value = vc.getAttributeAsDouble( annotationKey, Double.NaN );
if( Double.isInfinite(value) ) { value = Double.NaN; }
if( jitter && annotationKey.equalsIgnoreCase(GATKVCFConstants.HAPLOTYPE_SCORE_KEY) && MathUtils.compareDoubles(value, 0.0, PRECISION) == 0 ) { value += 0.01 * Utils.getRandomGenerator().nextGaussian(); }
if( jitter && (annotationKey.equalsIgnoreCase(GATKVCFConstants.FISHER_STRAND_KEY) || annotationKey.equalsIgnoreCase(GATKVCFConstants.AS_FILTER_STATUS_KEY)) && MathUtils.compareDoubles(value, 0.0, PRECISION) == 0 ) { value += 0.01 * Utils.getRandomGenerator().nextGaussian(); }
if( jitter && annotationKey.equalsIgnoreCase(GATKVCFConstants.INBREEDING_COEFFICIENT_KEY) && MathUtils.compareDoubles(value, 0.0, PRECISION) == 0 ) { value += 0.01 * Utils.getRandomGenerator().nextGaussian(); }
if( jitter && (annotationKey.equalsIgnoreCase(GATKVCFConstants.STRAND_ODDS_RATIO_KEY) || annotationKey.equalsIgnoreCase(GATKVCFConstants.AS_STRAND_ODDS_RATIO_KEY)) && MathUtils.compareDoubles(value, LOG_OF_TWO, PRECISION) == 0 ) { value += 0.01 * Utils.getRandomGenerator().nextGaussian(); } //min SOR is 2.0, then we take ln
if( jitter && (annotationKey.equalsIgnoreCase(VCFConstants.RMS_MAPPING_QUALITY_KEY) || annotationKey.equalsIgnoreCase(GATKVCFConstants.AS_RMS_MAPPING_QUALITY_KEY))) {
if( vrac.MQ_CAP > 0) {
value = logitTransform(value, -SAFETY_OFFSET, vrac.MQ_CAP + SAFETY_OFFSET);
if (MathUtils.compareDoubles(value, logitTransform(vrac.MQ_CAP, -SAFETY_OFFSET, vrac.MQ_CAP + SAFETY_OFFSET), PRECISION) == 0 ) {
value += vrac.MQ_JITTER * Utils.getRandomGenerator().nextGaussian();
}
} else if( MathUtils.compareDoubles(value, vrac.MQ_CAP, PRECISION) == 0 ) {
value += vrac.MQ_JITTER * Utils.getRandomGenerator().nextGaussian();
}
}
} catch( Exception e ) {
value = Double.NaN; // The VQSR works with missing data by marginalizing over the missing dimension when evaluating the Gaussian mixture model
}
return value;
}
public void parseTrainingSets( final RefMetaDataTracker tracker, final GenomeLoc genomeLoc, final VariantContext evalVC, final VariantDatum datum, final boolean TRUST_ALL_POLYMORPHIC ) {
datum.isKnown = false;
datum.atTruthSite = false;
datum.atTrainingSite = false;
datum.atAntiTrainingSite = false;
datum.prior = 2.0;
for( final TrainingSet trainingSet : trainingSets ) {
for( final VariantContext trainVC : tracker.getValues(trainingSet.rodBinding, genomeLoc) ) {
if (VRAC.useASannotations && !doAllelesMatch(trainVC, datum))
continue;
if( isValidVariant( evalVC, trainVC, TRUST_ALL_POLYMORPHIC ) ) {
datum.isKnown = datum.isKnown || trainingSet.isKnown;
datum.atTruthSite = datum.atTruthSite || trainingSet.isTruth;
datum.atTrainingSite = datum.atTrainingSite || trainingSet.isTraining;
datum.prior = Math.max( datum.prior, trainingSet.prior );
datum.consensusCount += ( trainingSet.isConsensus ? 1 : 0 );
}
if( trainVC != null ) {
datum.atAntiTrainingSite = datum.atAntiTrainingSite || trainingSet.isAntiTraining;
}
}
}
}
private boolean isValidVariant( final VariantContext evalVC, final VariantContext trainVC, final boolean TRUST_ALL_POLYMORPHIC) {
return trainVC != null && trainVC.isNotFiltered() && trainVC.isVariant() && checkVariationClass( evalVC, trainVC ) &&
(TRUST_ALL_POLYMORPHIC || !trainVC.hasGenotypes() || trainVC.isPolymorphicInSamples());
}
private boolean doAllelesMatch(final VariantContext trainVC, final VariantDatum datum) {
//only do this check in the allele-specific case, where each datum represents one allele
return datum.alternateAllele == null || trainVC.getAlternateAlleles().contains(datum.alternateAllele);
}
protected static boolean checkVariationClass( final VariantContext evalVC, final VariantContext trainVC ) {
switch( trainVC.getType() ) {
case SNP:
case MNP:
return checkVariationClass( evalVC, VariantRecalibratorArgumentCollection.Mode.SNP );
case INDEL:
case MIXED:
case SYMBOLIC:
return checkVariationClass( evalVC, VariantRecalibratorArgumentCollection.Mode.INDEL );
default:
return false;
}
}
protected static boolean checkVariationClass( final VariantContext evalVC, final VariantRecalibratorArgumentCollection.Mode mode ) {
switch( mode ) {
case SNP:
return evalVC.isSNP() || evalVC.isMNP();
case INDEL:
return evalVC.isStructuralIndel() || evalVC.isIndel() || evalVC.isMixed() || evalVC.isSymbolic();
case BOTH:
return true;
default:
throw new IllegalStateException( "Encountered unknown recal mode: " + mode );
}
}
protected static boolean checkVariationClass( final VariantContext evalVC, final Allele allele, final VariantRecalibratorArgumentCollection.Mode mode ) {
switch( mode ) {
case SNP:
//note that spanning deletions are considered SNPs by this logic
return evalVC.getReference().length() == allele.length();
case INDEL:
return (evalVC.getReference().length() != allele.length()) || allele.isSymbolic();
case BOTH:
return true;
default:
throw new IllegalStateException( "Encountered unknown recal mode: " + mode );
}
}
public void writeOutRecalibrationTable( final VariantContextWriter recalWriter ) {
// we need to sort in coordinate order in order to produce a valid VCF
Collections.sort( data, new Comparator<VariantDatum>() {
public int compare(VariantDatum vd1, VariantDatum vd2) {
return vd1.loc.compareTo(vd2.loc);
}} );
// create dummy alleles to be used
List<Allele> alleles = Arrays.asList(Allele.create("N", true), Allele.create("<VQSR>", false));
for( final VariantDatum datum : data ) {
if (VRAC.useASannotations)
alleles = Arrays.asList(datum.referenceAllele, datum.alternateAllele); //use the alleles to distinguish between multiallelics in AS mode
VariantContextBuilder builder = new VariantContextBuilder("VQSR", datum.loc.getContig(), datum.loc.getStart(), datum.loc.getStop(), alleles);
builder.attribute(VCFConstants.END_KEY, datum.loc.getStop());
builder.attribute(GATKVCFConstants.VQS_LOD_KEY, String.format("%.4f", datum.lod));
builder.attribute(GATKVCFConstants.CULPRIT_KEY, (datum.worstAnnotation != -1 ? annotationKeys.get(datum.worstAnnotation) : "NULL"));
if ( datum.atTrainingSite ) builder.attribute(GATKVCFConstants.POSITIVE_LABEL_KEY, true);
if ( datum.atAntiTrainingSite ) builder.attribute(GATKVCFConstants.NEGATIVE_LABEL_KEY, true);
recalWriter.add(builder.make());
}
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/PosteriorLikelihoodsUtils.java
/*
 * Copyright 2012-2016 Broad Institute, Inc.
 *
 * Licensed under the Broad Institute Software License Agreement for academic
 * non-commercial research purposes only.
 */
package org.broadinstitute.gatk.tools.walkers.variantutils;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.Utils;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
import htsjdk.variant.variantcontext.*;
import htsjdk.variant.vcf.VCFConstants;
import java.util.*;
public class PosteriorLikelihoodsUtils {
public static VariantContext calculatePosteriorGLs(final VariantContext vc1,
final Collection<VariantContext> resources,
final int numRefSamplesFromMissingResources,
final double globalFrequencyPriorDirichlet,
final boolean useInputSamples,
final boolean useAC,
final boolean useACoff) {
final Map<Allele,Integer> totalAlleleCounts = new HashMap<>();
boolean nonSNPprior = false;
if (vc1 == null) throw new IllegalArgumentException("VariantContext vc1 is null");
final boolean nonSNPeval = !vc1.isSNP();
final double[] alleleCounts = new double[vc1.getNAlleles()];
//only use discovered allele count if there are at least 10 samples
final boolean useDiscoveredAC = !useACoff && vc1.getNSamples() >= 10;
if(vc1.isSNP())
{
//store the allele counts for each allele in the variant priors
for ( final VariantContext resource : resources ) {
if( !resource.isSNP()) nonSNPprior = true;
addAlleleCounts(totalAlleleCounts,resource,useAC);
}
//add the allele counts from the input samples (if applicable)
if ( useInputSamples ) {
addAlleleCounts(totalAlleleCounts,vc1,useAC);
}
//add zero allele counts for any reference alleles not seen in priors (if applicable)
int existingRefCounts = 0;
if (totalAlleleCounts.containsKey(vc1.getReference()))
existingRefCounts += totalAlleleCounts.get(vc1.getReference());
totalAlleleCounts.put(vc1.getReference(),existingRefCounts+numRefSamplesFromMissingResources);
}
// now extract the counts of the alleles present within vc1, and in order
int alleleIndex = 0;
for ( final Allele allele : vc1.getAlleles() ) {
alleleCounts[alleleIndex++] = globalFrequencyPriorDirichlet + ( totalAlleleCounts.containsKey(allele) ?
totalAlleleCounts.get(allele) : 0 );
}
//parse the likelihoods for each sample's genotype
final List<double[]> likelihoods = new ArrayList<>(vc1.getNSamples());
for ( final Genotype genotype : vc1.getGenotypes() ) {
if (!genotype.hasExtendedAttribute(GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY)){
likelihoods.add(genotype.hasLikelihoods() ? genotype.getLikelihoods().getAsVector() : null );
}
else {
Object PPfromVCF = genotype.getExtendedAttribute(GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY);
//parse the PPs into a vector of probabilities
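                // PP (like PL) values are Phred-scaled, i.e. -10 * log10(probability), so dividing by -10.0
                // below recovers log10 likelihoods; e.g. a PP of 30 corresponds to log10(P) = -3.0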
if (PPfromVCF instanceof String) {
final String PPstring = (String)PPfromVCF;
if (PPstring.charAt(0)=='.') //samples not in trios will have PP tag like ".,.,." if family priors are applied
likelihoods.add(genotype.hasLikelihoods() ? genotype.getLikelihoods().getAsVector() : null );
else {
final String[] likelihoodsAsStringVector = PPstring.split(",");
double[] likelihoodsAsVector = new double[likelihoodsAsStringVector.length];
for ( int i = 0; i < likelihoodsAsStringVector.length; i++ ) {
likelihoodsAsVector[i] = Double.parseDouble(likelihoodsAsStringVector[i])/-10.0;
}
likelihoods.add(likelihoodsAsVector);
}
}
else {
int[] likelihoodsAsInts = extractInts(PPfromVCF);
double[] likelihoodsAsVector = new double[likelihoodsAsInts.length];
for ( int i = 0; i < likelihoodsAsInts.length; i++ ) {
likelihoodsAsVector[i] = likelihoodsAsInts[i]/-10.0;
}
likelihoods.add(likelihoodsAsVector);
}
}
}
//TODO: for now just use priors that are SNPs because indel priors will bias SNP calls
final boolean useFlatPriors = nonSNPeval || nonSNPprior || (resources.isEmpty() && !useDiscoveredAC);
final List<double[]> posteriors = calculatePosteriorGLs(likelihoods,alleleCounts,vc1.getMaxPloidy(2), useFlatPriors);
final GenotypesContext newContext = GenotypesContext.create();
for ( int genoIdx = 0; genoIdx < vc1.getNSamples(); genoIdx ++ ) {
final GenotypeBuilder builder = new GenotypeBuilder(vc1.getGenotype(genoIdx));
builder.phased(vc1.getGenotype(genoIdx).isPhased());
if ( posteriors.get(genoIdx) != null ) {
GATKVariantContextUtils.updateGenotypeAfterSubsetting(vc1.getAlleles(), vc1.getMaxPloidy(2), builder,
GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, posteriors.get(genoIdx), vc1.getAlleles());
builder.attribute(GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY,
Utils.listFromPrimitives(GenotypeLikelihoods.fromLog10Likelihoods(posteriors.get(genoIdx)).getAsPLs()));
}
newContext.add(builder.make());
}
final List<Integer> priors = Utils.listFromPrimitives(
GenotypeLikelihoods.fromLog10Likelihoods(getDirichletPrior(alleleCounts, vc1.getMaxPloidy(2),useFlatPriors)).getAsPLs());
final VariantContextBuilder builder = new VariantContextBuilder(vc1).genotypes(newContext).attribute(GATKVCFConstants.GENOTYPE_PRIOR_KEY, priors);
// add in the AC, AF, and AN attributes
VariantContextUtils.calculateChromosomeCounts(builder, true);
return builder.make();
}
/**
* Given genotype likelihoods and known allele counts, calculate the posterior likelihoods
* over the genotype states
* @param genotypeLikelihoods - the genotype likelihoods for the individual
* @param knownAlleleCountsByAllele - the known allele counts in the population. For AC=2 AN=12 site, this is {10,2}
* @param ploidy - the ploidy to assume
* @param useFlatPriors - if true, apply flat priors to likelihoods in order to calculate posterior probabilities
* @return - the posterior genotype likelihoods
*/
protected static List<double[]> calculatePosteriorGLs(final List<double[]> genotypeLikelihoods,
final double[] knownAlleleCountsByAllele,
final int ploidy,
final boolean useFlatPriors) {
if ( ploidy != 2 ) {
throw new IllegalStateException("Genotype posteriors not yet implemented for ploidy != 2");
}
final double[] genotypePriorByAllele = getDirichletPrior(knownAlleleCountsByAllele,ploidy, useFlatPriors);
final List<double[]> posteriors = new ArrayList<>(genotypeLikelihoods.size());
for ( final double[] likelihoods : genotypeLikelihoods ) {
double[] posteriorProbabilities = null;
if ( likelihoods != null ) {
if ( likelihoods.length != genotypePriorByAllele.length ) {
throw new IllegalStateException(String.format("Likelihoods not of correct size: expected %d, observed %d",
knownAlleleCountsByAllele.length*(knownAlleleCountsByAllele.length+1)/2,likelihoods.length));
}
posteriorProbabilities = new double[genotypePriorByAllele.length];
for ( int genoIdx = 0; genoIdx < likelihoods.length; genoIdx ++ ) {
posteriorProbabilities[genoIdx] = likelihoods[genoIdx] + genotypePriorByAllele[genoIdx];
}
posteriorProbabilities = MathUtils.normalizeFromLog10(posteriorProbabilities, true);
}
posteriors.add(posteriorProbabilities);
}
return posteriors;
}
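// Worked example (illustrative numbers only): for a biallelic site with known counts {10, 2} and a
// sample with diploid log10 likelihoods {-0.1, -2.0, -6.0} over {AA, AB, BB}, each posterior is the
// sum of the genotype's log10 likelihood and its log10 Dirichlet-Multinomial prior, i.e.
// posterior[AB] = likelihoods[AB] + genotypePriorByAllele[AB], and the three sums are then
// renormalized in log10 space by MathUtils.normalizeFromLog10(..., true). With a flat prior the
// prior term is constant, so the posterior ordering is driven entirely by the likelihoods.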
// convenience function for a single genotype likelihoods array. Just wraps the List-based method above.
protected static double[] calculatePosteriorGLs(final double[] genotypeLikelihoods,
final double[] knownAlleleCountsByAllele,
final int ploidy,
final boolean useFlatPriors) {
return calculatePosteriorGLs(Arrays.asList(genotypeLikelihoods),knownAlleleCountsByAllele,ploidy, useFlatPriors).get(0);
}
/**
* Given known allele counts (whether external, from the sample, or both), calculate the prior distribution
* over genotype states. This assumes
* 1) Random sampling of alleles (known counts are unbiased, and frequency estimate is Dirichlet)
* 2) Genotype states are independent (Hardy-Weinberg)
* These assumptions give rise to a Dirichlet-Multinomial distribution of genotype states as a prior
* (the "number of trials" for the multinomial is simply the ploidy)
* @param knownCountsByAllele - the known counts per allele. For an AC=2, AN=12 site this is {10,2}
* @param ploidy - the number of chromosomes in the sample. For now restricted to 2.
* @param useFlatPrior - if true, ignore the known allele counts and use a flat prior over genotype states
* @return - the Dirichlet-Multinomial distribution over genotype states
*/
protected static double[] getDirichletPrior(final double[] knownCountsByAllele, final int ploidy, final boolean useFlatPrior) {
if ( ploidy != 2 ) {
throw new IllegalStateException("Genotype priors not yet implemented for ploidy != 2");
}
// multi-allelic format is
// AA AB BB AC BC CC AD BD CD DD ...
final double sumOfKnownCounts = MathUtils.sum(knownCountsByAllele);
final double[] priors = new double[knownCountsByAllele.length*(knownCountsByAllele.length+1)/2];
int priorIndex = 0;
for ( int allele2 = 0; allele2 < knownCountsByAllele.length; allele2++ ) {
for ( int allele1 = 0; allele1 <= allele2; allele1++) {
if (useFlatPrior)
priors[priorIndex++] = 1.0;
else {
final int[] counts = new int[knownCountsByAllele.length];
counts[allele1] += 1;
counts[allele2] += 1;
priors[priorIndex++] = MathUtils.dirichletMultinomial(knownCountsByAllele,sumOfKnownCounts,counts,ploidy);
}
}
}
return priors;
}
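// Worked example (assumed inputs): for two alleles with known counts {10.0, 2.0} and ploidy 2, the
// prior array has length 2*(2+1)/2 = 3 and follows the ordering above, i.e. {AA, AB, BB}. The inner
// loop builds counts of {2,0} for AA, {1,1} for AB and {0,2} for BB, and each prior entry is
// MathUtils.dirichletMultinomial({10.0, 2.0}, 12.0, counts, 2); with useFlatPrior every entry is 1.0.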
/**
* Parse counts for each allele
* @param counts - Map to store and return data
* @param context - line to be parsed from the input VCF file
* @param useAC - use allele count annotation value from VariantContext (vs. MLEAC)
*/
private static void addAlleleCounts(final Map<Allele,Integer> counts, final VariantContext context, final boolean useAC) {
final int[] ac;
//use MLEAC value...
if ( context.hasAttribute(GATKVCFConstants.MLE_ALLELE_COUNT_KEY) && ! useAC ) {
ac = getAlleleCounts(GATKVCFConstants.MLE_ALLELE_COUNT_KEY, context);
}
//...unless specified by the user in useAC or unless MLEAC is absent
else if ( context.hasAttribute(VCFConstants.ALLELE_COUNT_KEY) ) {
ac = getAlleleCounts(VCFConstants.ALLELE_COUNT_KEY, context);
}
//if VariantContext annotation doesn't contain AC or MLEAC then get the data from direct evaluation
else {
ac = new int[context.getAlternateAlleles().size()];
int idx = 0;
for ( final Allele allele : context.getAlternateAlleles() ) {
ac[idx++] = context.getCalledChrCount(allele);
}
}
//since the allele count for the reference allele is not given in the VCF format,
//calculate it from the allele number minus the total counts for alternate alleles
for ( final Allele allele : context.getAlleles() ) {
final int count;
if ( allele.isReference() ) {
if ( context.hasAttribute(VCFConstants.ALLELE_NUMBER_KEY) ) {
count = Math.max(context.getAttributeAsInt(VCFConstants.ALLELE_NUMBER_KEY,-1) - (int) MathUtils.sum(ac),0); //occasionally an MLEAC value will sneak in that's greater than the AN
} else {
count = Math.max(context.getCalledChrCount() - (int) MathUtils.sum(ac),0);
}
} else {
count = ac[context.getAlternateAlleles().indexOf(allele)];
}
//if this allele isn't in the map yet, add it
if ( ! counts.containsKey(allele) ) {
counts.put(allele,0);
}
//add the count for the current allele to the existing value in the map
counts.put(allele,count + counts.get(allele));
}
}
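// Worked example (assumed record): for a biallelic record with AN=12 and AC=2 (or MLEAC=2), the
// alternate allele contributes a count of 2 and the reference count is derived as
// max(AN - sum(ac), 0) = max(12 - 2, 0) = 10, so the map accumulates {REF: 10, ALT: 2}.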
/**
* Retrieve allele count data from VariantContext using VCFkey, checks for correct number of values in VCF
* @param VCFkey VariantContext annotation tag of interest (should be AC or MLEAC)
* @param context VariantContext from which to extract the data
* @return int[] with allele count data
*/
private static int[] getAlleleCounts(final String VCFkey, final VariantContext context) {
final Object alleleCountsFromVCF = context.getAttribute(VCFkey);
if ( alleleCountsFromVCF instanceof List ) {
if ( ((List) alleleCountsFromVCF).size() != context.getAlternateAlleles().size() )
throw new UserException(String.format("Variant does not contain the same number of MLE allele counts as alternate alleles for record at %s:%d", context.getChr(), context.getStart()));
}
else if ( alleleCountsFromVCF instanceof String || alleleCountsFromVCF instanceof Integer) {//here length is 1
if (context.getAlternateAlleles().size() != 1)
throw new UserException(String.format("Variant does not contain the same number of MLE allele counts as alternate alleles for record at %s:%d", context.getChr(), context.getStart()));
}
return extractInts(alleleCountsFromVCF);
}
/**
* Check the formatting on the Object returned by a call to VariantContext::getAttribute() and parse appropriately
* @param integerListContainingVCField - Object returned by a call to VariantContext::getAttribute()
* @return - array of ints
*/
public static int[] extractInts(final Object integerListContainingVCField) {
List<Integer> mleList = null;
if ( integerListContainingVCField instanceof List ) {
if ( ((List) integerListContainingVCField).get(0) instanceof String ) {
mleList = new ArrayList<>(((List) integerListContainingVCField).size());
for ( Object s : ((List)integerListContainingVCField)) {
mleList.add(Integer.parseInt((String) s));
}
} else {
mleList = (List<Integer>) integerListContainingVCField;
}
} else if ( integerListContainingVCField instanceof Integer ) {
mleList = Arrays.asList((Integer) integerListContainingVCField);
} else if ( integerListContainingVCField instanceof String ) {
mleList = Arrays.asList(Integer.parseInt((String)integerListContainingVCField));
}
if ( mleList == null )
throw new IllegalArgumentException(String.format("VCF does not have properly formatted "+
GATKVCFConstants.MLE_ALLELE_COUNT_KEY+" or "+VCFConstants.ALLELE_COUNT_KEY));
final int[] mle = new int[mleList.size()];
if ( ! ( mleList.get(0) instanceof Integer ) ) {
throw new IllegalStateException("BUG: The AC values should be an Integer, but was "+mleList.get(0).getClass().getCanonicalName());
}
for ( int idx = 0; idx < mle.length; idx++) {
mle[idx] = mleList.get(idx);
}
return mle;
}
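// Illustrative inputs (assumed values): a List of Strings ["3", "1"] parses to {3, 1}, a lone
// Integer 4 or String "4" yields {4}, and any other shape leaves mleList null and triggers the
// IllegalArgumentException above.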
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/readthreading/DanglingChainMergingGraph.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller.readthreading;
import com.google.java.contract.Ensures;
import htsjdk.samtools.Cigar;
import htsjdk.samtools.CigarElement;
import htsjdk.samtools.CigarOperator;
import org.broadinstitute.gatk.tools.walkers.haplotypecaller.graphs.*;
import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
import org.broadinstitute.gatk.utils.smithwaterman.*;
import org.jgrapht.EdgeFactory;
import java.util.*;
public abstract class DanglingChainMergingGraph extends BaseGraph<MultiDeBruijnVertex, MultiSampleEdge> {
private static final int MAX_CIGAR_COMPLEXITY = 3;
private int maxMismatchesInDanglingHead = -1;
protected boolean alreadyBuilt;
/**
* Create a new DanglingChainMergingGraph using kmerSize for matching
* @param kmerSize must be >= 1
* @param edgeFactory edge factory used to create this graph's edges
*/
protected DanglingChainMergingGraph(final int kmerSize, final EdgeFactory<MultiDeBruijnVertex, MultiSampleEdge> edgeFactory) {
super(kmerSize, edgeFactory);
}
protected void setMaxMismatchesInDanglingHead(final int maxMismatchesInDanglingHead) {
this.maxMismatchesInDanglingHead = maxMismatchesInDanglingHead;
}
/**
* Edge factory that encapsulates the numPruningSamples assembly parameter
*/
protected static class MyEdgeFactory implements EdgeFactory<MultiDeBruijnVertex, MultiSampleEdge> {
final int numPruningSamples;
public MyEdgeFactory(int numPruningSamples) {
this.numPruningSamples = numPruningSamples;
}
@Override
public MultiSampleEdge createEdge(final MultiDeBruijnVertex sourceVertex, final MultiDeBruijnVertex targetVertex) {
return new MultiSampleEdge(false, 1, numPruningSamples);
}
public MultiSampleEdge createEdge(final boolean isRef, final int multiplicity) {
return new MultiSampleEdge(isRef, multiplicity, numPruningSamples);
}
}
/**
* Class to keep track of the important dangling chain merging data
*/
protected static final class DanglingChainMergeHelper {
final List<MultiDeBruijnVertex> danglingPath, referencePath;
final byte[] danglingPathString, referencePathString;
final Cigar cigar;
public DanglingChainMergeHelper(final List<MultiDeBruijnVertex> danglingPath,
final List<MultiDeBruijnVertex> referencePath,
final byte[] danglingPathString,
final byte[] referencePathString,
final Cigar cigar) {
this.danglingPath = danglingPath;
this.referencePath = referencePath;
this.danglingPathString = danglingPathString;
this.referencePathString = referencePathString;
this.cigar = cigar;
}
}
/**
* Try to recover dangling tails
*
* @param pruneFactor the prune factor to use in ignoring chain pieces
* @param minDanglingBranchLength the minimum length of a dangling branch for us to try to merge it
*/
public void recoverDanglingTails(final int pruneFactor, final int minDanglingBranchLength) {
if ( ! alreadyBuilt ) throw new IllegalStateException("recoverDanglingTails requires the graph be already built");
int attempted = 0;
int nRecovered = 0;
for ( final MultiDeBruijnVertex v : vertexSet() ) {
if ( outDegreeOf(v) == 0 && ! isRefSink(v) ) {
attempted++;
nRecovered += recoverDanglingTail(v, pruneFactor, minDanglingBranchLength);
}
}
logger.debug("Recovered " + nRecovered + " of " + attempted + " dangling tails");
}
/**
* Try to recover dangling heads
*
* @param pruneFactor the prune factor to use in ignoring chain pieces
* @param minDanglingBranchLength the minimum length of a dangling branch for us to try to merge it
*/
public void recoverDanglingHeads(final int pruneFactor, final int minDanglingBranchLength) {
if ( ! alreadyBuilt ) throw new IllegalStateException("recoverDanglingHeads requires the graph be already built");
// we need to build a list of dangling heads because that process can modify the graph (and otherwise generate
// a ConcurrentModificationException if we do it while iterating over the vertexes)
final List<MultiDeBruijnVertex> danglingHeads = new ArrayList<>();
int attempted = 0;
int nRecovered = 0;
for ( final MultiDeBruijnVertex v : vertexSet() ) {
if ( inDegreeOf(v) == 0 && ! isRefSource(v) )
danglingHeads.add(v);
}
// now we can try to recover the dangling heads
for ( final MultiDeBruijnVertex v : danglingHeads ) {
attempted++;
nRecovered += recoverDanglingHead(v, pruneFactor, minDanglingBranchLength);
}
logger.debug("Recovered " + nRecovered + " of " + attempted + " dangling heads");
}
/**
* Attempt to attach vertex with out-degree == 0 to the graph
*
* @param vertex the vertex to recover
* @param pruneFactor the prune factor to use in ignoring chain pieces
* @param minDanglingBranchLength the minimum length of a dangling branch for us to try to merge it
* @return 1 if we successfully recovered the vertex and 0 otherwise
*/
protected int recoverDanglingTail(final MultiDeBruijnVertex vertex, final int pruneFactor, final int minDanglingBranchLength) {
if ( outDegreeOf(vertex) != 0 ) throw new IllegalStateException("Attempting to recover a dangling tail for " + vertex + " but it has out-degree > 0");
// generate the CIGAR string from Smith-Waterman between the dangling tail and reference paths
final DanglingChainMergeHelper danglingTailMergeResult = generateCigarAgainstDownwardsReferencePath(vertex, pruneFactor, minDanglingBranchLength);
// if the CIGAR is too complex (or couldn't be computed) then we do not allow the merge into the reference path
if ( danglingTailMergeResult == null || ! cigarIsOkayToMerge(danglingTailMergeResult.cigar, false, true) )
return 0;
// merge
return mergeDanglingTail(danglingTailMergeResult);
}
/**
* Attempt to attach vertex with in-degree == 0, or a vertex on its path, to the graph
*
* @param vertex the vertex to recover
* @param pruneFactor the prune factor to use in ignoring chain pieces
* @param minDanglingBranchLength the minimum length of a dangling branch for us to try to merge it
* @return 1 if we successfully recovered a vertex and 0 otherwise
*/
protected int recoverDanglingHead(final MultiDeBruijnVertex vertex, final int pruneFactor, final int minDanglingBranchLength) {
if ( inDegreeOf(vertex) != 0 ) throw new IllegalStateException("Attempting to recover a dangling head for " + vertex + " but it has in-degree > 0");
// generate the CIGAR string from Smith-Waterman between the dangling tail and reference paths
final DanglingChainMergeHelper danglingHeadMergeResult = generateCigarAgainstUpwardsReferencePath(vertex, pruneFactor, minDanglingBranchLength);
// if the CIGAR is too complex (or couldn't be computed) then we do not allow the merge into the reference path
if ( danglingHeadMergeResult == null || ! cigarIsOkayToMerge(danglingHeadMergeResult.cigar, true, false) )
return 0;
// merge
return mergeDanglingHead(danglingHeadMergeResult);
}
/**
* Determine whether the provided cigar is okay to merge into the reference path
*
* @param cigar the cigar to analyze
* @param requireFirstElementM if true, require that the first cigar element be an M operator in order for it to be okay
* @param requireLastElementM if true, require that the last cigar element be an M operator in order for it to be okay
* @return true if it's okay to merge, false otherwise
*/
protected boolean cigarIsOkayToMerge(final Cigar cigar, final boolean requireFirstElementM, final boolean requireLastElementM) {
final List<CigarElement> elements = cigar.getCigarElements();
final int numElements = elements.size();
// don't allow more than a couple of different ops
if ( numElements == 0 || numElements > MAX_CIGAR_COMPLEXITY )
return false;
// the first element must be an M
if ( requireFirstElementM && elements.get(0).getOperator() != CigarOperator.M )
return false;
// the last element must be an M
if ( requireLastElementM && elements.get(numElements - 1).getOperator() != CigarOperator.M )
return false;
// note that there are checks for too many mismatches in the dangling branch later in the process
return true;
}
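// Illustrative cigars (assumed values): "20M" and "10M2D8M" are okay to merge under both M
// requirements, "5I15M" is rejected when requireFirstElementM is set, and "4M1I4M1D4M1I4M" is
// rejected outright because its 7 elements exceed MAX_CIGAR_COMPLEXITY.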
/**
* Actually merge the dangling tail if possible
*
* @param danglingTailMergeResult the result from generating a Cigar for the dangling tail against the reference
* @return 1 if merge was successful, 0 otherwise
*/
protected int mergeDanglingTail(final DanglingChainMergeHelper danglingTailMergeResult) {
final List<CigarElement> elements = danglingTailMergeResult.cigar.getCigarElements();
final CigarElement lastElement = elements.get(elements.size() - 1);
if ( lastElement.getOperator() != CigarOperator.M )
throw new IllegalArgumentException("The last Cigar element must be an M");
final int lastRefIndex = danglingTailMergeResult.cigar.getReferenceLength() - 1;
final int matchingSuffix = Math.min(GraphUtils.longestSuffixMatch(danglingTailMergeResult.referencePathString, danglingTailMergeResult.danglingPathString, lastRefIndex), lastElement.getLength());
if ( matchingSuffix == 0 )
return 0;
final int altIndexToMerge = Math.max(danglingTailMergeResult.cigar.getReadLength() - matchingSuffix - 1, 0);
// there is an important edge condition that we need to handle here: Smith-Waterman correctly calculates that there is a
// deletion, that deletion is left-aligned such that the LCA node is part of that deletion, and the rest of the dangling
// tail is a perfect match to the suffix of the reference path. In this case we need to push the reference index to merge
// down one position so that we don't incorrectly cut a base off of the deletion.
final boolean firstElementIsDeletion = elements.get(0).getOperator() == CigarOperator.D;
final boolean mustHandleLeadingDeletionCase = firstElementIsDeletion && (elements.get(0).getLength() + matchingSuffix == lastRefIndex + 1);
final int refIndexToMerge = lastRefIndex - matchingSuffix + 1 + (mustHandleLeadingDeletionCase ? 1 : 0);
// another edge condition occurs here: if Smith-Waterman places the whole tail into an insertion then it will try to
// merge back to the LCA, which results in a cycle in the graph. So we do not want to merge in such a case.
if ( refIndexToMerge == 0 )
return 0;
// it's safe to merge now
addEdge(danglingTailMergeResult.danglingPath.get(altIndexToMerge), danglingTailMergeResult.referencePath.get(refIndexToMerge), ((MyEdgeFactory)getEdgeFactory()).createEdge(false, 1));
return 1;
}
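// Purely illustrative walk-through (assumed inputs): with referencePathString "ACGTT",
// danglingPathString "ACGAT" and a 5M cigar, lastRefIndex is 4 and only the trailing 'T' matches as
// a suffix, so matchingSuffix = 1, altIndexToMerge = max(5 - 1 - 1, 0) = 3 and
// refIndexToMerge = 4 - 1 + 1 = 4 (no leading-deletion adjustment); the merge therefore adds an
// edge from danglingPath.get(3) to referencePath.get(4).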
/**
* Actually merge the dangling head if possible
*
* @param danglingHeadMergeResult the result from generating a Cigar for the dangling head against the reference
* @return 1 if merge was successful, 0 otherwise
*/
protected int mergeDanglingHead(final DanglingChainMergeHelper danglingHeadMergeResult) {
final List<CigarElement> elements = danglingHeadMergeResult.cigar.getCigarElements();
final CigarElement firstElement = elements.get(0);
if ( firstElement.getOperator() != CigarOperator.M )
throw new IllegalArgumentException("The first Cigar element must be an M");
final int indexesToMerge = bestPrefixMatch(danglingHeadMergeResult.referencePathString, danglingHeadMergeResult.danglingPathString, firstElement.getLength());
if ( indexesToMerge <= 0 )
return 0;
// we can't push back the reference path
if ( indexesToMerge >= danglingHeadMergeResult.referencePath.size() - 1 )
return 0;
// but we can manipulate the dangling path if we need to
if ( indexesToMerge >= danglingHeadMergeResult.danglingPath.size() &&
! extendDanglingPathAgainstReference(danglingHeadMergeResult, indexesToMerge - danglingHeadMergeResult.danglingPath.size() + 2) )
return 0;
addEdge(danglingHeadMergeResult.referencePath.get(indexesToMerge+1), danglingHeadMergeResult.danglingPath.get(indexesToMerge), ((MyEdgeFactory)getEdgeFactory()).createEdge(false, 1));
return 1;
}
/**
* Generates the CIGAR string from the Smith-Waterman alignment of the dangling path (where the
* provided vertex is the sink) and the reference path.
*
* @param vertex the sink of the dangling chain
* @param pruneFactor the prune factor to use in ignoring chain pieces
* @param minDanglingBranchLength the minimum length of a dangling branch for us to try to merge it
* @return a DanglingChainMergeHelper describing the alignment, or null if no proper alignment could be generated
*/
protected DanglingChainMergeHelper generateCigarAgainstDownwardsReferencePath(final MultiDeBruijnVertex vertex, final int pruneFactor, final int minDanglingBranchLength) {
final int minTailPathLength = Math.max(1, minDanglingBranchLength); // while heads can be 0, tails absolutely cannot
// find the lowest common ancestor path between this vertex and the diverging master path if available
final List<MultiDeBruijnVertex> altPath = findPathUpwardsToLowestCommonAncestor(vertex, pruneFactor);
if ( altPath == null || isRefSource(altPath.get(0)) || altPath.size() < minTailPathLength + 1 ) // add 1 to include the LCA
return null;
// now get the reference path from the LCA
final List<MultiDeBruijnVertex> refPath = getReferencePath(altPath.get(0), TraversalDirection.downwards, Arrays.asList(incomingEdgeOf(altPath.get(1))));
// create the Smith-Waterman strings to use
final byte[] refBases = getBasesForPath(refPath, false);
final byte[] altBases = getBasesForPath(altPath, false);
// run Smith-Waterman to determine the best alignment (and remove trailing deletions since they aren't interesting)
final SmithWaterman alignment = new SWPairwiseAlignment(refBases, altBases, SWParameterSet.STANDARD_NGS, SWPairwiseAlignment.OVERHANG_STRATEGY.LEADING_INDEL);
return new DanglingChainMergeHelper(altPath, refPath, altBases, refBases, AlignmentUtils.removeTrailingDeletions(alignment.getCigar()));
}
/**
* Generates the CIGAR string from the Smith-Waterman alignment of the dangling path (where the
* provided vertex is the source) and the reference path.
*
* @param vertex the source of the dangling head
* @param pruneFactor the prune factor to use in ignoring chain pieces
* @param minDanglingBranchLength the minimum length of a dangling branch for us to try to merge it
* @return a DanglingChainMergeHelper describing the alignment, or null if no proper alignment could be generated
*/
protected DanglingChainMergeHelper generateCigarAgainstUpwardsReferencePath(final MultiDeBruijnVertex vertex, final int pruneFactor, final int minDanglingBranchLength) {
// find the highest common descendant path between vertex and the reference source if available
final List<MultiDeBruijnVertex> altPath = findPathDownwardsToHighestCommonDescendantOfReference(vertex, pruneFactor);
if ( altPath == null || isRefSink(altPath.get(0)) || altPath.size() < minDanglingBranchLength + 1 ) // add 1 to include the common descendant vertex
return null;
// now get the reference path from the LCA
final List<MultiDeBruijnVertex> refPath = getReferencePath(altPath.get(0), TraversalDirection.upwards, Collections.<MultiSampleEdge>emptyList());
// create the Smith-Waterman strings to use
final byte[] refBases = getBasesForPath(refPath, true);
final byte[] altBases = getBasesForPath(altPath, true);
// run Smith-Waterman to determine the best alignment (and remove trailing deletions since they aren't interesting)
final SmithWaterman alignment = new SWPairwiseAlignment(refBases, altBases, SWParameterSet.STANDARD_NGS, SWPairwiseAlignment.OVERHANG_STRATEGY.LEADING_INDEL);
return new DanglingChainMergeHelper(altPath, refPath, altBases, refBases, AlignmentUtils.removeTrailingDeletions(alignment.getCigar()));
}
/**
* Finds the path upwards in the graph from this vertex to the first diverging node, including that (lowest common ancestor) vertex.
* Note that nodes are excluded if their pruning weight is less than the pruning factor.
*
* @param vertex the original vertex
* @param pruneFactor the prune factor to use in ignoring chain pieces
* @return the path if it can be determined or null if this vertex either doesn't merge onto another path or
* has an ancestor with multiple incoming edges before hitting the reference path
*/
protected List<MultiDeBruijnVertex> findPathUpwardsToLowestCommonAncestor(final MultiDeBruijnVertex vertex, final int pruneFactor) {
final LinkedList<MultiDeBruijnVertex> path = new LinkedList<>();
MultiDeBruijnVertex v = vertex;
while ( inDegreeOf(v) == 1 && outDegreeOf(v) < 2 ) {
final MultiSampleEdge edge = incomingEdgeOf(v);
// if it has too low a weight, don't use it (or previous vertexes) for the path
if ( edge.getPruningMultiplicity() < pruneFactor )
path.clear();
// otherwise it is safe to use
else
path.addFirst(v);
v = getEdgeSource(edge);
}
path.addFirst(v);
return outDegreeOf(v) > 1 ? path : null;
}
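// Small illustrative graph (assumed shape):
//   R1 -> R2 -> R3      (reference path)
//    \
//     A1 -> A2 -> v     (dangling tail ending at v)
// Walking up from v prepends v, A2 and A1 while in-degree is 1 and out-degree is below 2, stops at
// R1 because its out-degree is 2, prepends R1 and returns [R1, A1, A2, v]; if no vertex with
// out-degree > 1 is reached, the method returns null instead.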
/**
* Finds the path downwards in the graph from this vertex to the reference sequence, including the highest common descendant vertex.
* However note that the path is reversed so that this vertex ends up at the end of the path.
* Also note that nodes are excluded if their pruning weight is less than the pruning factor.
*
* @param vertex the original vertex
* @param pruneFactor the prune factor to use in ignoring chain pieces
* @return the path if it can be determined or null if this vertex either doesn't merge onto the reference path or
* has a descendant with multiple outgoing edges before hitting the reference path
*/
protected List<MultiDeBruijnVertex> findPathDownwardsToHighestCommonDescendantOfReference(final MultiDeBruijnVertex vertex, final int pruneFactor) {
final LinkedList<MultiDeBruijnVertex> path = new LinkedList<>();
MultiDeBruijnVertex v = vertex;
while ( ! isReferenceNode(v) && outDegreeOf(v) == 1 ) {
final MultiSampleEdge edge = outgoingEdgeOf(v);
// if it has too low a weight, don't use it (or previous vertexes) for the path
if ( edge.getPruningMultiplicity() < pruneFactor )
path.clear();
// otherwise it is safe to use
else
path.addFirst(v);
v = getEdgeTarget(edge);
}
path.addFirst(v);
return isReferenceNode(v) ? path : null;
}
private enum TraversalDirection {
downwards,
upwards
}
/**
* Finds the path in the graph from this vertex to the reference sink, including this vertex
*
* @param start the reference vertex to start from
* @param direction describes which direction to move in the graph (i.e. down to the reference sink or up to the source)
* @param blacklistedEdges edges to ignore in the traversal down; useful to exclude the non-reference dangling paths
* @return the path (non-null, non-empty)
*/
protected List<MultiDeBruijnVertex> getReferencePath(final MultiDeBruijnVertex start,
final TraversalDirection direction,
final Collection<MultiSampleEdge> blacklistedEdges) {
final List<MultiDeBruijnVertex> path = new ArrayList<>();
MultiDeBruijnVertex v = start;
while ( v != null ) {
path.add(v);
v = (direction == TraversalDirection.downwards ? getNextReferenceVertex(v, true, blacklistedEdges) : getPrevReferenceVertex(v));
}
return path;
}
/**
* The base sequence for the given path.
*
* @param path the list of vertexes that make up the path
* @param expandSource if true and if we encounter a source node, then expand (and reverse) the character sequence for that node
* @return non-null sequence of bases corresponding to the given path
*/
@Ensures({"result != null"})
public byte[] getBasesForPath(final List<MultiDeBruijnVertex> path, final boolean expandSource) {
if ( path == null ) throw new IllegalArgumentException("Path cannot be null");
final StringBuilder sb = new StringBuilder();
for ( final MultiDeBruijnVertex v : path ) {
if ( expandSource && isSource(v) ) {
final String seq = v.getSequenceString();
sb.append(new StringBuilder(seq).reverse().toString());
} else {
sb.append((char)v.getSuffix());
}
}
return sb.toString().getBytes();
}
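// Illustrative example (assumed vertices, with getSuffix() being the final base of each kmer): for a
// path [v1, v2, v3] where the source v1 has sequence "ACG" and v2, v3 have suffix bases 'T' and 'A',
// getBasesForPath(path, false) yields "GTA" while getBasesForPath(path, true) yields "GCATA",
// because the source sequence is expanded and reversed before the remaining suffixes are appended.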
/**
* Finds the index of the best extent of the prefix match between the provided paths, for dangling head merging.
* Assumes that path1.length >= maxIndex and path2.length >= maxIndex.
*
* @param path1 the first path
* @param path2 the second path
* @param maxIndex the maximum index to traverse (not inclusive)
* @return the index of the ideal prefix match or -1 if it cannot find one, must be less than maxIndex
*/
protected int bestPrefixMatch(final byte[] path1, final byte[] path2, final int maxIndex) {
final int maxMismatches = getMaxMismatches(maxIndex);
int mismatches = 0;
int index = 0;
int lastGoodIndex = -1;
while ( index < maxIndex ) {
if ( path1[index] != path2[index] ) {
if ( ++mismatches > maxMismatches )
return -1;
lastGoodIndex = index;
}
index++;
}
// if we got here then we hit the max index
return lastGoodIndex;
}
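// Mechanical trace (assumed inputs): with path1 = "ACGTA", path2 = "ACCTA", maxIndex = 5 and a
// mismatch budget of at least 1, the lone mismatch at index 2 sets lastGoodIndex = 2 and the method
// returns 2; exceeding the budget returns -1 immediately, and a perfect prefix match also returns
// -1 because lastGoodIndex is only advanced on mismatches.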
/**
* Determine the maximum number of mismatches permitted on the branch.
* Unless it's preset (e.g. by unit tests) it should be the length of the branch divided by the kmer size.
*
* @param lengthOfDanglingBranch the length of the branch itself
* @return positive integer
*/
private int getMaxMismatches(final int lengthOfDanglingBranch) {
return maxMismatchesInDanglingHead > 0 ? maxMismatchesInDanglingHead : Math.max(1, (lengthOfDanglingBranch / kmerSize));
}
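// Arithmetic example (assumed values): with maxMismatchesInDanglingHead left at -1, a dangling
// branch of length 60 and kmerSize 25 allows max(1, 60 / 25) = 2 mismatches, while any branch
// shorter than the kmer size still allows the floor of 1.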
protected boolean extendDanglingPathAgainstReference(final DanglingChainMergeHelper danglingHeadMergeResult, final int numNodesToExtend) {
final int indexOfLastDanglingNode = danglingHeadMergeResult.danglingPath.size() - 1;
final int indexOfRefNodeToUse = indexOfLastDanglingNode + numNodesToExtend;
if ( indexOfRefNodeToUse >= danglingHeadMergeResult.referencePath.size() )
return false;
final MultiDeBruijnVertex danglingSource = danglingHeadMergeResult.danglingPath.remove(indexOfLastDanglingNode);
final StringBuilder sb = new StringBuilder();
final byte[] refSourceSequence = danglingHeadMergeResult.referencePath.get(indexOfRefNodeToUse).getSequence();
for ( int i = 0; i < numNodesToExtend; i++ )
sb.append((char)refSourceSequence[i]);
sb.append(danglingSource.getSequenceString());
final byte[] sequenceToExtend = sb.toString().getBytes();
// clean up the source and edge
final MultiSampleEdge sourceEdge = outgoingEdgeOf(danglingSource);
MultiDeBruijnVertex prevV = getEdgeTarget(sourceEdge);
removeEdge(danglingSource, prevV);
// extend the path
for ( int i = numNodesToExtend; i > 0; i-- ) {
final MultiDeBruijnVertex newV = new MultiDeBruijnVertex(Arrays.copyOfRange(sequenceToExtend, i, i+kmerSize));
addVertex(newV);
final MultiSampleEdge newE = addEdge(newV, prevV);
newE.setMultiplicity(sourceEdge.getMultiplicity());
danglingHeadMergeResult.danglingPath.add(newV);
prevV = newV;
}
return true;
}
}<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/graphs/Route.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller.graphs;
import java.util.List;
import java.util.ListIterator;
/**
* Represents a route or path through a graph.
* <p>
* In contrast with a {@link Path}, a route keeps track of the
* path taken at furcations in order to speed up some path comparisons like the
* one implemented by {@link #isSuffix}.
* </p>
*
* @author <NAME> <<EMAIL>>
*/
public class Route<V extends BaseVertex, E extends BaseEdge> extends Path<V,E> {
protected final Route<V,E> previousRouteWithLastVertexThatIsForkOrJoin;
protected final boolean lastVertexIsForkOrJoin;
/**
* Create a zero length route with a start in a particular vertex:
*
* @param initialVertex the first vertex of the route.
* @param graph the new route's graph.
*
* @throws IllegalArgumentException if {@code initialVertex} or {@code graph} are {@code null}.
* or if {@code initialVertex} does not belong to {@code graph}.
*/
public Route(final V initialVertex, final BaseGraph<V, E> graph) {
super(initialVertex, graph);
previousRouteWithLastVertexThatIsForkOrJoin = null;
lastVertexIsForkOrJoin = graph.inDegreeOf(initialVertex) > 1;
}
@Override
public boolean equals(final Object other) {
if (other == null) return false;
if (other == this) return true;
if (! (other instanceof Route)) return false;
@SuppressWarnings("unchecked")
final Route<V,E> otherRoute = (Route<V,E>) other;
return otherRoute.length() == this.length() && isSuffix(otherRoute);
}
/**
* Extends a route into a new instance.
*
* @param prefix the route to extend.
* @param nextVertex the vertex to extend the route to.
*
* @throws IllegalArgumentException if {@code prefix} is {@code null} or {@code nextVertex} is {@code null}
* or {@code nextVertex} does not belong to {@code prefix}'s graph, or there is no edge in the graph
* that would connect {@code prefix}'s last vertex with {@code nextVertex} directly.
*/
public Route(final Route<V,E> prefix, final V nextVertex) {
this(prefix,resolveSuffixEdge(prefix,nextVertex));
}
/**
* Extends a route into a new instance.
*
* @param prevVertex the vertex to extend the route to.
* @param suffix the route to extend.
*
* @throws IllegalArgumentException if {@code suffix} is {@code null} or {@code prevVertex} is {@code null}
* or {@code prevVertex} does not belong to {@code suffix}'s graph, or there is no edge in the graph
* that would connect {@code suffix}'s first vertex with {@code prevVertex} directly.
*/
public Route(final V prevVertex, final Route<V,E> suffix) {
this(resolvePrefixEdge(prevVertex, suffix),suffix);
}
/**
* Resolves the prefix edge as required by {@link Route(V,Route)}.
*/
private static <V extends BaseVertex,E extends BaseEdge> E resolvePrefixEdge(final V prevVertex, final Route<V, E> suffix) {
if (prevVertex == null) throw new NullPointerException();
if (!suffix.getGraph().containsVertex(prevVertex)) throw new IllegalArgumentException();
final E result = suffix.getGraph().getEdge(prevVertex,suffix.getFirstVertex());
if (result == null)
throw new IllegalArgumentException("there is no such edge in the graph");
return result;
}
/**
* Resolves the suffix edge as required by {@link Route(Route,V)}
*/
private static <V extends BaseVertex,E extends BaseEdge> E resolveSuffixEdge(final Route<V,E> prefix, final V nextVertex) {
if (nextVertex == null) throw new IllegalArgumentException();
if (!prefix.getGraph().containsVertex(nextVertex)) throw new IllegalArgumentException();
final E result = prefix.getGraph().getEdge(prefix.getLastVertex(),nextVertex);
if (result == null)
throw new IllegalArgumentException("there is no such edge in the graph");
return result;
}
/**
* Extends a route by prefixing an edge.
*
* @param initialEdge the extending edge.
* @param suffix the original path.
*
* @throws IllegalArgumentException if {@code suffix} or {@code initialEdge} are {@code null}, or {@code initialEdge} is
* not part of {@code suffix}'s graph, or {@code initialEdge} does not have as a target the first vertex in {@code suffix}.
*/
public Route(final E initialEdge, final Route<V,E> suffix) {
super(initialEdge,suffix);
final V firstVertex = getFirstVertex();
if(suffix.length() == 0) {
lastVertexIsForkOrJoin = suffix.lastVertexIsForkOrJoin || graph.outDegreeOf(firstVertex) > 1;
previousRouteWithLastVertexThatIsForkOrJoin = graph.inDegreeOf(firstVertex) > 1 ? new Route<>(firstVertex,graph) : null;
} else {
lastVertexIsForkOrJoin = suffix.lastVertexIsForkOrJoin;
if (suffix.previousRouteWithLastVertexThatIsForkOrJoin != null)
previousRouteWithLastVertexThatIsForkOrJoin = new Route<>(initialEdge,suffix.previousRouteWithLastVertexThatIsForkOrJoin);
else
previousRouteWithLastVertexThatIsForkOrJoin = graph.outDegreeOf(firstVertex) > 1 ?
new Route<>(new Route<>(firstVertex,graph),edgesInOrder.get(0)) :
graph.inDegreeOf(firstVertex) > 1 ? new Route<>(firstVertex,graph) : null;
}
}
/**
* Create copy of an existing route.
* @param route the route to copy
*
* @throws NullPointerException if {@code route} is {@code null}.
*/
protected Route(final Route<V, E> route) {
super(route);
lastVertexIsForkOrJoin = route.lastVertexIsForkOrJoin;
previousRouteWithLastVertexThatIsForkOrJoin = route.previousRouteWithLastVertexThatIsForkOrJoin;
}
/**
* Create a new Route extending another one with an edge
*
* @param route the route to extend.
* @param edge the edge to extend the route with.
*
* @throws IllegalArgumentException if {@code route} or {@code edge} are {@code null}, or {@code edge} is
* not part of {@code route}'s graph, or {@code edge} does not have as a source the last vertex in {@code route}.
*/
public Route(final Route<V, E> route, final E edge) {
super(route, edge);
lastVertexIsForkOrJoin = graph.outDegreeOf(route.lastVertex) > 1 || graph.inDegreeOf(lastVertex) > 1;
previousRouteWithLastVertexThatIsForkOrJoin = route.lastVertexIsForkOrJoin ? route : route.previousRouteWithLastVertexThatIsForkOrJoin;
}
@Override
public boolean isSuffix(final Path<V,E> other) {
if (other == this)
return true;
else if (other == null)
throw new IllegalArgumentException("other path must not be null");
else if (getGraph() != other.getGraph())
throw new IllegalArgumentException("other path must be part of the same graph");
else if (other instanceof Route)
return isRouteSuffix((Route<V,E>)other);
else
return super.isSuffix(other);
}
@Override
public String toString() {
return super.toString().replace("Path{", "Route{");
}
/**
* Faster version when comparing with a route.
*/
protected boolean isRouteSuffix(final Route<V,E> other) {
if (other.getGraph() != this.getGraph())
throw new IllegalArgumentException("you cannot compare routes on different graphs");
else if (lastVertex != other.lastVertex) // obvious case.
return false;
else if (this.previousRouteWithLastVertexThatIsForkOrJoin == null
&& other.previousRouteWithLastVertexThatIsForkOrJoin != null) // I am shorter or different path for sure.
return false;
else if (this.edgesInOrder.size() < other.edgesInOrder.size()) // I am shorter regardless of path, no way Jose!
return false;
else if (this.previousRouteWithLastVertexThatIsForkOrJoin == null || other.previousRouteWithLastVertexThatIsForkOrJoin == null) {
final ListIterator<E> myEdges = edgesInOrder.listIterator(edgesInOrder.size());
final ListIterator<E> otherEdges = other.edgesInOrder.listIterator(other.edgesInOrder.size());
while (otherEdges.hasPrevious())
if (myEdges.previous() != otherEdges.previous())
return false;
return true;
} else
return (other.previousRouteWithLastVertexThatIsForkOrJoin == this.previousRouteWithLastVertexThatIsForkOrJoin)
|| (previousRouteWithLastVertexThatIsForkOrJoin.lastVertex == other.previousRouteWithLastVertexThatIsForkOrJoin.lastVertex
&& previousRouteWithLastVertexThatIsForkOrJoin.isRouteSuffix(other.previousRouteWithLastVertexThatIsForkOrJoin));
}
/**
* Checks whether the last vertex in the route is a fork or a joining vertex.
* @return {@code true} iff so.
*/
public boolean lastVertexIsForkOrJoin() {
return lastVertexIsForkOrJoin;
}
/**
* Returns the longest prefix route that has as a last vertex a join or furcation vertex.
*
* @return may be {@code null} when no prefix route ends in a fork or join vertex.
*/
public Route<V,E> getPrefixRouteWithLastVertexThatIsForkOrJoin() {
return previousRouteWithLastVertexThatIsForkOrJoin;
}
/**
* Splice out the first few vertices of the route.
*
* @param length how many vertices to splice out
* @return a new route without those spliced vertices.
*
* @throws IllegalArgumentException if {@code length} is greater than or equal to the route's length, or if it is negative.
* Note that a route with no vertices is not a legal route.
*/
public Route<V,E> splicePrefix(final int length) {
if (length == 0)
return this;
if (length >= length())
throw new IllegalArgumentException("prefix slicing to long");
if (length < 0)
throw new IllegalArgumentException("prefix cannot be negative");
final List<E> resultEdges = getEdges().subList(length,length());
Route<V,E> result = new Route<>(graph.getEdgeSource(resultEdges.get(0)),graph);
for (final E edge : resultEdges)
result = new Route<>(result,edge);
return result;
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/GeneralPloidyGenotypeLikelihoods.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper;
import htsjdk.samtools.SAMUtils;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.GenotypeLikelihoods;
import org.broadinstitute.gatk.tools.walkers.genotyper.afcalc.ExactACcounts;
import org.broadinstitute.gatk.tools.walkers.genotyper.afcalc.ExactACset;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.collections.Pair;
import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import java.util.*;
public abstract class GeneralPloidyGenotypeLikelihoods {
protected final int numChromosomes;
private final static double MAX_LOG10_ERROR_TO_STOP_EARLY = 6; // we want the calculation to be accurate to 1 / 10^6
protected static final boolean VERBOSE = false;
protected static final double qualVec[] = new double[SAMUtils.MAX_PHRED_SCORE+1];
//
// The fundamental data arrays associated with a Genotype Likelihoods object
//
protected double[] log10Likelihoods;
protected double[][] logMismatchProbabilityArray;
protected final int nSamplesPerPool;
protected final HashMap<String, ErrorModel> perLaneErrorModels;
protected final int likelihoodDim;
protected final boolean ignoreLaneInformation;
protected final double LOG10_PLOIDY;
protected boolean hasReferenceSampleData;
protected final int nAlleles;
protected final List<Allele> alleles;
private static final double MIN_LIKELIHOOD = Double.NEGATIVE_INFINITY;
private static final int MAX_NUM_ALLELES_TO_CACHE = 20;
private static final int MAX_NUM_SAMPLES_PER_POOL = 1000;
private static final boolean FAST_GL_COMPUTATION = true;
// constructor with given logPL elements
public GeneralPloidyGenotypeLikelihoods(final List<Allele> alleles, final double[] logLikelihoods, final int ploidy,
final HashMap<String, ErrorModel> perLaneErrorModels, final boolean ignoreLaneInformation) {
this.alleles = alleles;
this.nAlleles = alleles.size();
numChromosomes = ploidy;
nSamplesPerPool = numChromosomes/2;
this.perLaneErrorModels = perLaneErrorModels;
this.ignoreLaneInformation = ignoreLaneInformation;
// check if at least one lane has actual data
if (perLaneErrorModels == null || perLaneErrorModels.isEmpty())
hasReferenceSampleData = false;
else {
for (Map.Entry<String,ErrorModel> elt : perLaneErrorModels.entrySet()) {
if (elt.getValue().hasData()) {
hasReferenceSampleData = true;
break;
}
}
}
// check sizes
if (nAlleles > MAX_NUM_ALLELES_TO_CACHE)
throw new UserException("No support for this number of alleles");
if (nSamplesPerPool > MAX_NUM_SAMPLES_PER_POOL)
throw new UserException("No support for such large number of samples per pool");
likelihoodDim = GenotypeLikelihoods.numLikelihoods(nAlleles, numChromosomes);
if (logLikelihoods == null){
log10Likelihoods = new double[likelihoodDim];
Arrays.fill(log10Likelihoods, MIN_LIKELIHOOD);
} else {
if (logLikelihoods.length != likelihoodDim)
throw new ReviewedGATKException("BUG: inconsistent parameters when creating GeneralPloidyGenotypeLikelihoods object");
log10Likelihoods = logLikelihoods; //.clone(); // is clone needed?
}
fillCache();
LOG10_PLOIDY = Math.log10((double)numChromosomes);
}
/**
* Crucial inner class that handles addressing elements of pool likelihoods. We store likelihoods as a map
* of form int[] -> double (to be more precise, IntArrayWrapper -> Double).
* For a given ploidy (chromosome count) and number of alleles, we need a form to iterate deterministically
* across all possible allele conformations.
* Problem equivalent to listing in deterministic order all possible ways in which N integers will sum to P,
* where N is number of alleles and P is number of chromosomes.
* There's an option to list all integers so that sum will be UP to P.
* For example, with P=2,N=2, restrictSumTo = 2 iterator will produce
* [2 0] [1 1] [0 2]
*
*
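* A minimal usage sketch based only on the methods defined below (variable names are illustrative):
* <pre>{@code
* final SumIterator it = new SumIterator(2, 2); // N = 2 alleles, P = 2 chromosomes
* while (it.hasNext()) {
*     // prints "0: [2, 0]", "1: [1, 1]", "2: [0, 2]"
*     System.out.println(it.getLinearIndex() + ": " + Arrays.toString(it.getCurrentVector()));
*     it.next();
* }
* }</pre>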
*/
public static class SumIterator {
private int[] currentState;
private final int[] finalState;
private final int restrictSumTo;
private final int dim;
private boolean hasNext;
private int linearIndex;
private int currentSum;
/**
* Default constructor. Typical use case: pass restrictSumTo = -1 if there is no sum restriction; otherwise the iterator
* will generate int[] vectors whose elements all add up to this value.
*
* @param finalState End state - typically we should set value to (P,P,P,...)
* @param restrictSumTo See above
*/
public SumIterator(final int[] finalState,final int restrictSumTo) {
this.finalState = finalState;
this.dim = finalState.length;
this.restrictSumTo = restrictSumTo;
currentState = new int[dim];
reset();
}
/**
* Shortcut constructor for common use case: iterator will produce
* all vectors of length numAlleles whose sum = numChromosomes
* @param numAlleles Number of alleles
* @param numChromosomes Ploidy
*/
public SumIterator(final int numAlleles, final int numChromosomes) {
this(getInitialStateVector(numAlleles,numChromosomes), numChromosomes);
}
private static int[] getInitialStateVector(final int nAlleles, final int numChromosomes) {
int[] initialState = new int[nAlleles];
Arrays.fill(initialState,numChromosomes);
return initialState;
}
public void setInitialStateVector(final int[] stateVector) {
if (restrictSumTo > 0) {
// check that desired vector is valid
if (MathUtils.sum(stateVector) != restrictSumTo)
throw new ReviewedGATKException("BUG: initial state vector not compatible with sum iterator");
final int numAlleles = currentState.length;
final int ploidy = restrictSumTo;
linearIndex = GeneralPloidyGenotypeLikelihoods.getLinearIndex(stateVector, numAlleles, ploidy);
}
else
throw new ReviewedGATKException("BUG: Not supported");
}
public void next() {
int initialDim = (restrictSumTo > 0)?1:0;
hasNext = next(finalState, initialDim);
if (hasNext)
linearIndex++;
}
private boolean next(final int[] finalState, int initialDim) {
boolean hasNextState = false;
for (int currentDim=initialDim; currentDim < finalState.length; currentDim++) {
final int x = currentState[currentDim]+1;
if (x > finalState[currentDim] || (currentSum >= restrictSumTo && initialDim > 0)) {
// update vector sum, and reset position
currentSum -= currentState[currentDim];
currentState[currentDim] = 0;
if (currentDim >= dim-1) {
hasNextState = false;
break;
}
}
else {
currentState[currentDim] = x;
hasNextState = true;
currentSum++;
break;
}
}
if (initialDim > 0) {
currentState[0] = restrictSumTo - currentSum;
}
return hasNextState;
}
public void reset() {
Arrays.fill(currentState, 0);
if (restrictSumTo > 0)
currentState[0] = restrictSumTo;
hasNext = true;
linearIndex = 0;
currentSum = 0;
}
public int[] getCurrentVector() {
return currentState;
}
public int[] getCurrentAltVector() {
return Arrays.copyOfRange(currentState,1,currentState.length);
}
/* public int getCurrentSum() {
return currentSum;
}
*/
public int getLinearIndex() {
return linearIndex;
}
public boolean hasNext() {
return hasNext;
}
}
public List<Allele> getAlleles() { return alleles;}
/**
* Returns an array of log10 likelihoods for each genotype conformation, with ordering determined by SumIterator class.
*
* @return likelihoods array
*/
public double[] getLikelihoods() {
return log10Likelihoods;
}
/**
* Set particular element of logPL vector
* @param idx index of allele count conformation to modify
* @param pl Likelihood to associate with map
*/
public void setLogPLs(final int idx, final double pl) {
log10Likelihoods[idx] = pl;
}
public void renormalize() {
log10Likelihoods = MathUtils.normalizeFromLog10(log10Likelihoods,false,true);
}
/** Compute most likely AC conformation based on currently stored PL's - just loop through log PL map and output max value
*
* @return vector with most likely allele count, ordered according to this object's alleles
*/
public Pair<int[],Double> getMostLikelyACCount() {
int[] mlInd = null;
double maxVal = Double.NEGATIVE_INFINITY;
final SumIterator iterator = new SumIterator(alleles.size(),numChromosomes);
int idx = 0;
while (iterator.hasNext()) {
double pl = log10Likelihoods[idx++];
if (pl > maxVal) {
maxVal = pl;
mlInd = iterator.getCurrentVector().clone();
}
iterator.next();
}
if (VERBOSE) {
System.out.println(GATKVCFConstants.MLE_ALLELE_COUNT_KEY + ": " + Arrays.toString(mlInd));
}
return new Pair<int[], Double>(mlInd,maxVal);
}
/**
* Given set of alleles with corresponding vector of likelihoods, subset to a new set of alleles
*
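* A hedged illustration of a call (the alleles and values below are hypothetical):
* <pre>{@code
* final List<Allele> original = Arrays.asList(Allele.create("A", true), Allele.create("C"), Allele.create("G"));
* final List<Allele> subset   = Arrays.asList(Allele.create("A", true), Allele.create("C"));
* final double[] oldPLs = new double[6]; // one entry per conformation of 2 chromosomes over 3 alleles
* final double[] newPLs = subsetToAlleles(oldPLs, 2, original, subset);
* // newPLs has 3 entries, one per conformation of 2 chromosomes over the 2 retained alleles,
* // ordered according to the SumIterator convention described above
* }</pre>
*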
* @param oldLikelihoods Vector of PL's corresponding to original alleles
* @param numChromosomes Ploidy (number of chromosomes describing PL's)
* @param originalAlleles List of original alleles
* @param allelesToSubset Alleles to subset
* @return Vector of new PL's, ordered according to SumIterator's ordering
*/
public static double[] subsetToAlleles(final double[] oldLikelihoods, final int numChromosomes,
final List<Allele> originalAlleles, final List<Allele> allelesToSubset) {
int newPLSize = GeneralPloidyGenotypeLikelihoods.getNumLikelihoodElements(allelesToSubset.size(), numChromosomes);
double[] newPLs = new double[newPLSize];
int idx = 0;
// First fill boolean array stating whether each original allele is present in new mapping
final boolean [] allelePresent = new boolean[originalAlleles.size()];
for ( Allele allele : originalAlleles )
allelePresent[idx++] = allelesToSubset.contains(allele);
// compute mapping from old idx to new idx
// This might be needed in case new allele set is not ordered in the same way as old set
// Example. Original alleles: {T*,C,G,A}. New alleles: {G,C}. Permutation key = [2,1]
int[] permutationKey = new int[allelesToSubset.size()];
for (int k=0; k < allelesToSubset.size(); k++)
// for each allele to subset, find corresponding index in original allele list
permutationKey[k] = originalAlleles.indexOf(allelesToSubset.get(k));
if (VERBOSE) {
System.out.println("permutationKey:"+Arrays.toString(permutationKey));
}
final SumIterator iterator = new SumIterator(originalAlleles.size(),numChromosomes);
while (iterator.hasNext()) {
// for each entry in logPL table, associated originally with allele count stored in vec[],
// see if this allele count conformation will be present in new logPL table.
// For entry to be present, elements in dimensions not present in requested allele list have to have count = 0
int[] pVec = iterator.getCurrentVector();
double pl = oldLikelihoods[iterator.getLinearIndex()];
boolean keyPresent = true;
for (int k=0; k < allelePresent.length; k++)
if ( pVec[k]>0 && !allelePresent[k] )
keyPresent = false;
if (keyPresent) {// skip to next entry in logPLs if this conformation is not present in subset
final int[] newCount = new int[allelesToSubset.size()];
// map from old allele mapping count to new allele mapping
// In pseudo-Matlab notation: newCount = vec[permutationKey] for permutationKey vector
for (idx = 0; idx < newCount.length; idx++)
newCount[idx] = pVec[permutationKey[idx]];
// get corresponding index from new count
int outputIdx = GeneralPloidyGenotypeLikelihoods.getLinearIndex(newCount, allelesToSubset.size(), numChromosomes);
newPLs[outputIdx] = pl;
if (VERBOSE) {
System.out.println("Old Key:"+Arrays.toString(pVec));
System.out.println("New Key:"+Arrays.toString(newCount));
}
}
iterator.next();
}
return newPLs;
}
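/**
 * Maps an allele count conformation (one count per allele, summing to {@code ploidy}) to its scalar index
 * in the likelihood vector, following the same iteration order as {@link SumIterator}.
 *
 * @param vectorIdx allele count conformation
 * @param numAlleles number of alleles
 * @param ploidy number of chromosomes
 * @return the linear index of the conformation
 */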
public static int getLinearIndex(int[] vectorIdx, int numAlleles, int ploidy) {
if (ploidy <= 0)
return 0;
int linearIdx = 0;
int cumSum = ploidy;
for (int k=numAlleles-1;k>=1; k--) {
int idx = vectorIdx[k];
// how many blocks are before current position
if (idx == 0)
continue;
for (int p=0; p < idx; p++)
linearIdx += getNumLikelihoodElements( k, cumSum-p);
cumSum -= idx;
}
return linearIdx;
}
/**
* Given a scalar index, what's the allele count conformation corresponding to it?
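* For example, assuming the same ordering illustrated for {@link SumIterator} above (with 2 alleles and
* ploidy 2 the conformations are [2 0], [1 1], [0 2]):
* <pre>{@code
* final int[] counts = getAlleleCountFromPLIndex(2, 2, 1); // expected to be {1, 1} under that ordering
* }</pre>
*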
* @param nAlleles Number of alleles
* @param numChromosomes Ploidy
* @param PLindex Index to query
* @return Allele count conformation, according to iteration order from SumIterator
*/
public static int[] getAlleleCountFromPLIndex(final int nAlleles, final int numChromosomes, final int PLindex) {
final GenotypeLikelihoodCalculator calculator = GenotypeLikelihoodCalculators.getInstance(numChromosomes, nAlleles);
final GenotypeAlleleCounts alleleCounts = calculator.genotypeAlleleCountsAt(PLindex);
return alleleCounts.alleleCountsByIndex(nAlleles - 1);
}
/*
* a cache of the PL vector sizes as a function of # of alleles and pool sizes
*/
public static int getNumLikelihoodElements(int numAlleles, int ploidy) {
return GenotypeLikelihoodVectorSizes[numAlleles][ploidy];
}
private final static int[][] GenotypeLikelihoodVectorSizes = fillGLVectorSizeCache(MAX_NUM_ALLELES_TO_CACHE, 2*MAX_NUM_SAMPLES_PER_POOL);
private static int[][] fillGLVectorSizeCache(int maxAlleles, int maxPloidy) {
int[][] cache = new int[maxAlleles][maxPloidy];
for (int numAlleles=1; numAlleles < maxAlleles; numAlleles++) {
for (int ploidy=0; ploidy < maxPloidy; ploidy++) {
if (numAlleles == 1)
cache[numAlleles][ploidy] = 1;
else if (ploidy == 1)
cache[numAlleles][ploidy] = numAlleles;
else {
int acc =0;
for (int k=0; k <= ploidy; k++ )
acc += cache[numAlleles-1][ploidy-k];
cache[numAlleles][ploidy] = acc;
}
}
}
return cache;
}
/**
* Return a string representation of this object in a moderately usable form
*
* @return string representation
*/
public String toString() {
StringBuilder s = new StringBuilder(1000);
s.append("Alleles:");
for (Allele a: this.alleles){
s.append(a.getDisplayString());
s.append(",");
}
s.append("\nGLs:\n");
SumIterator iterator = new SumIterator(nAlleles,numChromosomes);
while (iterator.hasNext()) {
if (!Double.isInfinite(getLikelihoods()[iterator.getLinearIndex()])) {
s.append("Count [");
StringBuilder b = new StringBuilder(iterator.getCurrentVector().length*2);
for (int it:iterator.getCurrentVector()) {
b.append(it);
b.append(",");
}
s.append(b.toString());
s.append(String.format("] GL=%4.3f\n",this.getLikelihoods()[iterator.getLinearIndex()]) );
}
iterator.next();
}
return s.toString();
}
public void computeLikelihoods(ErrorModel errorModel,
List<Allele> alleleList, List<Integer> numObservations, ReadBackedPileup pileup) {
if (FAST_GL_COMPUTATION) {
// queue up elements to be computed. Assumptions:
// GLs distributions are unimodal
// GLs are continuous
// Hence, once an AC conformation is computed, we queue up its immediate topological neighbors.
// If neighbors fall below maximum - threshold, we don't queue up THEIR own neighbors
// and we repeat until queue is empty
// queue of AC conformations to process
final LinkedList<ExactACset> ACqueue = new LinkedList<ExactACset>();
// mapping of ExactACset indexes to the objects
final HashMap<ExactACcounts, ExactACset> indexesToACset = new HashMap<ExactACcounts, ExactACset>(likelihoodDim);
// add AC=0 to the queue
final int[] zeroCounts = new int[nAlleles];
zeroCounts[0] = numChromosomes;
ExactACset zeroSet =
new ExactACset(1, new ExactACcounts(zeroCounts));
ACqueue.add(zeroSet);
indexesToACset.put(zeroSet.getACcounts(), zeroSet);
// keep processing while we have AC conformations that need to be calculated
double maxLog10L = Double.NEGATIVE_INFINITY;
while ( !ACqueue.isEmpty() ) {
// compute log10Likelihoods
final ExactACset ACset = ACqueue.remove();
final double log10LofKs = calculateACConformationAndUpdateQueue(ACset, errorModel, alleleList, numObservations, maxLog10L, ACqueue, indexesToACset, pileup);
// adjust max likelihood seen if needed
maxLog10L = Math.max(maxLog10L, log10LofKs);
// clean up memory
indexesToACset.remove(ACset.getACcounts());
if ( VERBOSE )
System.out.printf(" *** removing used set=%s%n", ACset.getACcounts());
}
} else {
int plIdx = 0;
SumIterator iterator = new SumIterator(nAlleles, numChromosomes);
while (iterator.hasNext()) {
ExactACset ACset =
new ExactACset(1, new ExactACcounts(iterator.getCurrentVector()));
// for observed base X, add Q(jX,k) to likelihood vector for all k in error model
//likelihood(jA,jC,jG,jT) = logsum(logPr (errorModel[k],nA*Q(jA,k) + nC*Q(jC,k) + nG*Q(jG,k) + nT*Q(jT,k))
getLikelihoodOfConformation(ACset, errorModel, alleleList, numObservations, pileup);
setLogPLs(plIdx++, ACset.getLog10Likelihoods()[0]);
iterator.next();
}
}
// normalize PL's
renormalize();
}
private double calculateACConformationAndUpdateQueue(final ExactACset set,
final ErrorModel errorModel,
final List<Allele> alleleList,
final List<Integer> numObservations,
final double maxLog10L,
final LinkedList<ExactACset> ACqueue,
final HashMap<ExactACcounts,
ExactACset> indexesToACset,
final ReadBackedPileup pileup) {
// compute likelihood of set
getLikelihoodOfConformation(set, errorModel, alleleList, numObservations, pileup);
final double log10LofK = set.getLog10Likelihoods()[0];
// log result in PL vector
int idx = getLinearIndex(set.getACcounts().getCounts(), nAlleles, numChromosomes);
setLogPLs(idx, log10LofK);
// can we abort early because the log10Likelihoods are so small?
if ( log10LofK < maxLog10L - MAX_LOG10_ERROR_TO_STOP_EARLY ) {
if ( VERBOSE )
System.out.printf(" *** breaking early set=%s log10L=%.2f maxLog10L=%.2f%n", set.getACcounts(), log10LofK, maxLog10L);
return log10LofK;
}
// iterate over higher frequencies if possible
// by convention, ACcounts contained in set have full vector of possible pool ac counts including ref count.
final int ACwiggle = numChromosomes - set.getACsum() + set.getACcounts().getCounts()[0];
if ( ACwiggle == 0 ) // all alternate alleles already sum to 2N so we cannot possibly go to higher frequencies
return log10LofK;
// add conformations for other cases
for ( int allele = 1; allele < nAlleles; allele++ ) {
final int[] ACcountsClone = set.getACcounts().getCounts().clone();
ACcountsClone[allele]++;
// is this a valid conformation?
int altSum = (int)MathUtils.sum(ACcountsClone) - ACcountsClone[0];
ACcountsClone[0] = numChromosomes - altSum;
if (ACcountsClone[0] < 0)
continue;
updateACset(ACcountsClone, ACqueue, indexesToACset);
}
return log10LofK;
}
/**
* Abstract methods, must be implemented in subclasses
*
* @param ACset Count to compute
* @param errorModel Site-specific error model object
* @param alleleList List of alleles
* @param numObservations Number of observations for each allele
* @param pileup Read backed pileup in case it's necessary
*/
public abstract void getLikelihoodOfConformation(final ExactACset ACset,
final ErrorModel errorModel,
final List<Allele> alleleList,
final List<Integer> numObservations,
final ReadBackedPileup pileup);
public abstract int add(ReadBackedPileup pileup, UnifiedArgumentCollection UAC);
// Static methods
public static void updateACset(final int[] newSetCounts,
final LinkedList<ExactACset> ACqueue,
final HashMap<ExactACcounts, ExactACset> indexesToACset) {
final ExactACcounts index = new ExactACcounts(newSetCounts);
if ( !indexesToACset.containsKey(index) ) {
ExactACset newSet = new ExactACset(1, index);
indexesToACset.put(index, newSet);
ACqueue.add(newSet);
if (VERBOSE)
System.out.println(" *** Adding set to queue:" + index.toString());
}
}
// -----------------------------------------------------------------------------------------------------------------
//
//
// helper routines
//
//
// -----------------------------------------------------------------------------------------------------------------
//
// Constant static data
//
static {
// cache 10^(-k/10)
for (int j=0; j <= SAMUtils.MAX_PHRED_SCORE; j++)
qualVec[j] = Math.pow(10.0,-(double)j/10.0);
}
private void fillCache() {
// cache Q(j,k) = log10(j/2N*(1-ek) + (2N-j)/2N*ek/3) for j = 0:2N
logMismatchProbabilityArray = new double[1+numChromosomes][1+SAMUtils.MAX_PHRED_SCORE];
for (int i=0; i <= numChromosomes; i++) {
for (int j=0; j <= SAMUtils.MAX_PHRED_SCORE; j++) {
double phi = (double)i/numChromosomes;
logMismatchProbabilityArray[i][j] = Math.log10(phi * (1.0-qualVec[j]) + qualVec[j]/3.0 * (1.0-phi));
}
}
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/utils/gvcf/HomRefBlock.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.utils.gvcf;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.vcf.VCFHeaderLine;
import org.broadinstitute.gatk.utils.MathUtils;
import java.util.ArrayList;
import java.util.List;
/**
* Helper class for calculating a GQ band in the GVCF writer
*
* A band contains GQ and DP values for a contiguous stretch of hom-ref genotypes,
* and provides summary information about the entire block of genotypes.
*
* Genotypes within the HomRefBlock are restricted to hom-ref genotypes within a band of GQ scores
*
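* As a small sketch of the banding contract (the variant context and GQ values are hypothetical), a block
* created for the half-open band [20, 60) accepts GQs of 20 through 59 only:
* <pre>{@code
* final HomRefBlock band = new HomRefBlock(startingVC, 20, 60, 2); // minGQ inclusive, maxGQ exclusive
* band.withinBounds(20); // true
* band.withinBounds(59); // true
* band.withinBounds(60); // false
* }</pre>
*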
* User: depristo
* Date: 6/25/13
* Time: 9:41 AM
*/
final class HomRefBlock {
private final VariantContext startingVC;
private int stop;
private final int minGQ, maxGQ;
private int[] minPLs = null;
final private List<Integer> GQs = new ArrayList<>(100);
final private List<Integer> DPs = new ArrayList<>(100);
private final Allele ref;
private final int ploidy;
/**
* Create a new HomRefBlock
*
* @param startingVC the VariantContext that starts this band (for starting position information)
* @param minGQ the minGQ (inclusive) to use in this band
* @param maxGQ the maxGQ (exclusive) to use in this band
*/
public HomRefBlock(final VariantContext startingVC, final int minGQ, final int maxGQ, final int defaultPloidy) {
if ( startingVC == null ) throw new IllegalArgumentException("startingVC cannot be null");
if ( minGQ > maxGQ ) throw new IllegalArgumentException("bad minGQ " + minGQ + " as it's > maxGQ " + maxGQ);
this.startingVC = startingVC;
this.stop = getStart() - 1;
this.ref = startingVC.getReference();
this.minGQ = minGQ;
this.maxGQ = maxGQ;
this.ploidy = startingVC.getMaxPloidy(defaultPloidy);
}
/**
* Create a new HomRefBlock only for doing bounds checking
*
* @param minGQ the minGQ (inclusive) to use in this band
* @param maxGQ the maxGQ (exclusive) to use in this band
*/
public HomRefBlock(final int minGQ, final int maxGQ, final int ploidy) {
if ( minGQ > maxGQ ) throw new IllegalArgumentException("bad minGQ " + minGQ + " as it's > maxGQ " + maxGQ);
this.startingVC = null;
this.stop = -1;
this.ref = null;
this.minGQ = minGQ;
this.maxGQ = maxGQ;
this.ploidy = ploidy;
}
/**
* Add information from this Genotype to this band
* @param g a non-null Genotype with GQ and DP attributes
*/
public void add(final int pos, final Genotype g) {
if ( g == null ) throw new IllegalArgumentException("g cannot be null");
if ( ! g.hasGQ() ) throw new IllegalArgumentException("g must have GQ field");
if ( ! g.hasPL() ) throw new IllegalArgumentException("g must have PL field");
if ( pos != stop + 1 ) throw new IllegalArgumentException("adding genotype at pos " + pos + " isn't contiguous with previous stop " + stop);
if ( g.getPloidy() != ploidy)
throw new IllegalArgumentException("cannot add a genotype with a different ploidy: " + g.getPloidy() + " != " + ploidy);
if( minPLs == null )
minPLs = g.getPL();
else { // otherwise take the min with the provided genotype's PLs
final int[] PL = g.getPL();
if (PL.length != minPLs.length)
throw new IllegalStateException("trying to merge different PL array sizes: " + PL.length + " != " + minPLs.length);
for (int i = 0; i < PL.length; i++)
if (minPLs[i] > PL[i])
minPLs[i] = PL[i];
}
stop = pos;
GQs.add(Math.min(g.getGQ(), 99)); // cap the GQ at the maximum emitted value of 99
DPs.add(Math.max(g.getDP(),0));
}
/**
* Is the GQ value within the bounds of this band (GQ >= minGQ && GQ < maxGQ)
* @param GQ the GQ value to test
* @return true if within bounds, false otherwise
*/
public boolean withinBounds(final int GQ) {
return GQ >= minGQ && GQ < maxGQ;
}
/** Get the min GQ observed within this band */
public int getMinGQ() { return MathUtils.arrayMin(GQs); }
/** Get the median GQ observed within this band */
public int getMedianGQ() { return MathUtils.median(GQs); }
/** Get the min DP observed within this band */
public int getMinDP() { return MathUtils.arrayMin(DPs); }
/** Get the median DP observed within this band */
public int getMedianDP() { return MathUtils.median(DPs); }
/** Get the min PLs observed within this band, can be null if no PLs have yet been observed */
public int[] getMinPLs() { return minPLs; }
protected int getGQUpperBound() { return maxGQ; }
protected int getGQLowerBound() { return minGQ; }
public boolean isContiguous(final VariantContext vc) {
return vc.getEnd() == getStop() + 1 && startingVC.getChr().equals(vc.getChr());
}
public VariantContext getStartingVC() { return startingVC; }
public int getStart() { return startingVC.getStart(); }
public int getStop() { return stop; }
public Allele getRef() { return ref; }
public int getSize() { return getStop() - getStart() + 1; }
@Override
public String toString() {
return "HomRefBlock{" +
"minGQ=" + minGQ +
", maxGQ=" + maxGQ +
'}';
}
public VCFHeaderLine toVCFHeaderLine() {
// Need to uniquify the key for the header line using the min/max GQ, since
// VCFHeader does not allow lines with duplicate keys.
final String key = String.format("<KEY>", getGQLowerBound(), getGQUpperBound());
return new VCFHeaderLine(key, "minGQ=" + getGQLowerBound() + "(inclusive),maxGQ=" + getGQUpperBound() + "(exclusive)");
}
/**
* Get the ploidy of this hom-ref block.
* @return the ploidy of the genotypes in this block
*/
public int getPloidy() {
return ploidy;
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/cancer/m2/Dream_Evaluations.md
# Dream Challenge Evaluation
In order to evaluate the performance of M2, we use two sets of data from the SMC DREAM Challenge, specifically challenges #3 and #4.
All scripts referenced here are relative to the current working directory of ```/dsde/working/mutect/dream_smc```
### Current Performance (Unmasked)
From the output of the evaluation method
(gsa-unstable 7/13/15, commit:9e93a70)
|set | subset | type | sensitivity | specificity | accuracy |
|----|--------|------|-------------|-------------|----------|
|SMC 3|chr21|SNP|0.935897435897|0.935897435897|0.935897435897|
|SMC 3|chr21|INDEL|0.904255319149|0.977011494253|0.940633406701|
|SMC 3|wgs|SNP|0.930532709098|0.955188985583|0.94286084734|
|SMC 3|wgs|INDEL|0.902139907396|0.970516962843|0.93632843512|
|SMC 4|chr21|SNP|0.769607843137|0.969135802469|0.869371822803|
|SMC 4|chr21|INDEL|0.771241830065|0.991596638655|0.88141923436|
|SMC 4|wgs|SNP|0.764507007622|0.975374480433|0.869940744028|
|SMC 4|wgs|INDEL|0.768634634353|0.989389679877|0.879012157115|
### How To Run
The Scala script for running M2 can be found in the gsa-unstable repository under ```private/gatk-tools-private/src/main/java/org/broadinstitute/gatk/tools/walkers/cancer/m2```
First, choose the appropriate settings (runnable as environment variables here)
```
QUEUE_JAR=<your-queue-jar>
OUT_VCF=<your-output-vcf>
GSA_UNSTABLE_HOME=<path-to-your-gsa-unstable-checkout>
# for Dream 3
NORMAL_BAM=/dsde/working/mutect/dream_smc/bams/synthetic.challenge.set3.normal.bam
TUMOR_BAM=/dsde/working/mutect/dream_smc/bams/synthetic.challenge.set3.tumor.bam
# for Dream 4
NORMAL_BAM=/dsde/working/mutect/dream_smc/bams/synthetic.challenge.set4.normal.bam
TUMOR_BAM=/dsde/working/mutect/dream_smc/bams/synthetic.challenge.set4.tumor.bam
# for WGS
INTERVALS=/dsde/working/mutect/dream_smc/bams/wgs_calling_regions.v1.interval_list
# for chromosome 21 only
INTERVALS=/dsde/working/mutect/ts/c21_wgs_calling_regions.v1.interval_list
TEMPDIR=/broad/hptmp/kcibul/mutect
```
and then run the following Queue command
```
java \
-Djava.io.tmpdir=$TEMPDIR \
-jar $QUEUE_JAR \
-S $GSA_UNSTABLE_HOME/private/gatk-queue-extensions-internal/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/m2/run_M2_dream.scala \
--job_queue gsa -qsub -jobResReq virtual_free=5G -startFromScratch \
-sc 200 \
-normal $NORMAL_BAM \
-tumor $TUMOR_BAM \
-L $INTERVALS \
-o $OUT_VCF \
-run
```
### How To Evaluate
Run the following
```
/dsde/working/mutect/dream_smc/dream_eval.pl [3|4] [wgs|21] [SNV|INDEL] input.vcf
```
where
- [3|4] the dream challenge round
- [wgs|21] evaluate the whole genome, or just a subset (chromosome 21)
- [SNV|INDEL] evaluate SNVs (SNPs) or INDELs (see the example below)
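For example, to score SNV calls on chromosome 21 against challenge 3 (the input VCF here is just the $OUT_VCF produced above, used as a placeholder):
```
/dsde/working/mutect/dream_smc/dream_eval.pl 3 21 SNV $OUT_VCF
```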
<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/StratificationManagerUnitTest.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications.manager;
// the imports for unit testing.
import org.broadinstitute.gatk.utils.BaseTest;
import org.broadinstitute.gatk.utils.Utils;
import org.broadinstitute.gatk.utils.collections.Pair;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.io.FileNotFoundException;
import java.util.*;
public class StratificationManagerUnitTest extends BaseTest {
@BeforeClass
public void init() throws FileNotFoundException {
}
// --------------------------------------------------------------------------------
//
// Basic tests Provider
//
// --------------------------------------------------------------------------------
private class StratificationStatesTestProvider extends TestDataProvider {
final List<List<Object>> allStates = new ArrayList<List<Object>>();
final List<IntegerStratifier> asSetOfStates = new ArrayList<IntegerStratifier>();
final int nStates;
public StratificationStatesTestProvider(final List<Integer> ... allStates) {
super(StratificationStatesTestProvider.class);
for ( List<Integer> states : allStates ) {
this.allStates.add(new ArrayList<Object>(states));
}
for ( List<Object> states : this.allStates ) {
asSetOfStates.add(new IntegerStratifier(states));
}
this.nStates = Utils.nCombinations(allStates);
setName(getName());
}
private String getName() {
StringBuilder b = new StringBuilder();
int c = 1;
for ( List<Object> state : allStates )
b.append(String.format("%d = [%s] ", c++, Utils.join(",", state)));
return b.toString();
}
public List<IntegerStratifier> getStateSpaceList() {
return asSetOfStates;
}
public ArrayList<Integer> values() {
final ArrayList<Integer> l = new ArrayList<Integer>();
for ( int i = 0; i < nStates; i++ )
l.add(i);
return l;
}
public Queue<List<Object>> getAllCombinations() {
return getAllCombinations(new LinkedList<List<Object>>(allStates));
}
private Queue<List<Object>> getAllCombinations(Queue<List<Object>> states) {
if ( states.isEmpty() )
return new LinkedList<List<Object>>();
else {
List<Object> head = states.poll();
Queue<List<Object>> substates = getAllCombinations(states);
Queue<List<Object>> newStates = new LinkedList<List<Object>>();
for ( final Object e : head) {
if ( substates.isEmpty() ) {
newStates.add(new LinkedList<Object>(Collections.singleton(e)));
} else {
for ( final List<Object> state : substates ) {
List<Object> newState = new LinkedList<Object>();
newState.add(e);
newState.addAll(state);
newStates.add(newState);
}
}
}
return newStates;
}
}
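// Illustrative note (not part of the original test): getAllCombinations builds the Cartesian
// product of the per-stratifier state lists. For example, for allStates = [[0,1],[2,3]] it
// yields [0,2], [0,3], [1,2], [1,3], i.e. one combination per leaf of the stratification tree,
// which is why the number of combinations must equal Utils.nCombinations(allStates).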
}
private class IntegerStratifier implements Stratifier {
final List<Object> integers;
private IntegerStratifier(final List<Object> integers) {
this.integers = integers;
}
@Override
public List<Object> getAllStates() {
return integers;
}
}
@DataProvider(name = "StratificationStatesTestProvider")
public Object[][] makeStratificationStatesTestProvider() {
new StratificationStatesTestProvider(Arrays.asList(0));
new StratificationStatesTestProvider(Arrays.asList(0, 1));
new StratificationStatesTestProvider(Arrays.asList(0, 1), Arrays.asList(2, 3));
new StratificationStatesTestProvider(Arrays.asList(0, 1), Arrays.asList(2, 3), Arrays.asList(4, 5));
new StratificationStatesTestProvider(Arrays.asList(0, 1), Arrays.asList(2, 3, 4), Arrays.asList(5, 6));
new StratificationStatesTestProvider(Arrays.asList(0, 1), Arrays.asList(2, 3, 4, 5), Arrays.asList(6));
new StratificationStatesTestProvider(Arrays.asList(0, 1), Arrays.asList(2, 3, 4, 5), Arrays.asList(6, 7));
new StratificationStatesTestProvider(Arrays.asList(0, 1), Arrays.asList(2, 3), Arrays.asList(4, 5), Arrays.asList(6, 7));
return StratificationStatesTestProvider.getTests(StratificationStatesTestProvider.class);
}
private final StratificationManager<IntegerStratifier, Integer> createManager(StratificationStatesTestProvider cfg) {
final StratificationManager<IntegerStratifier, Integer> manager = new StratificationManager<IntegerStratifier, Integer>(cfg.getStateSpaceList());
List<Integer> values = cfg.values();
for ( int i = 0; i < cfg.nStates; i++ )
manager.set(i, values.get(i));
Assert.assertEquals(manager.values(), values, "Values not equal");
return manager;
}
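// Usage sketch (hypothetical, for illustration only; names follow the test above): the manager
// maps every combination of stratification states to a dense integer key in [0, nStates), so a
// round trip looks like
//   final int key = manager.getKey(Arrays.<Object>asList(0, 2)); // one state per stratifier, in order
//   final Integer value = manager.get(key);                      // value previously stored via set(key, ...)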
@Test(dataProvider = "StratificationStatesTestProvider")
public void testLeafCount(StratificationStatesTestProvider cfg) {
final StratificationManager<IntegerStratifier, Integer> stratificationManager = createManager(cfg);
Assert.assertEquals(stratificationManager.size(), cfg.nStates);
int nLeafs = 0;
for ( final StratNode node : stratificationManager.getRoot() ) {
if ( node.isLeaf() )
nLeafs++;
}
Assert.assertEquals(nLeafs, cfg.nStates, "Unexpected number of leaves");
}
@Test(dataProvider = "StratificationStatesTestProvider")
public void testKeys(StratificationStatesTestProvider cfg) {
final StratificationManager<IntegerStratifier, Integer> stratificationManager = createManager(cfg);
final Set<Integer> seenKeys = new HashSet<Integer>(cfg.nStates);
for ( final StratNode node : stratificationManager.getRoot() ) {
if ( node.isLeaf() ) {
Assert.assertFalse(seenKeys.contains(node.getKey()), "Already seen the key");
seenKeys.add(node.getKey());
}
}
}
@Test(dataProvider = "StratificationStatesTestProvider")
public void testFindSingleKeys(StratificationStatesTestProvider cfg) {
final StratificationManager<IntegerStratifier, Integer> stratificationManager = createManager(cfg);
final Set<Integer> seenKeys = new HashSet<Integer>(cfg.nStates);
for ( List<Object> state : cfg.getAllCombinations() ) {
final int key = stratificationManager.getKey(state);
Assert.assertFalse(seenKeys.contains(key), "Already saw state mapping to this key");
Assert.assertTrue(stratificationManager.containsKey(state));
seenKeys.add(key);
// test value
Assert.assertEquals(stratificationManager.get(key), cfg.values().get(key));
Assert.assertEquals(stratificationManager.get(state), cfg.values().get(key));
state.set(0, 12345); // not present
Assert.assertEquals(stratificationManager.getKey(state), -1);
Assert.assertFalse(stratificationManager.containsKey(state));
}
}
@Test(dataProvider = "StratificationStatesTestProvider")
public void testFindMultipleKeys(StratificationStatesTestProvider cfg) {
final StratificationManager<IntegerStratifier, Integer> stratificationManager = createManager(cfg);
final List<List<Object>> states = new ArrayList<List<Object>>(cfg.allStates);
final Set<Integer> keys = stratificationManager.getKeys(states);
Assert.assertEquals(keys.size(), cfg.nStates, "Find all states didn't find all of the expected unique keys");
final Queue<List<Object>> combinations = cfg.getAllCombinations();
while ( ! combinations.isEmpty() ) {
List<Object> first = combinations.poll();
List<Object> second = combinations.peek();
if ( second != null ) {
List<List<Object>> combined = StratificationManager.combineStates(first, second);
int nExpectedKeys = Utils.nCombinations(combined);
final int key1 = stratificationManager.getKey(first);
final int key2 = stratificationManager.getKey(second);
final Set<Integer> keysCombined = stratificationManager.getKeys(combined);
Assert.assertTrue(keysCombined.contains(key1), "couldn't find key in data set");
Assert.assertTrue(keysCombined.contains(key2), "couldn't find key in data set");
Assert.assertEquals(keysCombined.size(), nExpectedKeys);
}
}
}
@Test(dataProvider = "StratificationStatesTestProvider")
public void testMapSet(StratificationStatesTestProvider cfg) {
final StratificationManager<IntegerStratifier, Integer> stratificationManager = createManager(cfg);
stratificationManager.set(0, -1);
Assert.assertEquals((int)stratificationManager.get(0), -1);
}
@Test(dataProvider = "StratificationStatesTestProvider")
public void testStratifierByKey(StratificationStatesTestProvider cfg) {
final StratificationManager<IntegerStratifier, Integer> manager = createManager(cfg);
for ( int key = 0; key < cfg.nStates; key++ ) {
List<Pair<IntegerStratifier, Object>> stratsAndStates = manager.getStratsAndStatesForKey(key);
final List<Object> strats = manager.getStatesForKey(key);
Assert.assertEquals((int)manager.get(strats), key, "Key -> strats -> key failed to return same key");
for ( int i = 0; i < strats.size(); i++ ) {
Assert.assertEquals(stratsAndStates.get(i).getSecond(), strats.get(i), "Strats and StratsAndStates differ");
}
}
}
}<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/validation/validationsiteselector/ValidationSiteSelector.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.validation.validationsiteselector;
import org.broadinstitute.gatk.utils.commandline.*;
import org.broadinstitute.gatk.engine.GATKVCFUtils;
import org.broadinstitute.gatk.engine.CommandLineGATK;
import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
import org.broadinstitute.gatk.engine.walkers.RodWalker;
import org.broadinstitute.gatk.utils.GenomeLocParser;
import org.broadinstitute.gatk.engine.SampleUtils;
import org.broadinstitute.gatk.utils.help.HelpConstants;
import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
import htsjdk.variant.vcf.VCFHeader;
import htsjdk.variant.vcf.VCFHeaderLine;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import java.io.File;
import java.util.*;
/**
* Randomly select variant records according to specified options
*
* <p>
* This tool is intended for use in experiments where we sample data randomly from a set of variants, for example
* in order to choose sites for a follow-up validation study.</p>
*
* <p>Sites are selected randomly but within certain restrictions. The main sources of restrictions are:</p>
* <ul>
* <li><b>Sample restrictions:</b> A user can specify a set of samples, and we will only consider sites which are
* polymorphic within the given sample subset. These sample restrictions can be given as a set of individual
* samples, a text file (each line containing a sample name), or a regular expression. A user can additionally
* specify whether samples will be considered based on their genotypes (a non-reference genotype means that the
* sample is polymorphic in that variant, and hence that variant will be considered for inclusion in set), or
* based on their PLs.</li>
* <li><b>Sampling methods:</b>
* <ol>
* <li>Uniform sampling will just sample uniformly from variants that are polymorphic in selected samples</li>
* <li>Sampling based on Allele Frequency spectrum will ensure that output sites have the same AF distribution as the input set</li>
* </ol>
* </li>
* <li>Variant type (SNP, Indel, etc.)</li>
* </ul>
*
* <h3>Input</h3>
* <p>
* One or more variant sets to choose from.
* </p>
*
* <h3>Output</h3>
* <p>
* A sites-only VCF with the desired number of randomly selected sites.
* </p>
*
* <h3>Usage examples</h3>
* <pre>
* java -jar GenomeAnalysisTK.jar \
* -T ValidationSiteSelector \
* -R reference.fasta \
* -V input1.vcf \
* -V input2.vcf \
* -sn NA12878 \
* -o output.vcf \
* --numValidationSites 200 \
* -sampleMode POLY_BASED_ON_GT \
* -freqMode KEEP_AF_SPECTRUM
* </pre>
* <pre>
* java -jar GenomeAnalysisTK.jar \
* -T ValidationSiteSelector \
* -R reference.fasta \
* -V:foo input1.vcf \
* -V:bar input2.vcf \
* --numValidationSites 200 \
* -sf samples.txt \
* -o output.vcf \
* -sampleMode POLY_BASED_ON_GT \
* -freqMode UNIFORM \
* -selectType INDEL
* </pre>
*
*/
@DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} )
public class ValidationSiteSelector extends RodWalker<Integer, Integer> {
public enum AF_COMPUTATION_MODE {
KEEP_AF_SPECTRUM,
UNIFORM
}
public enum SAMPLE_SELECTION_MODE {
NONE,
POLY_BASED_ON_GT,
POLY_BASED_ON_GL
}
/**
* The input VCF file
*/
@Input(fullName="variant", shortName = "V", doc="Input VCF file, can be specified multiple times", required=true)
public List<RodBinding<VariantContext>> variants;
/**
* The output VCF file
*/
@Output(doc="File to which variants should be written")
protected VariantContextWriter vcfWriter = null;
/**
* Sample name(s) to subset the input VCF to, prior to selecting variants. -sn A -sn B subsets to samples A and B.
*/
@Argument(fullName="sample_name", shortName="sn", doc="Include genotypes from this sample. Can be specified multiple times", required=false)
public Set<String> sampleNames = new HashSet<String>(0);
/**
* Sample regexps to subset the input VCF to, prior to selecting variants. -se NA12* subsets to all samples with prefix NA12
*/
@Argument(fullName="sample_expressions", shortName="se", doc="Regular expression to select many samples from the ROD tracks provided. Can be specified multiple times", required=false)
public Set<String> sampleExpressions ;
/**
* File containing a list of sample names to subset the input vcf to. Equivalent to specifying the contents of the file separately with -sn
*/
@Input(fullName="sample_file", shortName="sf", doc="File containing a list of samples (one per line) to include. Can be specified multiple times", required=false)
public Set<File> sampleFiles;
/**
* A mode for selecting sites based on sample-level data. See the wiki documentation for more information.
*/
@Argument(fullName="sampleMode", shortName="sampleMode", doc="Sample selection mode", required=false)
private SAMPLE_SELECTION_MODE sampleMode = SAMPLE_SELECTION_MODE.NONE;
/**
* A P[nonref] threshold for SAMPLE_SELECTION_MODE=POLY_BASED_ON_GL. See the wiki documentation for more information.
*/
@Argument(shortName="samplePNonref",fullName="samplePNonref", doc="GL-based selection mode only: the probability" +
" that a site is non-reference in the samples for which to include the site",required=false)
private double samplePNonref = 0.99;
/**
* The number of sites in your validation set
*/
@Argument(fullName="numValidationSites", shortName="numSites", doc="Number of output validation sites", required=true)
private int numValidationSites;
/**
* Do not exclude filtered sites (e.g. not PASS or .) from consideration for validation
*/
@Argument(fullName="includeFilteredSites", shortName="ifs", doc="If true, will include filtered sites in set to choose variants from", required=false)
private boolean INCLUDE_FILTERED_SITES = false;
/**
* Argument for the frequency selection mode. (AC/AF/AN) are taken from VCF info field, not recalculated. Typically specified for sites-only VCFs that still have AC/AF/AN information.
*/
@Argument(fullName="ignoreGenotypes", shortName="ignoreGenotypes", doc="If true, will ignore genotypes in VCF, will take AC,AF from annotations and will make no sample selection", required=false)
private boolean IGNORE_GENOTYPES = false;
/**
* Argument for the frequency selection mode. Allows reference (non-polymorphic) sites to be included in the validation set.
*/
@Argument(fullName="ignorePolymorphicStatus", shortName="ignorePolymorphicStatus", doc="If true, will ignore polymorphic status in VCF, and will take VCF record directly without pre-selection", required=false)
private boolean IGNORE_POLYMORPHIC = false;
@Hidden
@Argument(fullName="numFrequencyBins", shortName="numBins", doc="Number of frequency bins if we're to match AF distribution", required=false)
private int numFrequencyBins = 20;
/**
* This argument selects allele frequency selection mode. See the wiki for more information.
*/
@Argument(fullName="frequencySelectionMode", shortName="freqMode", doc="Allele Frequency selection mode", required=false)
private AF_COMPUTATION_MODE freqMode = AF_COMPUTATION_MODE.KEEP_AF_SPECTRUM;
/**
* This argument selects particular kinds of variants (i.e. SNP, INDEL) out of a list. If left unspecified, all types are considered.
*/
@Argument(fullName="selectTypeToInclude", shortName="selectType", doc="Select only a certain type of variants from the input file. Valid types are INDEL, SNP, MIXED, MNP, SYMBOLIC, NO_VARIATION. Can be specified multiple times", required=false)
private List<VariantContext.Type> TYPES_TO_INCLUDE = new ArrayList<VariantContext.Type>();
private TreeSet<String> samples = new TreeSet<String>();
SampleSelector sampleSelector = null;
FrequencyModeSelector frequencyModeSelector = null;
private ArrayList<VariantContext.Type> selectedTypes = new ArrayList<VariantContext.Type>();
public void initialize() {
// Get list of samples to include in the output
Map<String, VCFHeader> vcfRods = GATKVCFUtils.getVCFHeadersFromRods(getToolkit());
TreeSet<String> vcfSamples = new TreeSet<String>(SampleUtils.getSampleList(vcfRods, GATKVariantContextUtils.GenotypeMergeType.REQUIRE_UNIQUE));
Collection<String> samplesFromFile = SampleUtils.getSamplesFromFiles(sampleFiles);
Collection<String> samplesFromExpressions = SampleUtils.matchSamplesExpressions(vcfSamples, sampleExpressions);
// first, add any requested samples
samples.addAll(samplesFromFile);
samples.addAll(samplesFromExpressions);
samples.addAll(sampleNames);
// if none were requested, we want all of them
if ( samples.isEmpty() ) {
samples.addAll(vcfSamples);
}
sampleSelector = getSampleSelectorObject(sampleMode, samples);
// initialize frequency mode selector
frequencyModeSelector = getFrequencyModeSelectorObject(freqMode, getToolkit().getGenomeLocParser());
// if user specified types to include, add these, otherwise, add all possible variant context types to list of vc types to include
if (TYPES_TO_INCLUDE.isEmpty()) {
for (VariantContext.Type t : VariantContext.Type.values())
selectedTypes.add(t);
}
else {
for (VariantContext.Type t : TYPES_TO_INCLUDE)
selectedTypes.add(t);
}
Set<VCFHeaderLine> headerLines = new HashSet<VCFHeaderLine>();
headerLines.add(new VCFHeaderLine("source", "ValidationSiteSelector"));
vcfWriter.writeHeader(new VCFHeader(headerLines));
}
@Override
public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
if ( tracker == null )
return 0;
Collection<VariantContext> vcs = tracker.getValues(variants, context.getLocation());
if ( vcs == null || vcs.size() == 0) {
return 0;
}
for (VariantContext vc : vcs) {
if (!selectedTypes.contains(vc.getType()))
continue;
// skip if site isn't polymorphic and if user didn't request to ignore polymorphic status
if (!vc.isPolymorphicInSamples() && !IGNORE_POLYMORPHIC)
continue;
if (!INCLUDE_FILTERED_SITES && vc.filtersWereApplied() && vc.isFiltered())
continue;
// does this site pass the criteria for the samples we are interested in?
boolean passesSampleSelectionCriteria;
if (samples.isEmpty())
passesSampleSelectionCriteria = true;
else
passesSampleSelectionCriteria = sampleSelector.selectSiteInSamples(vc);
frequencyModeSelector.logCurrentSiteData(vc,passesSampleSelectionCriteria,IGNORE_GENOTYPES,IGNORE_POLYMORPHIC);
}
return 1;
}
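// Flow note (illustrative, not part of the original source): map() only records each candidate
// site via frequencyModeSelector.logCurrentSiteData(); the actual random draw of
// numValidationSites sites happens once in onTraversalDone(), so that KEEP_AF_SPECTRUM or
// UNIFORM sampling can be performed over the complete candidate set.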
@Override
public Integer reduceInit() { return 0; }
@Override
public Integer reduce(Integer value, Integer sum) { return value + sum; }
public void onTraversalDone(Integer result) {
logger.info("Outputting validation sites...");
ArrayList<VariantContext> selectedSites = frequencyModeSelector.selectValidationSites(numValidationSites);
for (VariantContext vc : selectedSites) {
vcfWriter.add(vc);
}
logger.info(result + " records processed.");
}
private SampleSelector getSampleSelectorObject(SAMPLE_SELECTION_MODE sampleMode, TreeSet<String> samples) {
SampleSelector sm;
switch ( sampleMode ) {
case POLY_BASED_ON_GL:
sm = new GLBasedSampleSelector(samples, Math.log10(1.0-samplePNonref));
break;
case POLY_BASED_ON_GT:
sm = new GTBasedSampleSelector(samples);
break;
case NONE:
sm = new NullSampleSelector(samples);
break;
default:
throw new IllegalArgumentException("Unsupported Sample Selection Mode: " + sampleMode);
}
return sm;
}
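// Note (illustrative): POLY_BASED_ON_GL converts the samplePNonref threshold into log10 space,
// e.g. the default samplePNonref = 0.99 passes Math.log10(1.0 - 0.99) = -2 to GLBasedSampleSelector.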
private FrequencyModeSelector getFrequencyModeSelectorObject (AF_COMPUTATION_MODE freqMode, GenomeLocParser parser) {
FrequencyModeSelector fm;
switch (freqMode) {
case KEEP_AF_SPECTRUM:
fm = new KeepAFSpectrumFrequencySelector(numFrequencyBins, parser);
break;
case UNIFORM:
fm = new UniformSamplingFrequencySelector(parser);
break;
default: throw new IllegalArgumentException("Unexpected Frequency Selection Mode: "+ freqMode);
}
return fm;
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/GenotypeAlleleCounts.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper;
import htsjdk.variant.variantcontext.Allele;
import org.broadinstitute.gatk.utils.IndexRange;
import org.broadinstitute.gatk.utils.MathUtils;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* Collection of allele counts for a genotype. It encompasses what alleles are present in the genotype and in what number.</p>
*
* <p>Alleles are represented herein by their indices running from <b>0</b> to <b>N-1</b> where <i>N</i> is the number of alleles.</p>
*
* <p>Each allele present in a genotype (count != 0) has a <i>rank</i>, that is the 0-based ordinal of
* that allele amongst the ones present in the genotype as sorted by their index.</p>
*
* <p>For example:</p>
*
* <p><b>0/0/2/2</b> has two alleles with indices <b>0</b> and <b>2</b>, both with count 2.
* The rank of <b>0</b> is <i>0</i> whereas the rank of <b>2</b> is <i>1</i>.</p>
*
* <p><b>2/4/4/7</b> has three alleles with indices <b>2</b>, <b>4</b> and <b>7</b>. <b>2</b> and <b>7</b> have count 1 whereas <b>4</b> has count 2.
* The rank of <b>2</b> is <i>0</i>, the rank of <b>4</b> is <i>1</i>. and the rank of <b>7</b> is <i>2</i>.</p>
*
* <p>In contrast, in both examples above both <b>3</b> and <b>10</b> (and many others) are absent, thus they have no rank (represented by <i>-1</i> wherever it applies).</p>
*
* <p>{@link GenotypeAlleleCounts} instances have themselves their own index (returned by {@link #index() index()}, that indicate their 0-based ordinal within the possible genotype combinations with the same ploidy.</p>
*
* <p>For example, for ploidy 3:</p>
*
* <table>
* <th>Index</th><th>Genotype</th>
* <tr><td>0</td><td><b>0/0/0</b></td></tr>
* <tr><td>1</td><td><b>0/0/1</b></td></tr>
* <tr><td>2</td><td><b>0/1/1</b></td></tr>
* <tr><td>3</td><td><b>1/1/1</b></td></tr>
* <tr><td>4</td><td><b>0/0/2</b></td></tr>
* <tr><td>5</td><td><b>0/1/2</b></td></tr>
* <tr><td>6</td><td><b>1/1/2</b></td></tr>
* <tr><td>7</td><td><b>0/2/2</b></td></tr>
* <tr><td>8</td><td><b>1/2/2</b></td></tr>
* <tr><td>9</td><td><b>2/2/2</b></td></tr>
* <tr><td>10</td><td><b>0/0/3</b></td></tr>
* <tr><td>11</td><td><b>0/1/3</b></td></tr>
* <tr><td>12</td><td><b>1/1/3</b></td></tr>
* <tr><td>13</td><td><b>0/2/3</b></td></tr>
* <tr><td>14</td><td><b>1/2/3</b></td></tr>
* <tr><td>15</td><td><b>2/2/3</b></td></tr>
* <tr><td>16</td><td><b>0/3/3</b></td></tr>
* <tr><td>...</td><td>...</td></tr>
* </table>
*
* The total number of possible genotypes is only bounded by the maximum allele index.
*
* @author <NAME> <<EMAIL>>
*/
public class GenotypeAlleleCounts implements Comparable<GenotypeAlleleCounts>, Cloneable {
private static final double UNCOMPUTED_LOG_10_COMBINATION_COUNT = -1;
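// Rank vs. index example (illustrative, not part of the original source): in the genotype 2/4/4/7
// the allele indices present are 2, 4 and 7 with ranks 0, 1 and 2 respectively, so
// alleleIndexAt(1) == 4 and alleleRankFor(7) == 2, while alleleRankFor(3) is negative (absent).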
/**
* The log10 number of phased genotypes corresponding to this unphased genotype. For example,
* [0, 1, 1, 1] = AB: log10(2)
* [0, 2] = AA: log10(1)
* [0, 1, 1, 1, 2, 1] = ABC: log10(6)
* [0, 2, 1, 2] = AABB: log10(4!/(2!2!))
* This is evaluated lazily i.e. it is initialized to {@link GenotypeAlleleCounts#UNCOMPUTED_LOG_10_COMBINATION_COUNT}
* and only calculated if its getter is invoked.
*/
private double log10CombinationCount = UNCOMPUTED_LOG_10_COMBINATION_COUNT;
/**
* The ploidy of the genotype.
*/
private final int ploidy;
/**
* Sorted array of integer pairs as described in {@link #GenotypeAlleleCounts(int, int, int...)}.
*/
private int[] sortedAlleleCounts;
/**
* Number of different alleles in the genotype.
*/
private int distinctAlleleCount;
/**
* Index of this genotype within genotypes of the same ploidy.
*/
private int index;
/**
* Creates a new unphased genotype.
*
* <p>This method assumes that the invoker is passing a well-formatted and sorted allele frequency array.
* No checks are done for the sake of performance.</p>
*
* <p>
* The input argument {@code sortedAlleleCounts} list the index of alleles included in the unphased genotype
* and their frequency in the genotype in a single array using consecutive pairs:<br/>
*
* <pre> [allele_1,freq_1,allele_2,freq_2, ... , allele_i, freq_i, ... , allele_n, freq_n]</pre>
*
* <br/>
* No entry can have frequency == 0 (these must be omitted) and entries are sorted by allele index without
* any repetitions so that if <i>i < j</i> then <i>allele_i < allele_j</i>.
*
* </p>
*
* <p>
* The {@code ploidy} provided must be equal to the sum of all frequencies in {@code sortedAlleleCounts}
* </p>
* @param ploidy the genotype ploidy.
* @param sortedAlleleCounts sorted allele counts following the restrictions above.
* @param index the genotype index.
*/
private GenotypeAlleleCounts(final int ploidy, final int index, final int... sortedAlleleCounts) {
this(ploidy, index, sortedAlleleCounts, sortedAlleleCounts.length >> 1);
}
private GenotypeAlleleCounts(final int ploidy, final int index, final int[] sortedAlleleCounts, final int distinctAlleleCount){
this.ploidy = ploidy;
this.index = index;
this.sortedAlleleCounts = sortedAlleleCounts;
this.distinctAlleleCount = distinctAlleleCount;
}
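// Encoding example (illustrative, not part of the original source): the diploid genotype 0/2 is
// represented as ploidy = 2, sortedAlleleCounts = [0, 1, 2, 1] (allele 0 once, allele 2 once)
// and distinctAlleleCount = 2.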
/**
* Gets the log10 combination count, computing it if uninitialized. Note that the invoked MathUtils method uses fast cached
* log10 values of integers for any reasonable ploidy.
*
* This method should be invoked on instances of {@link GenotypeAlleleCounts} cached in {@link GenotypeLikelihoodCalculators#genotypeTableByPloidy}.
* Such usage allows the result of this computation to be cached once for an entire run of HaplotypeCaller.
* @return the log10 count of phased genotypes equivalent to this unphased genotype.
*/
public double log10CombinationCount() {
if (log10CombinationCount == UNCOMPUTED_LOG_10_COMBINATION_COUNT) {
log10CombinationCount = MathUtils.log10Factorial(ploidy)
- new IndexRange(0, distinctAlleleCount).sum(n -> MathUtils.log10Factorial(sortedAlleleCounts[2*n+1]));
}
return log10CombinationCount;
}
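// Worked example (illustrative, not part of the original source): for the triploid genotype AAB,
// sortedAlleleCounts = [0, 2, 1, 1], so this method returns
//   log10(3!) - (log10(2!) + log10(1!)) = log10(6) - log10(2) = log10(3),
// i.e. the three phased orderings AAB, ABA and BAA.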
/**
* Returns the genotype's ploidy.
* @return 0 or greater.
*/
public int ploidy() {
return ploidy;
}
/**
* Increases the allele counts a number of times.
*
* <p>
* This method must not be invoked on cached genotype-allele-counts that are meant to remain constant,
* such as the ones contained in {@link GenotypeLikelihoodCalculators#genotypeTableByPloidy}.
* </p>
*
* @param times the number of times to increase.
*
* @throws IllegalArgumentException if {@code times} is negative.
*/
protected void increase(final int times) {
for (int i = 0; i < times; i++)
increase();
}
/**
* Updates the genotype counts to match the next genotype.
*
* <p>
* This method must not be invoked on cached genotype-allele-counts that are meant to remain constant,
* such as the ones contained in {@link GenotypeLikelihoodCalculators#genotypeTableByPloidy}
* </p>
*/
protected void increase() {
// if the ploidy is zero there is only one possible genotype.
if (distinctAlleleCount == 0)
return;
// Worth making this case faster.
if (distinctAlleleCount == 1) {
if (ploidy == 1) {
sortedAlleleCounts[0]++;
} else {
if (sortedAlleleCounts.length < 4)
sortedAlleleCounts = Arrays.copyOf(sortedAlleleCounts,4);
sortedAlleleCounts[2] = sortedAlleleCounts[0] + 1;
sortedAlleleCounts[3] = 1;
sortedAlleleCounts[0] = 0;
sortedAlleleCounts[1] = ploidy - 1;
distinctAlleleCount = 2;
}
} else {
// Now, all the following ifs are just the way to avoid working with dynamically sizing List<int[]>
// as the final size of the resulting new sorted-allele-counts array varies depending on the situation.
// this is considerably faster and the logic complexity would not be that different actually so it is worth
// the if indentations.
//
// Notice that at this point distinctAlleleCount >= 2 thus sortedAlleleCounts.length >= 4.
//
// We only need to look at the two lowest allele indices to decide what to do.
final int allele0 = sortedAlleleCounts[0];
final int freq0 = sortedAlleleCounts[1];
final int allele1 = sortedAlleleCounts[2];
final int allele0Plus1 = allele0 + 1;
final boolean allele0And1AreConsecutive = allele0Plus1 == allele1;
// The rest of the sorted allele counts array contains junk
final int sortedAlleleCountsLength = distinctAlleleCount << 1;
if (freq0 == 1) { // in this case allele0 won't be present in the result and all its frequency should go to allele0 + 1.
if (allele0And1AreConsecutive) { // need just to remove the first allele and add 1 to the frequency of the second (freq1 += 1).
System.arraycopy(sortedAlleleCounts, 2, sortedAlleleCounts, 0, sortedAlleleCountsLength - 2); // shift left the first component away.
sortedAlleleCounts[1]++; // freq1 has become freq0.
distinctAlleleCount--;
} else // just need to mutate allele0 to allele0 + 1.
sortedAlleleCounts[0] = allele0Plus1;
} else { // && freq0 > 1 as per sortedAlleleCounts format restrictions. In this case allele0 will be mutated to '0' with frequency decreased by 1.
if (allele0And1AreConsecutive) { // we don't need to add a component for allele0 + 1 since it already exists.
sortedAlleleCounts[0] = 0;
sortedAlleleCounts[1] = freq0 - 1;
sortedAlleleCounts[3]++;
} else { // we need to insert allele0 + 1 in the sorted-allele-counts array and give it frequency 1.
if (sortedAlleleCounts.length < sortedAlleleCountsLength + 2) // make room for the new component.
sortedAlleleCounts = Arrays.copyOf(sortedAlleleCounts,sortedAlleleCountsLength + 2);
System.arraycopy(sortedAlleleCounts, 2, sortedAlleleCounts, 4, sortedAlleleCountsLength - 2);
sortedAlleleCounts[0] = 0;
sortedAlleleCounts[1] = freq0 - 1;
sortedAlleleCounts[2] = allele0Plus1;
sortedAlleleCounts[3] = 1;
distinctAlleleCount++;
}
}
}
index++;
log10CombinationCount = UNCOMPUTED_LOG_10_COMBINATION_COUNT;
}
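// Illustrative trace (not part of the original source), ploidy 2, repeatedly calling increase():
//   [0,2] (0/0) -> [0,1,1,1] (0/1) -> [1,2] (1/1) -> [0,1,2,1] (0/2) -> [1,1,2,1] (1/2) -> [2,2] (2/2)
// which matches the canonical genotype ordering documented in the class javadoc.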
/**
* Calculates the next genotype in likelihood indexing order.
* @return never null.
*/
protected GenotypeAlleleCounts next() {
// if the ploidy is zero there is only one possible genotype.
if (distinctAlleleCount == 0)
return this;
// Worth making this case faster.
if (distinctAlleleCount == 1) {
if (ploidy == 1) // A -> B , D -> E etc...
return new GenotypeAlleleCounts(1, index + 1, sortedAlleleCounts[0] + 1, 1);
else // AAAAA -> AAAAB, DDD -> AAE etc...
return new GenotypeAlleleCounts(ploidy, index + 1, 0, ploidy - 1, sortedAlleleCounts[0] + 1, 1);
}
// Now, all the following ifs are just the way to avoid working with dynamically sizing List<int[]>
// as the final size of the resulting new sorted-allele-counts array varies depending on the situation.
// this is considerably faster and the logic complexity would not be that different actually so it is worth
// the if indentations.
//
// Notice that at this point distinctAlleleCount >= 2 thus sortedAlleleCounts.length >= 4.
//
// We only need to look at the two lowest allele indices to decide what to do.
final int allele0 = sortedAlleleCounts[0];
final int freq0 = sortedAlleleCounts[1];
final int allele1 = sortedAlleleCounts[2];
final int allele0Plus1 = allele0 + 1;
final boolean allele0And1AreConsecutive = allele0Plus1 == allele1;
final int[] newSortedAlleleCounts;
// The rest of the sorted allele counts array contains junk
final int sortedAlleleCountsLength = distinctAlleleCount << 1;
if (freq0 == 1) { // in this case allele0 won't be present in the result and all its frequency should go to allele0 + 1.
if (allele0And1AreConsecutive) { // need just to remove the first allele and add 1 to the frequency of the second (freq1 += 1).
newSortedAlleleCounts = Arrays.copyOfRange(sortedAlleleCounts,2,sortedAlleleCountsLength);
newSortedAlleleCounts[1]++;
} else { // just need to mutate allele0 to allele0 + 1.
newSortedAlleleCounts = Arrays.copyOf(sortedAlleleCounts,sortedAlleleCountsLength);
newSortedAlleleCounts[0] = allele0Plus1;
// newSortedAlleleCounts[1] = 1; // :) no need to do it because it is already the case (freq0 == 1).
}
} else { // && freq0 > 1 as per sortedAlleleCounts format restrictions. In this case allele0 will be mutated to '0' with frequency decreased by 1.
if (allele0And1AreConsecutive) { // we don't need to add a component for allele0 + 1 since it already exists.
newSortedAlleleCounts = sortedAlleleCounts.clone();
newSortedAlleleCounts[0] = 0;
newSortedAlleleCounts[1] = freq0 - 1;
newSortedAlleleCounts[3]++;
} else { // we need to insert allele0 + 1 in the sorted-allele-counts array.
newSortedAlleleCounts = new int[sortedAlleleCountsLength + 2];
newSortedAlleleCounts[0] = 0;
newSortedAlleleCounts[1] = freq0 - 1;
newSortedAlleleCounts[2] = allele0Plus1;
newSortedAlleleCounts[3]++; // = 1 as the array was freshly created with 0s.
System.arraycopy(sortedAlleleCounts,2,newSortedAlleleCounts,4,sortedAlleleCountsLength - 2);
}
}
return new GenotypeAlleleCounts(ploidy, index + 1, newSortedAlleleCounts);
}
/**
* Returns the number of different alleles that participate in the genotype.
*
* @return 0 or greater.
*/
public int distinctAlleleCount() {
return distinctAlleleCount;
}
/**
* Returns the index of the allele from its rank in the genotype.
*
* @param rank the query rank.
*
* @throws IllegalArgumentException if the {@code rank} provided is outside the valid range [0,{@link #distinctAlleleCount()}).
*
* @return 0 or greater.
*/
public int alleleIndexAt(final int rank) {
if (rank < 0 || rank >= distinctAlleleCount)
throw new IllegalArgumentException("the requested rank " + rank + " is out of range [0," + distinctAlleleCount + ")");
return sortedAlleleCounts[rank << 1];
}
/**
* Returns the rank of an allele in the genotype by its index.
*
* @param index the target index.
*
* @throws IllegalArgumentException if {@code index} is less than 0. Indices can be arbitrarily large.
*
* @return -1 or less if the allele index is not present in the genotype, 0 to {@link #distinctAlleleCount()} - 1 otherwise.
* If negative, the absolute value can be used to determine where would be that index inserted within {@code [0,{@link #distinctAlleleCount()}]} as
* {@code - result - 1}.
*
*/
public int alleleRankFor(final int index) {
if (index < 0)
throw new IllegalArgumentException("the index must be 0 or greater");
return alleleIndexToRank(index, 0, distinctAlleleCount);
}
/**
* Generates a string that would represent the unphased genotype with this allele counts.
*
* <p>
* In this string allele calls appear in alleleIndex order with as many repeats as copies of each allele. So
* for example:<br/>
* <pre>
* 0 # haploid reference.
* 0/0 # typical diploid calls
* 0/1
* 1/1
* 0/0/1/3/3 # pentaploid with two ref, one first alt. and two third alt. alleles
* </pre>
*
* </p>
*
* @return never {@code null}.
*/
public String toUnphasedGenotypeString() {
if (ploidy == 0) return "";
final StringBuilder sb = new StringBuilder(distinctAlleleCount * 3);
for (int i = 0; i < distinctAlleleCount * 2; i += 2) {
final int alleleIndex = sortedAlleleCounts[i];
final int alleleCount = sortedAlleleCounts[i + 1];
for (int j = 0; j < alleleCount; j++)
sb.append(alleleIndex).append('/');
}
sb.setLength(sb.length() - 1);
return sb.toString();
}
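// Example output (illustrative): with ploidy = 4 and sortedAlleleCounts = [0, 2, 3, 2] this
// method produces "0/0/3/3".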
@Override
public String toString() {
// Perhaps we should change in the future, but the unphased genotype representation seems to be
// a good one.
return toUnphasedGenotypeString();
}
/**
* {@inheritDoc}
*/
@Override
public boolean equals(final Object o) {
if (o instanceof GenotypeAlleleCounts)
return equals((GenotypeAlleleCounts)o);
else
return false;
}
/**
* Compares with another genotype.
* @param o the other genotype.
* @return {@code true} iff both genotypes have the same ploidy and the same allele counts.
*/
public boolean equals(final GenotypeAlleleCounts o) {
if (o == this)
return true;
if (o == null)
return false;
if (ploidy != o.ploidy)
return false;
return Arrays.equals(sortedAlleleCounts, o.sortedAlleleCounts);
}
/**
* Returns the index of this genotype allele count within all possible genotypes with the same ploidy.
*
* @return 0 or greater.
*/
public int index() {
return index;
}
/**
* Compares two genotypes.
*
* <p>A genotype with larger ploidy is considered greater than one with a lower ploidy. If both genotypes have
* the same ploidy, then the genotype with the larger index (as returned by {@link #index()}) is considered greater.</p>
*
* @param other genotype to compare to.
*
* @throws IllegalArgumentException if {@code other} is {@code null}.
*
* @return 0 if both genotypes are equivalent, < 0 if this genotype is less than {@code other} and > 0
* if this genotype is greater than {@code other}.
*/
@Override
public int compareTo(final GenotypeAlleleCounts other) {
if (other == this)
return 0;
if (other == null)
throw new IllegalArgumentException("input genotype cannot be null");
if (other.ploidy == ploidy)
return index - other.index;
else
return ploidy - other.ploidy;
}
@Override
public int hashCode() {
return ((31 + ploidy) * 31 ) + index;
}
/**
* Implements binary search across allele indexes.
* @param index the target index.
* @param from first inclusive possible rank.
* @param to last exclusive possible rank.
* @return -1 or less if the allele index is not in the genotype, otherwise its rank. You can obtain
* the potential insertion point (within the interval [from,to]) as {@code -result - 1}
*/
private int alleleIndexToRank(final int index,final int from, final int to) {
if (to <= from)
return - from - 1;
if (from == to - 1) {
final int onlyIndex = sortedAlleleCounts[from << 1];
return onlyIndex == index ? from : (onlyIndex > index) ? -from - 1 : -to - 1;
}
final int mid = (to + from) >> 1;
final int midIndex = sortedAlleleCounts[mid << 1];
if (midIndex == index)
return mid;
else if (midIndex < index)
return alleleIndexToRank(index,mid + 1,to);
else
return alleleIndexToRank(index,0,mid);
}
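// Illustrative example (not part of the original source): for sortedAlleleCounts = [0, 1, 2, 1, 5, 1]
// (alleles 0, 2 and 5), alleleIndexToRank(2, 0, 3) returns rank 1, while alleleIndexToRank(3, 0, 3)
// returns -3, meaning allele 3 is absent and would be inserted at rank -(-3) - 1 = 2.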
/**
* Returns the count of an allele in the genotype given its rank in the genotype (not the allele index itself).
*
* @param rank of the requested allele within the genotype.
*
* @throws IllegalArgumentException if {@code rank} is out of the valid range [0,{@link #distinctAlleleCount})
*
* @return 1 or greater.
*/
public int alleleCountAt(final int rank) {
if (rank < 0 || rank >= distinctAlleleCount)
throw new IllegalArgumentException("the rank is out of range");
return sortedAlleleCounts[(rank << 1) + 1];
}
/**
* Checks whether this genotype contain at least one call on a particular allele index.
*
* @param index the target allele.
*
* @throws IllegalArgumentException if {@code index} is negative.
*
* @return {@code true} iff the genotype contains that allele index.
*/
public boolean containsAllele(final int index) {
return alleleRankFor(index) >= 0;
}
/**
* Returns the count of an allele in the genotype given its index.
*
* @return 0 if the allele is not present in the genotype, 1 or more otherwise.
*/
public int alleleCountFor(final int index) {
final int rank = alleleRankFor(index);
return rank < 0 ? 0 : alleleCountAt(rank);
}
/**
* Returns the allele counts for each allele index to maximum.
* @param maximumAlleleIndex the maximum allele index required.
* @throws IllegalArgumentException if {@code maximumAlleleIndex} is less than 0.
* @return never {@code null}, an array of exactly {@code maximumAlleleIndex + 1} positions with the counts
* of each allele where the position in the array is equal to its index.
*/
public int[] alleleCountsByIndex(final int maximumAlleleIndex) {
if (maximumAlleleIndex < 0)
throw new IllegalArgumentException("the requested allele count cannot be less than 0");
final int[] result = new int[maximumAlleleIndex + 1];
copyAlleleCountsByIndex(result, 0, 0, maximumAlleleIndex);
return result;
}
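    // Illustrative example (added commentary): for a diploid genotype carrying one copy of allele 0
    // and one copy of allele 2 (sortedAlleleCounts = {0, 1, 2, 1}),
    //   alleleCountsByIndex(3) == {1, 0, 1, 0}    // the absent alleles 1 and 3 are reported as 0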
private void copyAlleleCountsByIndex(final int[] dest, final int offset, final int minimumAlleleIndex, final int maximumAlleleIndex) {
        // First we determine what section of the sortedAlleleCounts array contains the counts of interest,
        // i.e. the range of allele ranks that are actually present.
final int minimumAlleleRank = alleleRankFor(minimumAlleleIndex);
final int maximumAlleleRank = alleleRankFor(maximumAlleleIndex);
        // If the min or max allele index is absent (returned rank < 0) we note where it would be inserted; that
// way we avoid going through the rest of positions in the sortedAlleleCounts array.
// The range of interest is then [startRank,endRank].
final int startRank = minimumAlleleRank < 0 ? - minimumAlleleRank - 1 : minimumAlleleRank;
final int endRank = maximumAlleleRank < 0 ? - maximumAlleleRank - 2 : maximumAlleleRank;
// Iteration variables:
int nextIndex = minimumAlleleIndex; // next index that we want to output the count for.
int nextRank = startRank; // next rank to query in sortedAlleleCounts.
int nextSortedAlleleCountsOffset = nextRank << 1; // offset in sortedAlleleCounts where the info is present for the next rank.
int nextDestOffset = offset; // next offset in destination array where to set the count for the nextIndex.
while (nextRank++ <= endRank) {
final int alleleIndex = sortedAlleleCounts[nextSortedAlleleCountsOffset++];
// fill non-present allele counts with 0s.
while (alleleIndex > nextIndex) {
dest[nextDestOffset++] = 0;
nextIndex++;
}
// It is guaranteed that at this point alleleIndex == nextIndex
// thanks to the condition of the enclosing while: there must be at least one index of interest that
// is present in the remaining (nextRank,endRank] interval as otherwise endRank would be less than nextRank.
dest[nextDestOffset++] = sortedAlleleCounts[nextSortedAlleleCountsOffset++];
nextIndex++;
}
// Finally we take care of trailing requested allele indices.
while (nextIndex++ <= maximumAlleleIndex)
dest[nextDestOffset++] = 0;
}
/**
* Copies the sorted allele counts into an array.
*
* <p>
* Sorted allele counts are disposed as an even-sized array where even positions indicate the allele index and
* the following odd positions the number of copies of that allele in this genotype allele count:
* </p>
* <p><pre>
* [ allele_0, freq_0, allele_1, freq_1 ... ]
* </pre></p>
*
* <p>
* With {@code offset} you can indicate an alternative first position in the destination array.
* </p>
*
* @param dest where to copy the counts.
* @param offset starting position.
*
* @throws IllegalArgumentException if {@code dest} is {@code null}, {@code offset} is less than 0
* or {@code dest} is not large enough considering the number of alleles present in this genotype
* allele counts and the {@code offset} provided. A total of
* <code>{@link #distinctAlleleCount()} * 2 positions</code>
* are required for the job.
*/
public void copyAlleleCounts(final int[] dest, final int offset) {
if (dest == null)
throw new IllegalArgumentException("the destination cannot be null");
if (offset < 0)
throw new IllegalArgumentException("the offset cannot be negative");
final int sortedAlleleCountsLength = distinctAlleleCount << 1;
if (offset + sortedAlleleCountsLength > dest.length)
throw new IllegalArgumentException("the input array does not have enough capacity");
System.arraycopy(sortedAlleleCounts,0,dest,offset,sortedAlleleCountsLength);
}
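    // Illustrative example (added commentary): a triploid 0/0/2 genotype is encoded as
    // sortedAlleleCounts = {0, 2, 2, 1}; copyAlleleCounts(dest, 0) therefore needs a dest with at
    // least distinctAlleleCount * 2 == 4 positions and fills them with {0, 2, 2, 1}.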
/**
* Instantiates the first genotype possible provided a total ploidy.
* @param ploidy the ploidy of the genotype.
*
* @throws java.lang.IllegalArgumentException if ploidy is less than 0.
*
* @return never {@code null}.
*/
protected static GenotypeAlleleCounts first(final int ploidy) {
if (ploidy < 0)
throw new IllegalArgumentException("the ploidy must be 0 or greater");
else if (ploidy == 0)
return new GenotypeAlleleCounts(0,0);
else
return new GenotypeAlleleCounts(ploidy, 0, 0, ploidy);
}
/**
* Makes the next genotype in likelihood indexing order.
*
* @param g the original genotype.
*
* @throws IllegalArgumentException if {@code g} is {@code null}.
*
* @return never {@code null}.
*/
public static GenotypeAlleleCounts makeNextGenotype(final GenotypeAlleleCounts g) {
if (g == null)
            throw new IllegalArgumentException("the input genotype cannot be null");
return g.next();
}
/**
* Returns the largest allele index present in the genotype.
*
     * @return -1 if there are no alleles (ploidy == 0), 0 or greater otherwise.
*/
public int maximumAlleleIndex() {
if (distinctAlleleCount == 0)
return -1;
else
return sortedAlleleCounts[(distinctAlleleCount - 1) << 1];
}
/**
* Returns the smallest allele index present in the genotype.
*
* @return -1 if there is no allele (ploidy == 0), 0 or greater otherwise.
*/
public int minimumAlleleIndex() {
if (distinctAlleleCount == 0)
return -1;
else
return sortedAlleleCounts[0];
}
/**
* Creates an independent copy of this genotype.
* @return never {@code null}.
*/
@Override
protected GenotypeAlleleCounts clone() {
final GenotypeAlleleCounts result;
try {
result = (GenotypeAlleleCounts) super.clone();
} catch (final CloneNotSupportedException e) {
throw new IllegalStateException(e);
}
result.sortedAlleleCounts = Arrays.copyOf(sortedAlleleCounts,distinctAlleleCount << 1);
return result;
}
/**
* Composes a list with the alleles.
*
* @param allelesToUse alleles to use.
*
* @throws IllegalArgumentException if {@code allelesToUse} is {@code null},
* or does not contain enough elements to accommodate the maximum allele index in this allele-counts.
*
* @return never null, but it might be restricted (unmodifiable or non-expandable).
*/
public <T extends Allele> List<T> asAlleleList(final List<T> allelesToUse) {
if (allelesToUse == null)
throw new IllegalArgumentException("the input allele list cannot be null");
        if (allelesToUse.size() <= maximumAlleleIndex())
            throw new IllegalArgumentException("the provided alleles to use do not contain an element for the maximum allele index");
if (distinctAlleleCount == 1 ) {
if (ploidy == 1)
return Collections.singletonList(allelesToUse.get(sortedAlleleCounts[0]));
else
return Collections.nCopies(ploidy,allelesToUse.get(sortedAlleleCounts[0]));
} else {
final Allele[] myAlleles = new Allele[ploidy];
int nextIndex = 0;
for (int i = 0, ii = 0; i < distinctAlleleCount; i++) {
final Allele allele = allelesToUse.get(sortedAlleleCounts[ii++]);
final int repeats = sortedAlleleCounts[ii++];
for (int j = 0; j < repeats; j++)
myAlleles[nextIndex++] = allele;
}
return (List<T>) Arrays.asList(myAlleles);
}
}
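    // Illustrative example (added commentary): for a diploid 0/1 genotype and
    // allelesToUse = [Aref, C], asAlleleList(allelesToUse) yields [Aref, C]; a homozygous 1/1
    // genotype yields the two-copy list [C, C] via Collections.nCopies.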
/**
* Returns an array with the allele indices repeated based on the number of occurrences in the genotype.
*
* <p>
* indices are sorted from the smallest to the greatest.
* </p>
*
* <p>
* If a sufficiently large array is provided as {@code dest}, this is used as the destination. Unnecessary
* positions at the back of the array are left untouched.
* </p>
*
* <p>
     * However if {@code dest} is {@code null} or it does not have enough space, a new array with length equal to
* the ploidy will be used and returned instead.
* </p>
*
* @param dest destination array. Can be {@code null} or not have sufficient positions (ploidy); in that case a new
* one is created.
* @return never {@code null}, {@code dest} if sufficiently large otherwise an array of ploidy positions.
*/
public int[] toAlleleIndicesArray(final int[] dest) {
final int[] result = dest == null || dest.length < ploidy ? new int[ploidy] : dest;
int k = 0;
for (int i = 0,ii = 0; i < distinctAlleleCount; i++) {
final int index = sortedAlleleCounts[ii++];
final int repeats = sortedAlleleCounts[ii++];
for (int j = 0; j < repeats; j++)
result[k++] = index;
}
return result;
}
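    // Illustrative example (added commentary): a triploid 0/0/2 genotype
    // (sortedAlleleCounts = {0, 2, 2, 1}) gives toAlleleIndicesArray(null) == {0, 0, 2};
    // passing an int[] of length >= 3 reuses that array instead of allocating a new one.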
@FunctionalInterface
public interface IntBiConsumer {
void accept(final int alleleIndex, final int alleleCount);
}
@FunctionalInterface
public interface IntToDoubleBiFunction {
double apply(final int alleleIndex, final int alleleCount);
}
public void forEachAlleleIndexAndCount(final IntBiConsumer action) {
new IndexRange(0, distinctAlleleCount).forEach(n -> action.accept(sortedAlleleCounts[2*n], sortedAlleleCounts[2*n+1]));
}
public double sumOverAlleleIndicesAndCounts(final IntToDoubleBiFunction func) {
return new IndexRange(0, distinctAlleleCount).sum(n -> func.apply(sortedAlleleCounts[2*n], sortedAlleleCounts[2*n+1]));
}
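    // Illustrative example (added commentary): sumOverAlleleIndicesAndCounts((allele, count) -> count)
    // recovers the ploidy, and forEachAlleleIndexAndCount((allele, count) -> ...) visits each
    // distinct allele exactly once, in increasing allele-index order.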
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/afcalc/AFCalculatorTestBuilder.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper.afcalc;
import org.apache.commons.lang.ArrayUtils;
import org.broadinstitute.gatk.tools.walkers.genotyper.AFPriorProvider;
import org.broadinstitute.gatk.tools.walkers.genotyper.GenotypingEngine;
import org.broadinstitute.gatk.tools.walkers.genotyper.UnifiedGenotypingEngine;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.Utils;
import htsjdk.variant.variantcontext.*;
import org.broadinstitute.gatk.utils.variant.HomoSapiensConstants;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class AFCalculatorTestBuilder {
final static Allele A = Allele.create("A", true);
final static Allele C = Allele.create("C");
final static Allele G = Allele.create("G");
final static Allele T = Allele.create("T");
final static Allele AA = Allele.create("AA");
final static Allele AT = Allele.create("AT");
final static Allele AG = Allele.create("AG");
static int sampleNameCounter = 0;
final int nSamples;
final int numAltAlleles;
final AFCalculatorImplementation modelType;
final PriorType priorType;
public AFCalculatorTestBuilder(final int nSamples, final int numAltAlleles,
final AFCalculatorImplementation modelType, final PriorType priorType) {
this.nSamples = nSamples;
this.numAltAlleles = numAltAlleles;
this.modelType = modelType;
this.priorType = priorType;
}
@Override
public String toString() {
return String.format("AFCalcTestBuilder nSamples=%d nAlts=%d model=%s prior=%s", nSamples, numAltAlleles, modelType, priorType);
}
public enum PriorType {
flat,
human
}
public int getNumAltAlleles() {
return numAltAlleles;
}
public int getnSamples() {
return nSamples;
}
public AFCalculator makeModel() {
return AFCalculatorFactory.createCalculator(modelType, nSamples, getNumAltAlleles(), HomoSapiensConstants.DEFAULT_PLOIDY);
}
public double[] makePriors() {
final int nPriorValues = 2*nSamples+1;
final double human_theta = 0.001;
switch ( priorType ) {
case flat:
return MathUtils.normalizeFromLog10(new double[nPriorValues], true); // flat priors
//TODO break dependency with human... avoid special reference to this species.
case human:
final AFPriorProvider log10priorProvider = GenotypingEngine.composeAlleleFrequencyPriorProvider(2*nSamples, human_theta, new ArrayList<Double>());
final double[] humanPriors = log10priorProvider.forTotalPloidy(2*nSamples);
return humanPriors;
default:
throw new RuntimeException("Unexpected type " + priorType);
}
}
public VariantContext makeACTest(final List<Integer> ACs, final int nNonInformative, final int nonTypePL) {
return makeACTest(ArrayUtils.toPrimitive(ACs.toArray(new Integer[]{})), nNonInformative, nonTypePL);
}
public VariantContext makeACTest(final int[] ACs, final int nNonInformative, final int nonTypePL) {
final int nChrom = nSamples * 2;
final int[] nhet = new int[numAltAlleles];
final int[] nhomvar = new int[numAltAlleles];
for ( int i = 0; i < ACs.length; i++ ) {
final double p = ACs[i] / (1.0 * nChrom);
nhomvar[i] = (int)Math.floor((nSamples - nNonInformative) * p * p);
nhet[i] = ACs[i] - 2 * nhomvar[i];
if ( nhet[i] < 0 )
throw new IllegalStateException("Bug! nhet[i] < 0");
}
final long calcAC = MathUtils.sum(nhet) + 2 * MathUtils.sum(nhomvar);
if ( calcAC != MathUtils.sum(ACs) )
throw new IllegalStateException("calculated AC " + calcAC + " not equal to desired AC " + Utils.join(",", ACs));
return makeACTest(nhet, nhomvar, nNonInformative, nonTypePL);
}
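    // Illustrative example (added commentary, hypothetical numbers): with nSamples = 100
    // (200 chromosomes), ACs = {20} and nNonInformative = 0, p = 0.1, so
    // nhomvar[0] = floor(100 * 0.01) = 1 and nhet[0] = 20 - 2 * 1 = 18, which together carry the
    // requested allele count of 18 + 2 * 1 = 20.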
public VariantContext makeACTest(final int[] nhet, final int[] nhomvar, final int nNonInformative, final int nonTypePL) {
List<Genotype> samples = new ArrayList<Genotype>(nSamples);
for ( int altI = 0; altI < nhet.length; altI++ ) {
for ( int i = 0; i < nhet[altI]; i++ )
samples.add(makePL(GenotypeType.HET, nonTypePL, altI+1));
for ( int i = 0; i < nhomvar[altI]; i++ )
samples.add(makePL(GenotypeType.HOM_VAR, nonTypePL, altI+1));
}
final Genotype nonInformative = makeNonInformative();
samples.addAll(Collections.nCopies(nNonInformative, nonInformative));
final int nRef = Math.max((int) (nSamples - nNonInformative - MathUtils.sum(nhet) - MathUtils.sum(nhomvar)), 0);
samples.addAll(Collections.nCopies(nRef, makePL(GenotypeType.HOM_REF, nonTypePL, 0)));
samples = samples.subList(0, nSamples);
if ( samples.size() > nSamples )
throw new IllegalStateException("too many samples");
VariantContextBuilder vcb = new VariantContextBuilder("x", "1", 1, 1, getAlleles());
vcb.genotypes(samples);
return vcb.make();
}
public List<Allele> getAlleles() {
return Arrays.asList(A, C, G, T, AA, AT, AG).subList(0, numAltAlleles+1);
}
public List<Allele> getAlleles(final GenotypeType type, final int altI) {
switch (type) {
case HOM_REF: return Arrays.asList(getAlleles().get(0), getAlleles().get(0));
case HET: return Arrays.asList(getAlleles().get(0), getAlleles().get(altI));
case HOM_VAR: return Arrays.asList(getAlleles().get(altI), getAlleles().get(altI));
default: throw new IllegalArgumentException("Unexpected type " + type);
}
}
public Genotype makePL(final List<Allele> expectedGT, int ... pls) {
GenotypeBuilder gb = new GenotypeBuilder("sample" + sampleNameCounter++);
gb.alleles(expectedGT);
gb.PL(pls);
return gb.make();
}
private int numPLs() {
return GenotypeLikelihoods.numLikelihoods(numAltAlleles+1, 2);
}
public Genotype makeNonInformative() {
final int[] nonInformativePLs = new int[GenotypeLikelihoods.numLikelihoods(numAltAlleles, 2)];
return makePL(Arrays.asList(Allele.NO_CALL, Allele.NO_CALL), nonInformativePLs);
}
public Genotype makePL(final GenotypeType type, final int nonTypePL, final int altI) {
GenotypeBuilder gb = new GenotypeBuilder("sample" + sampleNameCounter++);
gb.alleles(getAlleles(type, altI));
final int[] pls = new int[numPLs()];
Arrays.fill(pls, nonTypePL);
int index = 0;
switch ( type ) {
case HOM_REF: index = GenotypeLikelihoods.calculatePLindex(0, 0); break;
case HET: index = GenotypeLikelihoods.calculatePLindex(0, altI); break;
case HOM_VAR: index = GenotypeLikelihoods.calculatePLindex(altI, altI); break;
}
pls[index] = 0;
gb.PL(pls);
return gb.make();
}
}<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/cancer/m2/M2_Panel_Of_Normals_Creation.md
# M2 Panel of Normals
In order to reduce false positives, we use a panel of "normal" (i.e. non-cancer) samples to filter out both germline events and systematic noise. The panel takes the form of a VCF file, which is produced via a Queue script.
You must supply:
- the reference (defaults to hg19)
- the intervals to evaluate
- the list of BAM files
### How To Run
The Queue script for producing the PON can be found in the gsa-unstable repository under ```private/gatk-tools-private/src/main/java/org/broadinstitute/gatk/tools/walkers/cancer/m2```
First, choose the appropriate settings (set here as environment variables)
```
QUEUE_JAR=<your-queue-jar>
GSA_UNSTABLE_HOME=<path-to-your-gsa-unstable-checkout>
TEMPDIR=/broad/hptmp/$USER
```
and then run the following Queue command
```
java \
-Djava.io.tmpdir=$TEMPDIR \
-jar $QUEUE_JAR \
-S $GSA_UNSTABLE_HOME/private/gatk-tools-private/src/main/java/org/broadinstitute/gatk/tools/walkers/cancer/m2/run_M2_ICE_NN.scala \
--job_queue gsa -qsub -jobResReq virtual_free=5G -startFromScratch \
-sc 50 \
--allbams <your-list-of-bams> \
--intervals <your-intervals> \
--outputprefix <your-output-prefix> \
--start_from_scratch --keep_intermediate_outputs \
-run
```
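Once the Queue run completes (its outputs are described in the note below), the resulting panel-of-normals VCF is supplied to M2 when calling somatic variants. The command below is an illustrative sketch only; the flag names (`-T MuTect2`, `-I:tumor`, `-I:normal`, `--normal_panel`) follow typical GATK 3.x usage, so adjust them to your GATK version:
```
java -jar GenomeAnalysisTK.jar \
   -T MuTect2 \
   -R <your-reference> \
   -I:tumor <tumor-bam> \
   -I:normal <normal-bam> \
   --normal_panel <your-output-prefix>.vcf \
   -o <somatic-calls-vcf>
```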
The Queue run will produce many VCFs (1 per sample), plus \<your-output-prefix\>.genotypes.vcf and \<your-output-prefix\>.vcf, which are the panel-of-normals VCFs with and without sample-genotype information, respectively. Typically the latter is the one used as input to M2, although either will work.<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/graphs/BaseGraphUnitTest.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller.graphs;
import org.broadinstitute.gatk.utils.BaseTest;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.io.File;
import java.util.*;
public class BaseGraphUnitTest extends BaseTest {
SeqGraph graph;
SeqVertex v1, v2, v3, v4, v5;
@BeforeMethod
public void setUp() throws Exception {
graph = new SeqGraph(11);
v1 = new SeqVertex("A");
v2 = new SeqVertex("C");
v3 = new SeqVertex("C");
v4 = new SeqVertex("C");
v5 = new SeqVertex("C");
graph.addVertices(v1, v2, v3, v4, v5);
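        // Graph topology under test: v1 -> v2, v2 <-> v3 (a two-node cycle), and v2 -> v4 -> v5.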
graph.addEdge(v1, v2);
graph.addEdge(v2, v4);
graph.addEdge(v3, v2);
graph.addEdge(v2, v3);
graph.addEdge(v4, v5);
}
@Test
public void testIncomingAndOutgoingVertices() throws Exception {
assertVertexSetEquals(graph.outgoingVerticesOf(v1), v2);
assertVertexSetEquals(graph.incomingVerticesOf(v1));
assertVertexSetEquals(graph.outgoingVerticesOf(v2), v3, v4);
assertVertexSetEquals(graph.incomingVerticesOf(v2), v1, v3);
assertVertexSetEquals(graph.outgoingVerticesOf(v3), v2);
assertVertexSetEquals(graph.incomingVerticesOf(v3), v2);
assertVertexSetEquals(graph.outgoingVerticesOf(v4), v5);
assertVertexSetEquals(graph.incomingVerticesOf(v4), v2);
assertVertexSetEquals(graph.outgoingVerticesOf(v5));
assertVertexSetEquals(graph.incomingVerticesOf(v5), v4);
}
@Test
public void testRemoveSingletonOrphanVertices() throws Exception {
// all vertices in graph are connected
final List<SeqVertex> kept = new LinkedList<SeqVertex>(graph.vertexSet());
final SeqVertex rm1 = new SeqVertex("CAGT");
final SeqVertex rm2 = new SeqVertex("AGTC");
graph.addVertices(rm1, rm2);
Assert.assertEquals(graph.vertexSet().size(), kept.size() + 2);
final BaseEdge rm12e = new BaseEdge(false, 1);
graph.addEdge(rm1, rm2, rm12e);
final SeqGraph original = (SeqGraph)graph.clone();
graph.removeSingletonOrphanVertices();
Assert.assertTrue(BaseGraph.graphEquals(original, graph), "Graph with disconnected component but edges between components shouldn't be modified");
graph.removeEdge(rm12e); // now we should be able to remove rm1 and rm2
graph.removeSingletonOrphanVertices();
Assert.assertTrue(graph.vertexSet().containsAll(kept));
Assert.assertFalse(graph.containsVertex(rm1));
Assert.assertFalse(graph.containsVertex(rm2));
}
@Test
public void testRemoveSingletonOrphanVerticesOnSingleRefNode() throws Exception {
final SeqGraph original = new SeqGraph(11);
original.addVertex(v1);
original.removeSingletonOrphanVertices();
Assert.assertTrue(original.containsVertex(v1));
Assert.assertEquals(original.vertexSet().size(), 1);
}
@Test
public void testIsRefSourceAndSink() throws Exception {
final SeqGraph g = new SeqGraph(11);
g.addVertex(v1);
Assert.assertTrue(g.isRefSource(v1));
Assert.assertTrue(g.isRefSink(v1));
Assert.assertTrue(g.isReferenceNode(v1));
g.addVertices(v2, v3, v4, v5);
g.addEdge(v1, v2);
g.addEdge(v2, v3);
final BaseEdge refEdge = new BaseEdge(true, 1);
g.addEdge(v3, v4, refEdge);
g.addEdge(v4, v5);
Assert.assertFalse(g.isRefSource(v1));
Assert.assertFalse(g.isRefSink(v1));
Assert.assertFalse(g.isReferenceNode(v1));
Assert.assertFalse(g.isRefSource(v2));
Assert.assertFalse(g.isRefSink(v2));
Assert.assertFalse(g.isReferenceNode(v2));
Assert.assertTrue(g.isRefSource(v3));
Assert.assertFalse(g.isRefSink(v3));
Assert.assertTrue(g.isReferenceNode(v3));
Assert.assertFalse(g.isRefSource(v4));
Assert.assertTrue(g.isRefSink(v4));
Assert.assertTrue(g.isReferenceNode(v4));
Assert.assertFalse(g.isRefSource(v5));
Assert.assertFalse(g.isRefSink(v5));
Assert.assertFalse(g.isReferenceNode(v5));
}
@Test
public void testRemovePathsNotConnectedToRef() throws Exception {
final SeqGraph graph = new SeqGraph(11);
SeqVertex src = new SeqVertex("A");
SeqVertex end = new SeqVertex("A");
SeqVertex g1 = new SeqVertex("C");
SeqVertex g2 = new SeqVertex("G");
SeqVertex g3 = new SeqVertex("T");
SeqVertex g4 = new SeqVertex("AA");
SeqVertex g5 = new SeqVertex("AA");
SeqVertex g6 = new SeqVertex("AA");
SeqVertex g8 = new SeqVertex("AA");
SeqVertex g7 = new SeqVertex("AA");
SeqVertex b1 = new SeqVertex("CC");
SeqVertex b2 = new SeqVertex("GG");
SeqVertex b3 = new SeqVertex("TT");
SeqVertex b4 = new SeqVertex("AAA");
SeqVertex b5 = new SeqVertex("CCC");
SeqVertex b6 = new SeqVertex("GGG");
SeqVertex b7 = new SeqVertex("AAAA");
SeqVertex b8 = new SeqVertex("GGGG");
SeqVertex b9 = new SeqVertex("CCCC");
graph.addVertices(src, end, g1, g2, g3, g4, g5, g6, g7, g8);
graph.addEdges(new BaseEdge(true, 1), src, g1, g2, g4, end);
graph.addEdges(src, g1, g5, g6, g7, end);
graph.addEdges(src, g1, g5, g8, g7, end);
graph.addEdges(src, g1, g3, end);
// the current state of the graph is the good one
final SeqGraph good = (SeqGraph)graph.clone();
// now add the bads to the graph
graph.addVertices(b1, b2, b3, b4, b5, b6, b7, b8, b9);
graph.addEdges(src, b1); // source -> b1 is dead
graph.addEdges(b6, src); // x -> source is bad
graph.addEdges(g4, b2); // off random vertex is bad
graph.addEdges(g3, b3, b4); // two vertices that don't connect to end are bad
graph.addEdges(end, b5); // vertex off end is bad
graph.addEdges(g3, b7, b8, b7); // cycle is bad
graph.addEdges(g3, b9, b9); // self-cycle is bad
final boolean debug = false;
if ( debug ) good.printGraph(new File("expected.dot"), 0);
if ( debug ) graph.printGraph(new File("bad.dot"), 0);
graph.removePathsNotConnectedToRef();
if ( debug ) graph.printGraph(new File("actual.dot"), 0);
Assert.assertTrue(BaseGraph.graphEquals(graph, good), "Failed to remove exactly the bad nodes");
}
@Test
public void testRemoveVerticesNotConnectedToRefRegardlessOfEdgeDirection() throws Exception {
final SeqGraph graph = new SeqGraph(11);
SeqVertex src = new SeqVertex("A");
SeqVertex end = new SeqVertex("A");
SeqVertex g1 = new SeqVertex("C");
SeqVertex g2 = new SeqVertex("G");
SeqVertex g3 = new SeqVertex("T");
SeqVertex g4 = new SeqVertex("AA");
SeqVertex g5 = new SeqVertex("AA");
SeqVertex g6 = new SeqVertex("AA");
SeqVertex g8 = new SeqVertex("AA");
SeqVertex g7 = new SeqVertex("AA");
SeqVertex gPrev = new SeqVertex("AA");
SeqVertex gPrev1 = new SeqVertex("AA");
SeqVertex gPrev2 = new SeqVertex("AA");
SeqVertex gAfter = new SeqVertex("AA");
SeqVertex gAfter1 = new SeqVertex("AA");
SeqVertex gAfter2 = new SeqVertex("AA");
SeqVertex b1 = new SeqVertex("CC");
SeqVertex b2 = new SeqVertex("GG");
SeqVertex b3 = new SeqVertex("TT");
SeqVertex b4 = new SeqVertex("AAA");
SeqVertex b5 = new SeqVertex("CCC");
SeqVertex b6 = new SeqVertex("GGG");
graph.addVertices(src, end, g1, g2, g3, g4, g5, g6, g7, g8, gPrev, gPrev1, gPrev2, gAfter, gAfter1, gAfter2);
graph.addEdges(new BaseEdge(true, 1), src, g1, g2, g4, end);
graph.addEdges(src, g1, g5, g6, g7, end);
graph.addEdges(src, g1, g5, g8, g7, end);
graph.addEdges(src, g1, g3, end);
// these should be kept, but are in the wrong direction
graph.addEdges(gPrev, src);
graph.addEdges(gPrev1, gPrev2, src);
graph.addEdges(end, gAfter);
graph.addEdges(end, gAfter1, gAfter2);
// the current state of the graph is the good one
final SeqGraph good = (SeqGraph)graph.clone();
// now add the bads to the graph
graph.addVertices(b1, b2, b3, b4, b5, b6);
graph.addEdges(b2, b3); // b2 -> b3
graph.addEdges(b4, b5, b4); // cycle
graph.addEdges(b6, b6); // isolated self cycle
final boolean debug = false;
if ( debug ) good.printGraph(new File("expected.dot"), 0);
if ( debug ) graph.printGraph(new File("bad.dot"), 0);
graph.removeVerticesNotConnectedToRefRegardlessOfEdgeDirection();
if ( debug ) graph.printGraph(new File("actual.dot"), 0);
Assert.assertTrue(BaseGraph.graphEquals(graph, good), "Failed to remove exactly the bad nodes");
}
@Test
public void testPrintEmptyGraph() throws Exception {
final File tmp = createTempFile("tmp", "dot");
new SeqGraph(11).printGraph(tmp, 10);
new TestGraph().printGraph(tmp, 10);
}
@Test
public void testComplexGraph() throws Exception {
final File tmp = createTempFile("tmp", "dot");
graph.printGraph(tmp, 10);
}
private void assertVertexSetEquals(final Collection<SeqVertex> actual, final SeqVertex ... expected) {
final Set<SeqVertex> actualSet = new HashSet<SeqVertex>(actual);
Assert.assertEquals(actualSet.size(), actual.size(), "Duplicate elements found in vertex list");
final Set<SeqVertex> expectedSet = expected == null ? Collections.<SeqVertex>emptySet() : new HashSet<SeqVertex>(Arrays.asList(expected));
Assert.assertEquals(actualSet, expectedSet);
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/EventBlockFinder.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller;
import com.google.java.contract.Requires;
import org.broadinstitute.gatk.tools.walkers.haplotypecaller.graphs.MultiSampleEdge;
import org.broadinstitute.gatk.tools.walkers.haplotypecaller.readthreading.HaplotypeGraph;
import org.broadinstitute.gatk.tools.walkers.haplotypecaller.readthreading.MultiDeBruijnVertex;
import org.broadinstitute.gatk.utils.collections.CountSet;
import org.broadinstitute.gatk.utils.collections.Pair;
import java.util.*;
/**
* Encapsulates the graph traversals needed to find event-blocks.
*
* @author <NAME> <<EMAIL>>
*/
public class EventBlockFinder {
private final HaplotypeGraph graph;
private final Map<Pair<MultiDeBruijnVertex,MultiDeBruijnVertex>,EventBlock> eventBlockCache;
/**
* Constructs a new engine.
*
* @param graph the base haplotype graph to iterate over.
*/
public EventBlockFinder(final HaplotypeGraph graph) {
if (graph == null) throw new NullPointerException();
this.graph = graph;
eventBlockCache = new HashMap<>(20);
}
/**
* Create a new traversal object based on a read anchoring.
     * @param anchoring the read anchoring information to traverse.
* @return never {@code null}.
*/
public Traversal traversal(final ReadAnchoring anchoring) {
if (anchoring == null) throw new NullPointerException();
return new Traversal(anchoring);
}
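    // Illustrative usage sketch (added commentary; "anchoring" stands for a ReadAnchoring computed elsewhere):
    //
    //   final EventBlockFinder finder = new EventBlockFinder(haplotypeGraph);
    //   for (final EventBlock block : finder.traversal(anchoring)) {
    //       // event blocks are visited in order between the read's anchor vertices
    //   }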
public class Traversal implements Iterable<EventBlock> {
private final ReadAnchoring anchoring;
private EventBlock lastEventBlock;
private Traversal(final ReadAnchoring anchoring) {
this.anchoring = anchoring;
lastEventBlock = findLastEventBlock(anchoring);
}
@Override
public java.util.Iterator<EventBlock> iterator() {
            return lastEventBlock == null ? Collections.<EventBlock>emptySet().iterator() : new Iterator();
}
private class Iterator implements java.util.Iterator<EventBlock> {
private MultiDeBruijnVertex currentVertex;
private Iterator() {
currentVertex = anchoring.leftAnchorVertex;
}
@Override
public boolean hasNext() {
return currentVertex != null;
}
@Override
public EventBlock next() {
final EventBlock result;
if (currentVertex == null)
throw new NoSuchElementException("going beyond last event block");
else if (currentVertex == lastEventBlock.getSource()) {
result = lastEventBlock;
currentVertex = null;
} else {
final EventBlock candidate = findEventBlock(anchoring,false,currentVertex,lastEventBlock.getSource());
if (candidate == null) {
result = findEventBlock(anchoring,false,currentVertex,anchoring.rightAnchorVertex);
currentVertex = null;
} else {
result = candidate;
currentVertex = candidate.getSink();
}
}
return result;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
}
}
/**
* Finds the last event block.
* <p>
* It can do it forward or backwards.
* </p>
*
* @param anchoring target read anchoring information.
     * @return {@code null} if there is no event block between the read's anchoring vertices, otherwise the last event block found.
*/
private EventBlock findLastEventBlock(
final ReadAnchoring anchoring) {
return findEventBlock(anchoring,true,anchoring.leftAnchorVertex,anchoring.rightAnchorVertex);
}
/**
* Finds an event block forward or backwards along the reference route.
* @param anchoring the read anchoring information.
* @param backwards true if the block should be constructed from right to left.
* @param leftVertex the left vertex
* @param rightVertex the right vertex
* @return {@code null} if there is no such a event block between these coordinates.
*/
private EventBlock findEventBlock(
final ReadAnchoring anchoring, final boolean backwards,
final MultiDeBruijnVertex leftVertex, final MultiDeBruijnVertex rightVertex) {
MultiDeBruijnVertex currentVertex = backwards ? rightVertex : leftVertex;
boolean foundEvent = false;
final CountSet pathSizes = new CountSet(10); // typically more than enough.
pathSizes.setTo(0);
// Map between reference vertices where there is some expected open alternative path rejoining and the
// predicted length of paths rejoining at that point counting from the beginning of the block.
final Map<MultiDeBruijnVertex, CountSet> expectedAlternativePathRejoins = new HashMap<>(4);
// Keeps a record of possible left-clipping vertices: those that are located before any event path furcation
// has been found. The value indicates the blockLength at the time we traverse that node.
final Deque<Pair<MultiDeBruijnVertex, Integer>> possibleClippingPoints = new LinkedList<>();
// We keep the distance from the beginning of the block (leftVertex).
int blockLength = 0;
while (currentVertex != null) {
int openingDegree = backwards ? graph.outDegreeOf(currentVertex) : graph.inDegreeOf(currentVertex);
if (openingDegree > 1) {
final CountSet joiningPathLengths = expectedAlternativePathRejoins.remove(currentVertex);
if (joiningPathLengths != null)
pathSizes.addAll(joiningPathLengths);
}
final boolean isValidBlockEnd = isValidBlockEnd(anchoring, currentVertex, expectedAlternativePathRejoins);
if (foundEvent && isValidBlockEnd) // !gotcha we found a valid block end.
break;
else if (!foundEvent && isValidBlockEnd) // if no event has been found yet, this is still a good clipping point.
possibleClippingPoints.addLast(new Pair<>(currentVertex, blockLength));
// We reached the end:
if (currentVertex == (backwards ? leftVertex : rightVertex))
break;
// process next vertices, the next one on the reference and also possible start of alternative paths,
// updates traversal structures accordingly.
currentVertex = advanceOnReferencePath(anchoring, backwards, currentVertex, pathSizes, expectedAlternativePathRejoins);
foundEvent |= expectedAlternativePathRejoins.size() > 0;
pathSizes.incAll(1);
blockLength++;
}
// we have not found an event, thus there is no block to report:
if (!foundEvent)
return null;
// We try to clip off as much as we can from the beginning of the block before any event, but at least
// leaving enough block length to meet the shortest path unless all paths have the same size (SNPs only)
final int maxClipping = pathSizes.size() <= 1 ? blockLength : pathSizes.min();
MultiDeBruijnVertex clippingEnd = backwards ? anchoring.rightAnchorVertex : anchoring.leftAnchorVertex;
while (!possibleClippingPoints.isEmpty()) {
final Pair<MultiDeBruijnVertex, Integer> candidate = possibleClippingPoints.removeLast();
if (candidate.getSecond() <= maxClipping) {
clippingEnd = candidate.getFirst();
break;
}
}
return resolveEventBlock(backwards ? new Pair<>(currentVertex, clippingEnd) : new Pair<>(clippingEnd, currentVertex));
}
/**
* Gets or constructs a event-block through the cache.
* @param borders the source and sink vertex pair for the requested event block.
* @return never {@code null}
*/
@Requires("borders != null && border.getFirst() != null && border.getSecond() != null")
private EventBlock resolveEventBlock(final Pair<MultiDeBruijnVertex,MultiDeBruijnVertex> borders) {
EventBlock result = eventBlockCache.get(borders);
if (result == null)
eventBlockCache.put(borders,result = new EventBlock(graph, borders.getFirst(),borders.getSecond()));
return result;
}
/**
* Move one vertex along the reference path, checking for the presence of new opening alternative paths.
*
* @param anchoring anchoring information on the targeted read.
* @param backwards whether we are extending the block backwards or forwards.
* @param currentVertex the current vertex.
* @param pathSizes current block path sizes.
* @param expectedAlternativePathRejoins information about location of vertices along the reference path where open alternative paths will rejoin.
* @return the next current-vertex, never {@code null} unless there is a bug.
*/
private MultiDeBruijnVertex advanceOnReferencePath(final ReadAnchoring anchoring, final boolean backwards, final MultiDeBruijnVertex currentVertex, final CountSet pathSizes, final Map<MultiDeBruijnVertex, CountSet> expectedAlternativePathRejoins) {
final Set<MultiSampleEdge> nextEdges = backwards ? graph.incomingEdgesOf(currentVertex) : graph.outgoingEdgesOf(currentVertex);
MultiDeBruijnVertex nextReferenceVertex = null;
for (final MultiSampleEdge e : nextEdges) {
final MultiDeBruijnVertex nextVertex = backwards ? graph.getEdgeSource(e) : graph.getEdgeTarget(e);
if (e.isRef())
nextReferenceVertex = nextVertex;
else {
final CountSet pathSizesPlusOne = pathSizes.clone();
pathSizesPlusOne.incAll(1);
graph.calculateRejoins(nextVertex, expectedAlternativePathRejoins, anchoring.referenceWithinAnchorsMap.keySet(), pathSizesPlusOne, true, backwards);
}
}
return nextReferenceVertex;
}
/**
* Check whether the current vertex is a valid block end.
*
* @param anchoring reads anchoring information necessary to make the evaluation.
* @param currentVertex target potential block end
* @param expectedAlternativePathRejoins traversal states regarding open alternative paths.
*
* @return {@code true} iff so.
*/
private boolean isValidBlockEnd(final ReadAnchoring anchoring, final MultiDeBruijnVertex currentVertex, final Map<MultiDeBruijnVertex, CountSet> expectedAlternativePathRejoins) {
final boolean isUniqueKmer = anchoring.uniqueKmerOffsets.containsKey(currentVertex);
final boolean isAnchorable = graph.getAnchorableVertices().contains(currentVertex) && isUniqueKmer && expectedAlternativePathRejoins.size() == 0;
return isUniqueKmer && isAnchorable;
}
}
<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/afcalc/ExactAFCalculator.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper.afcalc;
import htsjdk.variant.variantcontext.*;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
import java.util.*;
/**
* Uses the Exact calculation of Heng Li
*/
abstract class ExactAFCalculator extends AFCalculator {
protected static final int HOM_REF_INDEX = 0; // AA likelihoods are always first
// useful so that we don't keep printing out the same warning messages
protected static boolean printedMaxAltAllelesWarning = false;
/**
* Sorts {@link ExactAFCalculator.LikelihoodSum} instances where those with higher likelihood are first.
*/
protected static final Comparator<LikelihoodSum> LIKELIHOOD_SUM_COMPARATOR = new Comparator<LikelihoodSum>() {
@Override
public int compare(final LikelihoodSum o1, final LikelihoodSum o2) {
return - Double.compare(o1.sum,o2.sum);
}
};
/**
* Sorts {@link ExactAFCalculator.LikelihoodSum} instances where those with higher likelihood are first, but making sure that
* the NON_REF allele is placed last.
*/
protected static final Comparator<LikelihoodSum> LIKELIHOOD_NON_REF_THEN_SUM_COMPARATOR = new Comparator<LikelihoodSum>() {
@Override
public int compare(final LikelihoodSum o1, final LikelihoodSum o2) {
if (o1.allele == GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE)
return 1;
else if (o2.allele == GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE)
return -1;
else
return o1.compareTo(o2);
}
};
/**
* Sorts {@link ExactAFCalculator.LikelihoodSum} instances where those with lower alternative allele index are first regardless of
* the likelihood sum.
*/
protected static final Comparator<LikelihoodSum> LIKELIHOOD_INDEX_COMPARATOR = new Comparator<LikelihoodSum>() {
@Override
public int compare(final LikelihoodSum o1, final LikelihoodSum o2) {
return Integer.compare(o1.index, o2.index);
}
};
protected ExactAFCalculator() {
}
/**
* Wrapper class that compares two likelihoods associated with two alleles
*/
protected static final class LikelihoodSum implements Comparable<LikelihoodSum> {
public double sum = 0.0;
public final Allele allele;
public final int index;
public LikelihoodSum(final Allele allele, final int index) { this.allele = allele; this.index = index; }
public int compareTo(LikelihoodSum other) {
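// descending order: an instance with a larger likelihood sum compares as "less than" one with a smaller sum, so it sorts first.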
final double diff = sum - other.sum;
return ( diff < 0.0 ) ? 1 : (diff > 0.0 ) ? -1 : 0;
}
}
/**
* Unpack GenotypesContext into arraylist of double values
* @param GLs Input genotype context
* @param includeDummy if true, a dummy genotype likelihood vector of zeros is prepended to the result
* @return ArrayList of doubles corresponding to GL vectors
*/
protected static ArrayList<double[]> getGLs(final GenotypesContext GLs, final boolean includeDummy) {
return getGLs(GLs, includeDummy, false);
}
/**
* Unpack GenotypesContext into arraylist of double values
* @param GLs Input genotype context
* @param includeDummy if true, a dummy genotype likelihood vector of zeros is prepended to the result
* @param keepUninformative Don't filter out uninformative genotype likelihoods (i.e. all log likelihoods near 0)
* This is useful for VariantContexts with a NON_REF allele
* @return ArrayList of doubles corresponding to GL vectors
*/
protected static ArrayList<double[]> getGLs(final GenotypesContext GLs, final boolean includeDummy, final boolean keepUninformative) {
final ArrayList<double[]> genotypeLikelihoods = new ArrayList<>(GLs.size() + 1);
if ( includeDummy ) genotypeLikelihoods.add(new double[]{0.0,0.0,0.0}); // dummy
for ( Genotype sample : GLs.iterateInSampleNameOrder() ) {
if ( sample.hasLikelihoods() ) {
final double[] gls = sample.getLikelihoods().getAsVector();
if ( MathUtils.sum(gls) < GATKVariantContextUtils.SUM_GL_THRESH_NOCALL || keepUninformative )
genotypeLikelihoods.add(gls);
}
}
return genotypeLikelihoods;
}
@Override
protected VariantContext reduceScope(final VariantContext vc, final int defaultPloidy, final int maximumAlternativeAlleles) {
// don't try to genotype too many alternate alleles
final List<Allele> inputAltAlleles = vc.getAlternateAlleles();
final List<Allele> outputAltAlleles = reduceScopeAlleles(vc, defaultPloidy, maximumAlternativeAlleles);
// only if output allele has reduced from the input alt allele set size we should care.
final int altAlleleReduction = inputAltAlleles.size() - outputAltAlleles.size();
if (altAlleleReduction == 0)
return vc;
final String message = String.format("This tool is currently set to genotype at most %d " +
"alternate alleles in a given context, but the context at %s: %d has %d " +
"alternate alleles so only the top alleles will be used; see the --max_alternate_alleles argument",
maximumAlternativeAlleles, vc.getContig(), vc.getStart(), vc.getAlternateAlleles().size());
if ( !printedMaxAltAllelesWarning ) {
printedMaxAltAllelesWarning = true;
logger.warn(message + ". Unless the DEBUG logging level is used, this warning message is output just once per run and further warnings are suppressed.");
} else {
logger.debug(message);
}
final List<Allele> alleles = new ArrayList<>(maximumAlternativeAlleles + 1);
alleles.add(vc.getReference());
alleles.addAll(reduceScopeAlleles(vc, defaultPloidy, maximumAlternativeAlleles));
final VariantContextBuilder builder = new VariantContextBuilder(vc);
builder.alleles(alleles);
builder.genotypes(reduceScopeGenotypes(vc, defaultPloidy, alleles));
if (altAlleleReduction < 0)
throw new IllegalStateException("unexpected: reduction increased the number of alt. alleles!: " + - altAlleleReduction + " " + vc + " " + builder.make());
return builder.make();
}
/**
* Returns the new set of alleles to use.
* @param vc target variant context.
* @param defaultPloidy default ploidy used when calculating the per-allele likelihood sums.
* @param numAllelesToChoose number of alleles to keep.
* @return the list of alternative alleles to keep.
*/
protected List<Allele> reduceScopeAlleles(final VariantContext vc, final int defaultPloidy, final int numAllelesToChoose) {
// Look for the <NON_REF> allele to exclude it from the pruning if present.
final int numOriginalAltAlleles = vc.getAlternateAlleles().size();
final int nonRefAltAlleleIndex = GATKVariantContextUtils.indexOfAltAllele(vc,
GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE, false);
final boolean nonRefAltAllelePresent = nonRefAltAlleleIndex >= 0;
// <NON_REF> should not be considered in the downsizing, so we need to count it out when
// considering if alt. allele downsizing is required.
final int numProperOriginalAltAlleles = numOriginalAltAlleles - (nonRefAltAllelePresent ? 1 : 0);
// Avoid pointless allele reduction:
if (numAllelesToChoose >= numProperOriginalAltAlleles)
return vc.getAlternateAlleles();
final LikelihoodSum[] likelihoodSums = new LikelihoodSum[numOriginalAltAlleles];
for ( int i = 0; i < numOriginalAltAlleles; i++ ) {
final Allele allele = vc.getAlternateAllele(i);
likelihoodSums[i] = new LikelihoodSum(allele,i);
}
// Calculate the allele likelihood sums.
reduceScopeCalculateLikelihoodSums(vc, defaultPloidy, likelihoodSums);
// sort them by probability mass and choose the best ones
// Make sure that the <NON_REF> allele is last if present.
Collections.sort(Arrays.asList(likelihoodSums), nonRefAltAllelePresent ? LIKELIHOOD_NON_REF_THEN_SUM_COMPARATOR : LIKELIHOOD_SUM_COMPARATOR);
// We need to return the best likelihood alleles in the original alternative allele index order.
// This heap will keep track of that index order.
final PriorityQueue<LikelihoodSum> mostLikelyAllelesHeapByIndex = new PriorityQueue<>(numOriginalAltAlleles, LIKELIHOOD_INDEX_COMPARATOR);
for ( int i = 0; i < numAllelesToChoose; i++ )
mostLikelyAllelesHeapByIndex.add(likelihoodSums[i]);
// guaranteed not to have been added at this point, thanks to the earlier check on whether reduction was
// needed in the first place.
if (nonRefAltAllelePresent)
mostLikelyAllelesHeapByIndex.add(likelihoodSums[nonRefAltAlleleIndex]);
final ArrayList<Allele> orderedBestAlleles = new ArrayList<>(numAllelesToChoose);
while (!mostLikelyAllelesHeapByIndex.isEmpty())
orderedBestAlleles.add(mostLikelyAllelesHeapByIndex.remove().allele);
return orderedBestAlleles;
}
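// Illustrative example for the allele reduction above: with proper alternate alleles {C,T,G} plus <NON_REF>
// and numAllelesToChoose == 2, the two proper alternate alleles with the largest likelihood sums are kept and
// returned in their original index order, with <NON_REF> (normally the last alternate allele) retained at the end.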
protected static final int PL_INDEX_OF_HOM_REF = 0;
/**
* Update the likelihood sums using the variant context genotype likelihoods.
* @param vc source variant context.
* @param likelihoodSums where to update the likelihood sums.
*/
protected abstract void reduceScopeCalculateLikelihoodSums(final VariantContext vc, final int defaultPloidy, final LikelihoodSum[] likelihoodSums);
/**
* Transforms the genotypes of the variant context according to the new subset of possible alleles.
*
* @param vc original variant-context.
* @param allelesToUse possible alleles.
* @return never {@code null}, the new set of genotype calls for the reduced scope.
*/
protected abstract GenotypesContext reduceScopeGenotypes(final VariantContext vc, final int defaultPloidy, final List<Allele> allelesToUse);
}<file_sep>/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.genotyper;
import org.apache.log4j.Logger;
import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
import org.broadinstitute.gatk.utils.contexts.AlignmentContextUtils;
import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
import org.broadinstitute.gatk.utils.BaseUtils;
import org.broadinstitute.gatk.utils.GenomeLoc;
import org.broadinstitute.gatk.utils.GenomeLocParser;
import org.broadinstitute.gatk.utils.MathUtils;
import org.broadinstitute.gatk.utils.baq.BAQ;
import org.broadinstitute.gatk.utils.exceptions.UserException;
import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
import org.broadinstitute.gatk.utils.genotyper.DiploidGenotype;
import org.broadinstitute.gatk.utils.gga.GenotypingGivenAllelesUtils;
import org.broadinstitute.gatk.utils.pileup.PileupElement;
import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
import htsjdk.variant.variantcontext.*;
import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
import java.util.*;
public class SNPGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoodsCalculationModel {
private final boolean useAlleleFromVCF;
private final double[] likelihoodSums = new double[4];
private final PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap;
protected SNPGenotypeLikelihoodsCalculationModel(UnifiedArgumentCollection UAC, Logger logger) {
super(UAC, logger);
useAlleleFromVCF = UAC.genotypingOutputMode == GenotypingOutputMode.GENOTYPE_GIVEN_ALLELES;
perReadAlleleLikelihoodMap = new PerReadAlleleLikelihoodMap();
}
public VariantContext getLikelihoods(final RefMetaDataTracker tracker,
final ReferenceContext ref,
final Map<String, AlignmentContext> contexts,
final AlignmentContextUtils.ReadOrientation contextType,
final List<Allele> allAllelesToUse,
final boolean useBAQedPileup,
final GenomeLocParser locParser,
final Map<String, PerReadAlleleLikelihoodMap> sampleLikelihoodMap) {
sampleLikelihoodMap.clear(); // not used in SNP model, sanity check to delete any older data
final byte refBase = ref.getBase();
final int indexOfRefBase = BaseUtils.simpleBaseToBaseIndex(refBase);
// handle non-standard reference bases
if ( indexOfRefBase == -1 )
return null;
final Allele refAllele = Allele.create(refBase, true);
// calculate the GLs
ArrayList<SampleGenotypeData> GLs = new ArrayList<SampleGenotypeData>(contexts.size());
for ( Map.Entry<String, AlignmentContext> sample : contexts.entrySet() ) {
// Down-sample with bias according to the contamination level (global or per file)
ReadBackedPileup pileup = AlignmentContextUtils.stratify(sample.getValue(), contextType).getBasePileup();
final Double contamination = UAC.getSampleContamination().get(sample.getKey());
if( contamination > 0.0 ) //no need to enter if no contamination reduction
pileup = perReadAlleleLikelihoodMap.createPerAlleleDownsampledBasePileup(pileup, contamination);
if ( useBAQedPileup )
pileup = createBAQedPileup(pileup);
// create the GenotypeLikelihoods object
final DiploidSNPGenotypeLikelihoods GL = new DiploidSNPGenotypeLikelihoods(UAC.PCR_error);
final int nGoodBases = GL.add(pileup, true, true, UAC.MIN_BASE_QUALTY_SCORE);
if ( nGoodBases > 0 )
GLs.add(new SampleGenotypeData(sample.getKey(), GL, getFilteredDepth(pileup)));
}
// start making the VariantContext
final GenomeLoc loc = ref.getLocus();
final List<Allele> alleles = new ArrayList<Allele>();
alleles.add(refAllele);
final VariantContextBuilder builder = new VariantContextBuilder("UG_call", loc.getContig(), loc.getStart(), loc.getStop(), alleles);
// find the alternate allele(s) that we should be using
if ( allAllelesToUse != null ) {
alleles.addAll(allAllelesToUse.subList(1,allAllelesToUse.size())); // skip index 0, since allAllelesToUse includes the ref allele
} else if ( useAlleleFromVCF ) {
final VariantContext vc = GenotypingGivenAllelesUtils.composeGivenAllelesVariantContextFromRod(tracker, ref.getLocus(), true, logger, UAC.alleles);
// ignore places where we don't have a SNP
if ( vc == null || !vc.isSNP() )
return null;
// make sure a user isn't passing the REF base in as an ALT
if ( vc.hasAlternateAllele(refAllele, true) )
throw new UserException.BadInput("Alternate allele '" + (char)refBase + "' passed in is the same as the reference at location " + vc.getChr() + ":" + vc.getStart());
alleles.addAll(vc.getAlternateAlleles());
} else {
alleles.addAll(determineAlternateAlleles(refBase, GLs));
// if there are no non-ref alleles...
if ( alleles.size() == 1 ) {
// if we only want variants, then we don't need to calculate genotype likelihoods
if ( UAC.outputMode == OutputMode.EMIT_VARIANTS_ONLY )
return builder.make();
else
// otherwise, choose any alternate allele (it doesn't really matter)
alleles.add(Allele.create(BaseUtils.baseIndexToSimpleBase(indexOfRefBase == 0 ? 1 : 0)));
}
}
// create the alternate alleles and the allele ordering (the ordering is crucial for the GLs)
final int numAlleles = alleles.size();
final int numAltAlleles = numAlleles - 1;
final int[] alleleOrdering = new int[numAlleles];
int alleleOrderingIndex = 0;
int numLikelihoods = 0;
for ( Allele allele : alleles ) {
alleleOrdering[alleleOrderingIndex++] = BaseUtils.simpleBaseToBaseIndex(allele.getBases()[0]);
numLikelihoods += alleleOrderingIndex;
}
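// note: since alleleOrderingIndex is added after each increment, numLikelihoods ends up as
// numAlleles * (numAlleles + 1) / 2, i.e. the number of distinct diploid genotypes.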
builder.alleles(alleles);
// create the PL ordering to use based on the allele ordering.
final int[] PLordering = new int[numLikelihoods];
for ( int i = 0; i <= numAltAlleles; i++ ) {
for ( int j = i; j <= numAltAlleles; j++ ) {
// As per the VCF spec: "the ordering of genotypes for the likelihoods is given by: F(j/k) = (k*(k+1)/2)+j.
// In other words, for biallelic sites the ordering is: AA,AB,BB; for triallelic sites the ordering is: AA,AB,BB,AC,BC,CC, etc."
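// For example, with reference allele index 0 and alternate allele indices 1 and 2, the formula gives
// PL positions 0/0 -> 0, 0/1 -> 1, 1/1 -> 2, 0/2 -> 3, 1/2 -> 4, 2/2 -> 5.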
PLordering[(j * (j+1) / 2) + i] = DiploidGenotype.createDiploidGenotype(alleleOrdering[i], alleleOrdering[j]).ordinal();
}
}
// create the genotypes; no-call everyone for now
final GenotypesContext genotypes = GenotypesContext.create();
final int ploidy = UAC.genotypeArgs.samplePloidy;
final List<Allele> noCall = GATKVariantContextUtils.noCallAlleles(ploidy);
for ( SampleGenotypeData sampleData : GLs ) {
final double[] allLikelihoods = sampleData.GL.getLikelihoods();
final double[] myLikelihoods = new double[numLikelihoods];
for ( int i = 0; i < numLikelihoods; i++ )
myLikelihoods[i] = allLikelihoods[PLordering[i]];
// normalize in log space so that max element is zero.
final GenotypeBuilder gb = new GenotypeBuilder(sampleData.name);
final double[] genotypeLikelihoods = MathUtils.normalizeFromLog10(myLikelihoods, false, true);
gb.PL(genotypeLikelihoods);
gb.DP(sampleData.depth);
gb.alleles(noCall);
if (UAC.annotateAllSitesWithPLs)
gb.attribute(GATKVCFConstants.PL_FOR_ALL_SNP_ALLELES_KEY,GenotypeLikelihoods.fromLog10Likelihoods(MathUtils.normalizeFromLog10(allLikelihoods, false, true)));
genotypes.add(gb.make());
}
return builder.genotypes(genotypes).make();
}
// determines the alleles to use
protected List<Allele> determineAlternateAlleles(final byte ref, final List<SampleGenotypeData> sampleDataList) {
final int baseIndexOfRef = BaseUtils.simpleBaseToBaseIndex(ref);
final int PLindexOfRef = DiploidGenotype.createDiploidGenotype(ref, ref).ordinal();
for ( int i = 0; i < 4; i++ )
likelihoodSums[i] = 0.0;
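// likelihoodSums[b] accumulates, over all samples, how much more likely (in log10 space) each sample's
// best genotype containing base b is than its hom-ref genotype.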
// based on the GLs, find the alternate alleles with enough probability
for ( SampleGenotypeData sampleData : sampleDataList ) {
final double[] likelihoods = sampleData.GL.getLikelihoods();
final int PLindexOfBestGL = MathUtils.maxElementIndex(likelihoods);
if ( PLindexOfBestGL != PLindexOfRef ) {
GenotypeLikelihoods.GenotypeLikelihoodsAllelePair alleles = GenotypeLikelihoods.getAllelePair(PLindexOfBestGL);
if ( alleles.alleleIndex1 != baseIndexOfRef )
likelihoodSums[alleles.alleleIndex1] += likelihoods[PLindexOfBestGL] - likelihoods[PLindexOfRef];
// don't double-count it
if ( alleles.alleleIndex2 != baseIndexOfRef && alleles.alleleIndex2 != alleles.alleleIndex1 )
likelihoodSums[alleles.alleleIndex2] += likelihoods[PLindexOfBestGL] - likelihoods[PLindexOfRef];
}
}
final List<Allele> allelesToUse = new ArrayList<Allele>(3);
for ( int i = 0; i < 4; i++ ) {
if ( likelihoodSums[i] > 0.0 )
allelesToUse.add(Allele.create(BaseUtils.baseIndexToSimpleBase(i), false));
}
return allelesToUse;
}
public ReadBackedPileup createBAQedPileup( final ReadBackedPileup pileup ) {
final List<PileupElement> BAQedElements = new ArrayList<PileupElement>();
for( final PileupElement PE : pileup ) {
final PileupElement newPE = new BAQedPileupElement( PE );
BAQedElements.add( newPE );
}
return new ReadBackedPileupImpl( pileup.getLocation(), BAQedElements );
}
public static class BAQedPileupElement extends PileupElement {
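// A pileup element whose reported base quality is recomputed from the read's BAQ tag; deletions keep
// their original quality (see getQual below).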
public BAQedPileupElement( final PileupElement PE ) {
super(PE);
}
@Override
public byte getQual() {
if ( isDeletion() )
return super.getQual();
else
return BAQ.calcBAQFromTag(getRead(), offset, true);
}
}
private static class SampleGenotypeData {
public final String name;
public final DiploidSNPGenotypeLikelihoods GL;
public final int depth;
public SampleGenotypeData(final String name, final DiploidSNPGenotypeLikelihoods GL, final int depth) {
this.name = name;
this.GL = GL;
this.depth = depth;
}
}
}
<file_sep>/src/test/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/AssemblyResultSetUnitTest.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.tools.walkers.haplotypecaller;
import htsjdk.samtools.SAMFileHeader;
import org.broadinstitute.gatk.utils.BaseTest;
import org.broadinstitute.gatk.tools.walkers.haplotypecaller.graphs.SeqGraph;
import org.broadinstitute.gatk.tools.walkers.haplotypecaller.readthreading.ReadThreadingGraph;
import org.broadinstitute.gatk.utils.GenomeLoc;
import org.broadinstitute.gatk.utils.GenomeLocParser;
import org.broadinstitute.gatk.utils.RandomDNA;
import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
import org.broadinstitute.gatk.utils.haplotype.Haplotype;
import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.*;
/**
* Tests for {@link AssemblyResultSet}.
*
* @author <NAME> <<EMAIL>>
*/
public class AssemblyResultSetUnitTest extends BaseTest {
private GenomeLocParser genomeLocParser;
private SAMFileHeader header;
@BeforeClass
public void init() {
header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
}
@Test
public void testEmptyResultSet() {
final AssemblyResultSet subject = new AssemblyResultSet();
Assert.assertEquals(subject.getHaplotypeList().size(), 0);
Assert.assertEquals(subject.getHaplotypeCount(),0);
Assert.assertEquals(subject.getReferenceHaplotype(),null);
Assert.assertEquals(subject.getFullReferenceWithPadding(),null);
Assert.assertEquals(subject.getPaddedReferenceLoc(),null);
Assert.assertEquals(subject.getRegionForGenotyping(),null);
Assert.assertEquals(subject.getUniqueReadThreadingGraph(10),null);
Assert.assertFalse(subject.hasMultipleKmerSizes());
}
@Test
public void testAddReferenceHaplotype() {
final Haplotype ref = new Haplotype("ACGT".getBytes(),true);
ref.setGenomeLocation(genomeLocParser.createGenomeLoc("chr1",1,ref.length() + 1 ));
final AssemblyResultSet subject = new AssemblyResultSet();
Assert.assertTrue(subject.add(ref));
Assert.assertFalse(subject.add(ref));
Assert.assertEquals(subject.getReferenceHaplotype(),ref);
Assert.assertEquals(subject.getHaplotypeCount(),1);
Assert.assertEquals(subject.getHaplotypeList().size(),1);
}
@Test(dataProvider="assemblyResults")
public void testAddManyHaplotypes(final java.util.List<AssemblyResult> assemblyResults,
final java.util.List<java.util.List<Haplotype>> haplotypes) {
final AssemblyResultSet subject = new AssemblyResultSet();
for (int i = 0; i < haplotypes.size(); i++) {
final int haplotypeCountBefore = subject.getHaplotypeCount();
final java.util.List<Haplotype> haplos = haplotypes.get(i);
final AssemblyResult ar = assemblyResults.get(i);
for (final Haplotype h : haplos) {
Assert.assertTrue(subject.add(h, ar));
Assert.assertFalse(subject.add(h,ar));
if (h.isReference())
Assert.assertEquals(subject.getReferenceHaplotype(),h);
}
final int haplotypeCountAfter = subject.getHaplotypeCount();
Assert.assertEquals(haplos.size(),haplotypeCountAfter - haplotypeCountBefore);
Assert.assertTrue(subject.getMaximumKmerSize() >= ar.getKmerSize());
Assert.assertTrue(subject.getMinimumKmerSize() <= ar.getKmerSize());
Assert.assertEquals(subject.getUniqueReadThreadingGraph(ar.getKmerSize()), ar.getThreadingGraph());
}
}
@Test(dataProvider="trimmingData")
public void testTrimTo(final Map<Haplotype,AssemblyResult> haplotypesAndResultSets, final ActiveRegion original) {
final AssemblyResultSet subject = new AssemblyResultSet();
for (final Map.Entry<Haplotype,AssemblyResult> entry : haplotypesAndResultSets.entrySet())
subject.add(entry.getKey(),entry.getValue());
subject.setRegionForGenotyping(original);
final GenomeLoc originalLocation = original.getExtendedLoc();
final int length = originalLocation.size();
final GenomeLoc newLocation = originalLocation.setStop(originalLocation.setStart(originalLocation,originalLocation.getStart() + length / 2),originalLocation.getStop() - length / 2);
final ActiveRegion newRegion = original.trim(newLocation);
final Map<Haplotype,Haplotype> originalHaplotypesByTrimmed = new HashMap<>(haplotypesAndResultSets.size());
for (final Haplotype h : haplotypesAndResultSets.keySet())
originalHaplotypesByTrimmed.put(h.trim(newRegion.getExtendedLoc()), h);
final AssemblyResultSet trimmed = subject.trimTo(newRegion);
Assert.assertFalse(subject.wasTrimmed());
Assert.assertTrue(trimmed.wasTrimmed());
for (final Haplotype h : trimmed.getHaplotypeList()) {
Assert.assertEquals(h.getGenomeLocation(),newLocation);
Assert.assertEquals(h.getBases().length,newLocation.size());
}
}
@DataProvider(name="trimmingData")
public Iterator<Object[]> trimmingData() {
final ActiveRegion activeRegion = new ActiveRegion(genomeLocParser.createGenomeLoc("chr1",1000,1100),genomeLocParser,25);
final int length = activeRegion.getExtendedLoc().size();
final RandomDNA rnd = new RandomDNA(13); // keep it reproducible by fixing the seed to lucky 13.
final ActiveRegionTestDataSet actd = new ActiveRegionTestDataSet(10,new String(rnd.nextBases(length)),new String[] {
"Civar:*1T*" }, new String[0], new byte[0], new byte[0], new byte[0]);
final List<Haplotype> haplotypes = actd.haplotypeList();
for (final Haplotype h : haplotypes)
h.setGenomeLocation(activeRegion.getExtendedLoc());
final ReadThreadingGraph rtg = new ReadThreadingGraph(10);
for (final Haplotype h : haplotypes)
rtg.addSequence("seq-" + Math.abs(h.hashCode()), h.getBases(), h.isReference());
final SeqGraph seqGraph = rtg.convertToSequenceGraph();
final AssemblyResult ar = new AssemblyResult(AssemblyResult.Status.ASSEMBLED_SOME_VARIATION,seqGraph);
ar.setThreadingGraph(rtg);
final Map<Haplotype,AssemblyResult> result =
new HashMap<>();
for (final Haplotype h : haplotypes)
result.put(h,ar);
return Collections.singleton(new Object[] {result,activeRegion}).iterator();
}
@DataProvider(name="assemblyResults")
public java.util.Iterator<Object[]> assemblyResults() {
final int size = THREE_KS_GRAPH_AND_HAPLOTYPES.length * (1 + TEN_KS_GRAPH_AND_HAPLOTYPES.length);
final Object[][] result = new Object[size][];
for (int i = 0; i < THREE_KS_GRAPH_AND_HAPLOTYPES.length; i++) {
final ReadThreadingGraph rtg = new ReadThreadingGraph((String) THREE_KS_GRAPH_AND_HAPLOTYPES[i][0]);
final AssemblyResult ar = new AssemblyResult(AssemblyResult.Status.ASSEMBLED_SOME_VARIATION,rtg.convertToSequenceGraph());
ar.setThreadingGraph(rtg);
final Object[] haplotypeStrings = (Object[]) THREE_KS_GRAPH_AND_HAPLOTYPES[i][1];
final Haplotype[] haplotypes = new Haplotype[haplotypeStrings.length];
for (int j = 0; j < haplotypeStrings.length; j++) {
haplotypes[j] = new Haplotype(((String)haplotypeStrings[j]).getBytes(),j == 0);
haplotypes[j].setGenomeLocation(genomeLocParser.createGenomeLoc("chr1",1,haplotypes[j].length() + 1));
}
result[i] = new Object[] { Collections.singletonList(ar),Arrays.asList(Arrays.asList(haplotypes))};
for (int j = 0; j < TEN_KS_GRAPH_AND_HAPLOTYPES.length; j++) {
final ReadThreadingGraph rtg10 = new ReadThreadingGraph((String) TEN_KS_GRAPH_AND_HAPLOTYPES[j][0]);
final AssemblyResult ar10 = new AssemblyResult(AssemblyResult.Status.ASSEMBLED_SOME_VARIATION,rtg10.convertToSequenceGraph());
ar10.setThreadingGraph(rtg10);
final Object[] haplotypeStrings10 = (Object[]) TEN_KS_GRAPH_AND_HAPLOTYPES[j][1];
final Haplotype[] haplotype10 = new Haplotype[haplotypeStrings10.length];
for (int k = 0; k < haplotypeStrings10.length; k++) {
haplotype10[k] = new Haplotype(((String)haplotypeStrings10[k]).getBytes(),false);
haplotype10[k].setGenomeLocation(genomeLocParser.createGenomeLoc("chr1", 1, haplotype10[k].length() + 1));
}
result[THREE_KS_GRAPH_AND_HAPLOTYPES.length + i * TEN_KS_GRAPH_AND_HAPLOTYPES.length + j] = new Object[] { Arrays.asList(ar,ar10),
Arrays.asList( Arrays.asList(haplotypes), Arrays.asList(haplotype10)) };
}
}
return Arrays.asList(result).iterator();
}
private static final Object[][] THREE_KS_GRAPH_AND_HAPLOTYPES = new Object[][] {
{"[ks=3]{REF: ACT}",new Object[] {"ACT"}},
{"[ks=3]{REF: ACT(3) -> T(1) -> G(2) -> A}" +
"{ (3) -> A -> G -> (2) }" +
"{ (1) -> A -> G -> (2) }",new Object[] {"ACTTGA","ACTAGGA","ACTTAGGA"}},
{"[ks=3]{REF: ACT -> C(1) -> G}{ACT -> C(1) -> G}{ACT -> C(1) -> G}", new Object[] {"ACTCG"}} ,
{"[ks=3]{REF: ACT -> A(1) -> G -> A(2) -> C -> G -> T }" +
"{A(1) -> T -> A(2) }", new Object[] {"ACTAGACGT","ACTATACGT"}} ,
{"[ks=3]{REF: ACT -> A -> T(2) -> C -> A -> G -> T -> A -> C -> G -> T -> A(1) -> T}" +
"{ ACT -> A -> T(2) -> C -> T -> A -> C -> G -> T -> A(1) -> T}",
new Object[] {"ACTATCAGTACGTAT","ACTATCTACGTAT"}} ,
{"[ks=3]{REF: ACT -> A -> T -> C -> A -> G -> T -> A -> C -> G -> T -> A -> T}",
new Object[] {"ACTATCAGTACGTAT"}},
{"[ks=3]{REF: ACT -> A -> T(1) }" +
"{ ACT -> A -> T(1) }", new Object[] {"ACTAT"}},
{"[ks=3]{REF: TTT -> A(1) -> C -> T(2)}{ A(1) -> T(2) } ", new Object[] {"TTTACT","TTTAT"}}
};
private static final Object[][] TEN_KS_GRAPH_AND_HAPLOTYPES = new Object[][] {
{"[ks=10]{ACTAGTAAAT -> A -> T -> A -> A -> T -> A", new Object[] {"ACTAGTAAATATAATA"}},
{"[ks=10]{ATAGTAATAA(1) -> A -> C -> T -> A(2) -> C}{ (1) -> C -> C -> C -> A(2) -> C}",
new Object[] {"ATAGTAATAAACTAC","ATAGTAATAACCCAC"}},
};
}
<file_sep>/src/main/java/org/broadinstitute/gatk/utils/variant/TandemRepeatFinder.java
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE
* SOFTWARE LICENSE AGREEMENT
* FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 415 Main Street, Cambridge, MA 02142 ("BROAD") and the LICENSEE and is effective at the date the downloading is completed ("EFFECTIVE DATE").
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK3 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. LICENSEE hereby automatically grants to BROAD a non-exclusive, royalty-free, irrevocable license to any LICENSEE bug fixes or modifications to the PROGRAM with unlimited rights to sublicense and/or distribute. LICENSEE agrees to provide any such modifications and bug fixes to BROAD promptly upon their creation.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. PHONE-HOME FEATURE
* LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic reporting system ("PHONE-HOME") which is enabled by default upon download. Unless LICENSEE requests disablement of PHONE-HOME, LICENSEE agrees that BROAD may collect limited information transmitted by PHONE-HOME regarding LICENSEE and its use of the PROGRAM. Such information shall include LICENSEE'S user identification, version number of the PROGRAM and tools being run, mode of analysis employed, and any error reports generated during run-time. Collection of such information is used by BROAD solely to monitor usage rates, fulfill reporting requirements to BROAD funding agencies, drive improvements to the PROGRAM, and facilitate adjustments to PROGRAM-related documentation.
*
* 4. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012-2016 Broad Institute, Inc.
* Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
*
* 5. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 6. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 7. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 8. MISCELLANEOUS
* 8.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 8.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 8.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 8.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 8.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 8.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 8.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
*/
package org.broadinstitute.gatk.utils.variant;
import org.broadinstitute.gatk.utils.Utils;
import java.util.Arrays;
/**
* Utility to find and quantify tandem repeat units in a byte array.
*
* <p>
* This class provides a more efficient implementation of the deprecated
* {@link GATKVariantContextUtils#findNumberOfRepetitions(byte[], byte[], boolean)}
* and RepeatCovariate, which have proven to be inefficient and buggy.
* </p>
*
* <p>
* For now it does not change the logic of those methods in order to preserve the current behaviour, but this
* needs to be revisited at some point with a proper re-evaluation.
*
* Example.
*
* ttcttcttCtgca
*
* When the current offset is at the capital C, the STR unit returned will be TGCA with only one repeat,
* whereas the logical choice is TTC with 3 repeats.
*
* And for further proof, a small modification and its effect:
*
* ttcttcttCttca
*
* Unit T, repeated 2 times.
*
* I would say it should be TTC with 4 repeats instead.
*
* I think we might well be failing to model the actual PCR artifact appropriately:
*
* <a href="http://nar.oxfordjournals.org/content/24/14/2807.full">http://nar.oxfordjournals.org/content/24/14/2807.full</a>
* <a href="http://www.ncbi.nlm.nih.gov/pubmed/12560493">http://www.ncbi.nlm.nih.gov/pubmed/12560493</a>
*
* </p>
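*
* <p>
* A minimal usage sketch (the byte array, offset and size limits below are illustrative assumptions,
* not values taken from the rest of this codebase):
* <pre>
*   final TandemRepeatFinder finder = new TandemRepeatFinder("ttcttcttcttca".getBytes(), 8, 20);
*   final TandemRepeatFinder.Result result = finder.findMostRelevantTandemRepeatUnitAt(8);
*   final byte[] unit = result.getUnit();          // the repeated unit found around offset 8
*   final int repeats = result.getRepeatCount();   // how many times that unit is repeated
* </pre>
* </p>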
*
* @author <NAME> <<EMAIL>>
*/
public final class TandemRepeatFinder {
private final byte[] bases;
private final int maxRepeatCount;
private final int maxUnitLength;
public TandemRepeatFinder(final byte[] bases, final int maxUnitLength, final int maxRepeatLength) {
if (bases == null) throw new IllegalArgumentException();
if (maxRepeatLength < 0) throw new IllegalArgumentException();
if (maxUnitLength < 0) throw new IllegalArgumentException();
this.maxRepeatCount = maxRepeatLength;
this.maxUnitLength = maxUnitLength;
this.bases = bases;
}
/**
* Calculates the number of repeated units of certain length starting at a position.
*
* <p>
* The repeat unit is determined by the original byte array passed to this tandem repeat finder and the input
* offset and length passed to this method based on the following pseudo-code:
*
* <pre>
* if (length > 0) {
* unit = bytes[offset .. (offset + length - 1)]
* } else if (length < 0) {
* unit = bytes[offset + length + 1 .. offset]
* } else { // length == 0
* throw IllegalArgumentException() // not allowed.
* }
* </pre>
* </p>
*
* <p>
* 0 will be returned if given the offset and length, part of the unit falls outside the byte array.
* </p>
* <p>
* Otherwise,
* this method will return the number of repeats (minimum 1, indicating that there are no duplicates), looking only into
* a single direction: if <code>length > 0</code> forward in the byte array <code>byte[offset .. END]</code>,
* if <code>length < 0</code> then backward in the array <code>byte[0 .. offset]</code>.
* </p>
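*
* <p>
* A small illustrative example (the input below is an assumption, not data from this codebase): for
* bases {@code "ACACAC"}, the call {@code numberOfRepeats(0, 2)} scans forward with the unit {@code "AC"}
* and returns 3.
* </p>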
*
* @param offset the offset in the bases byte for which to start
* @param length the unit length, a negative indicates a backward unit.
* @return the number of repeats.
* @throws IllegalArgumentException if {@code length} is 0 or {@code offset} is outside boundaries: (0 .. bases.length - 1)
* where bases is the array passed to this finder at construction.
*/
protected int numberOfRepeats(final int offset, final int length) {
if (length == 0) throw new IllegalArgumentException();
if (offset < 0 || offset >= bases.length) throw new IllegalArgumentException();
int from = offset;
int to = offset + length;
if (to > bases.length || to < -1) return 0;
final int increment = length < 0 ? -1 : 1;
final int stop = length < 0? -1 : bases.length;
int totalLength = 0;
while (to != stop) {
if (bases[to] != bases[from]) break;
to += increment;
from += increment;
totalLength++;
}
return 1 + totalLength / Math.abs(length);
}
public final class Result {
private final int unitLength;
private final int unitOffset;
private final int repeatCount;
private Result(final int unitOffset, final int unitLength, final int repeatCount) {
this.unitOffset = unitOffset;
this.unitLength = unitLength;
this.repeatCount = repeatCount;
}
/**
* Returns the repeated unit byte sequence.
* @return never {@code null}.
*/
public byte[] getUnit() {
return Arrays.copyOfRange(bases,unitOffset, unitOffset + unitLength);
}
/**
* Returns the original search bases.
*
* @return never {@code null}.
*/
public byte[] getBases() {
return bases;
}
/**
* Returns the unit offset.
*
* @return 0 to {@code getBases().length - 1}
*/
public int getUnitOffset() {
return unitOffset;
}
/**
* Returns the unit length.
*
* @return 0 to {@code getBases().length - 1}
*/
public int getUnitLength() {
return unitLength;
}
/**
* Returns the number of repeats of the unit in the input sequence.
* @return 0 or greater.
*/
public int getRepeatCount() {
return repeatCount;
}
}
/**
* Re-implements {@link RepeatCovariate#findTandemRepeatUnits(byte[], int)}.
*
* @param offset search offset.
* @return never {@code null}.
*/
public Result findMostRelevantTandemRepeatUnitAt(final int offset) {
// Notice that this code is not very nice and is rather long, but it is just a copy of the existing one implemented
// in RepeatCovariate; eventually this should be improved.
// First we find the best backward repeat unit ending at the offset, then we look forward for a repeat.
int bestBWRepeatCount = 0;
int bestBWOffset = offset;
int bestBWLength = 1;
for (int str = 1; str <= maxUnitLength; str++) {
final int repeatCount = numberOfRepeats(offset, -str);
if (repeatCount == 0) {
break;
} else if ((bestBWRepeatCount = repeatCount) > 1) {
bestBWOffset = offset - str + 1;
bestBWLength = str;
break;
}
}
// The best forward:
final int bestFWOffset = offset + 1;
int bestFWLength = 1;
int bestFWRepeatCount = 0;
for (int str = 1; str <= maxUnitLength; str++) {
final int repeatCount = numberOfRepeats(bestFWOffset, str);
if (repeatCount == 0) {
break;
} else if ((bestFWRepeatCount = repeatCount) > 1) {
bestFWLength = str;
break;
}
}
// And we combine the forward and backward results; if they differ, the forward repeat has priority:
if (bestFWLength == bestBWLength && Utils.equalRange(bases, bestFWOffset, bases, bestBWOffset, bestFWLength)) {
return new Result(bestBWOffset, bestBWLength, Math.min(maxRepeatCount, bestBWRepeatCount + bestFWRepeatCount));
}
else {
final int bestFWBackwardRepeatCount = numberOfRepeats(bestFWOffset + bestFWLength - 1, - bestFWLength) - 1;
return new Result(bestFWOffset, bestFWLength, Math.min(maxRepeatCount, bestFWRepeatCount + bestFWBackwardRepeatCount));
}
}
}
| 0c568565d328cbb3572d4efdd480dd5acc40d757 | [
"Markdown",
"Java"
] | 68 | Java | bazykinlab/gatk-maternal-cell-contamination | dbdc98ad4f4be9b3a02f92130116b8cede73d723 | b576cee1a3efe08d9584b7fdc74ac866a03af613 | |
refs/heads/main | <repo_name>NjeriNjoroge/WeSplit<file_sep>/WeSplit/ContentView.swift
//
// ContentView.swift
// WeSplit
//
// Created by Grace on 12/01/2021.
//
import SwiftUI
struct ContentView: View {
@State private var name = ""
var body: some View {
Form {
TextField("Enter name", text: $name)
Text("Hello \(name)")
}
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
| b68c46e437e2cd5f47633fd1b32bac6a5868972e | [
"Swift"
] | 1 | Swift | NjeriNjoroge/WeSplit | e477eaf3107e59b57f63f7fee2f5c0841f74bd68 | 9a9d4ff79766183fd2d54c519c44964fcfe96cfe | |
refs/heads/master | <file_sep>#include <bits/stdc++.h>
using namespace std;
int main()
{
// freopen("input.txt", "r", stdin);
// freopen("output.txt", "w", stdout);
long long int n, x, cnt=-1;
vector<int>arr;
cin>>n;
for(int i=0; i<n; i++){
cin>>x;
arr.push_back(x);
}
for(int i=0; i<n; i++, cnt++){
// greedily take a 2-step jump whenever the cloud two positions ahead is safe (0)
if(i<n-2 && arr[i+2]==0)i++;
}
cout<<cnt<<endl;
return 0;
}
<file_sep>#include <bits/stdc++.h>
using namespace std;
int main()
{
// freopen("input.txt", "r", stdin);
// freopen("output.txt", "w", stdout);
long long int n, x, tc;
vector<int>arr;
cin>>n>>tc;
for(int i=0; i<n; i++){
cin>>x;
arr.push_back(x);
}
while(tc--){
int tmp=arr[0];
for(int i=0; i<n-1; i++){
arr[i]=arr[i+1];
}
arr[n-1]=tmp;
}
for(int i=0;i <n; i++)cout<<arr[i]<<' ';
return 0;
}
<file_sep># Interveiw-Preparation-Kit
HackerRank Problem Solution
<file_sep>#include <bits/stdc++.h>
using namespace std;
/// Typedef
typedef long long ll;
typedef unsigned long ul;
typedef unsigned long long ull;
typedef vector<int> vi;
typedef vector<vi> vvi;
typedef vector<ll> vll;
typedef pair<int, int> pii;
typedef pair<ll, ll> pll;
typedef vector<pii> vii;
#define pb push_back
#define ppb pop_back
#define MP make_pair
#define ff first
#define ss second
#define sf scanf
#define pf printf
#define SQR(x) ((x) * (x))
#define loop(i, y) for (int i = 0; i < int(y); i++)
#define FOR(i, x, y) for (int i = int(x); i < int(y); i++)
#define ROF(i, x, y) for (int i = int(x); i >= int(y); i--)
#define ALL(c) c.begin(), c.end()
#define SZ(c) int(c.size())
#define CLR(x, y) memset(x, y, sizeof(x))
#define READ(f) freopen(f, "r", stdin)
#define WRITE(f) freopen(f, "w", stdout)
#define FastIO ios_base::sync_with_stdio(false)
#define tr(it, container) \
for (auto it = container.begin(); it != container.end(); it++)
#define sci(x) scanf("%d", &x)
#define scii(x, y) scanf("%d %d", &x, &y)
#define sciii(x, y, z) scanf("%d %d %d", &x, &y, &z)
#define scl(x) scanf("%lld", &x)
#define scll(x, y) scanf("%lld %lld", &x, &y)
#define sclll(x, y, z) scanf("%lld %lld %lld", &x, &y, &z)
#define scllll(x, y, z, p) scanf("%lld %lld %lld %lld", &x, &y, &z, &p)
#define bitCheck(N, in) ((bool)(N & (1 << (in))))
#define bitOff(N, in) (N & (~(1LL << (in))))
#define bitOn(N, in) (N | (1LL << (in)))
#define bitFlip(a, k) (a ^ (1LL << (k)))
#define unq(v) sort(all(v)), (v).erase(unique(all(v)), v.end())
#define common(a, b) \
sort(all(a)), sort(all(b)), \
a.erase(set_intersection(all(a), all(b), a.begin()), a.end())
#define uncommon(a, b) \
sort(all(a)), sort(all(b)), \
a.erase(set_symmetric_difference(all(a), all(b), a.begin()), a.end())
#define dbg(x) cout << #x << " = " << x << endl;
// template <typename T> using orderset = tree <T, null_type, less<T>,
// rb_tree_tag,tree_order_statistics_node_update>; *X.find_by_order(k) //returns
// the kth largest element.(0-based) X.order_of_key(val) //returns the no. of
// values less than val
// uniform random generator -->
// uniform_int_distribution<int> dist(0, 1);
// default_random_engine gen;
template <class T> inline T bigMod(T p, T e, T M) {
ll ret = 1;
for (; e > 0; e >>= 1) {
if (e & 1)
ret = (ret * p) % M;
p = (p * p) % M;
}
return (T)ret;
}
template <class T> inline T modInverse(T a, T M) { return bigMod(a, M - 2, M); }
template <class T> inline T gcd(T a, T b) { return b == 0 ? a : gcd(b, a % b); }
template <class T> inline T lcm(T a, T b) {
a = abs(a);
b = abs(b);
return (a / gcd(a, b)) * b;
}
template <class T> inline string int2String(T a) {
ostringstream str;
str << a;
return str.str();
}
const int dr[] = {0, 1, 0, -1, -1, 1, 1, -1, -2, -2, 2, 2, -1, -1, 1, 1};
const int dc[] = {1, 0, -1, 0, 1, 1, -1, -1, -1, 1, -1, 1, -2, 2, -2, 2};
/// Constants
#define MAX 10000007
#define MOD 1000000009
#define base 1000000007
#define eps 1e-9
#define INF 1llu << 61 // 2,305,843,009,213,693,952
#define inf 1 << 29 // 536,870,912
#define PI acos(-1.0) // 3.1415926535897932
int arr[10][10];
int main() {
ll sum = 0, m, z = -23234235;
for (int i = 0; i < 6; i++) {
for (int j = 0; j < 6; j++) {
cin >> arr[i][j];
}
}
for (int i = 0; i < 4; i++) {
for (int j = 0; j < 4; j++) {
sum = arr[i][j] + arr[i][j + 1] + arr[i][j + 2] + arr[i + 1][j + 1] +
arr[i + 2][j] + arr[i + 2][j + 1] + arr[i + 2][j + 2];
if (sum > z)
z = sum;
}
}
cout << z << endl;
return 0;
}
<file_sep>#include <bits/stdc++.h>
using namespace std;
bool visited[100005];
int main()
{
// freopen("input.txt", "r", stdin);
// freopen("output.txt", "w", stdout);
long long int n, x, cnt=0;
cin>>n;
pair<int, int>number[n];
vector<int>arr;
for(int i=1; i<=n; i++){
cin>>x;
arr.push_back(x);
}
for(int i=0;i <n; i++){
number[i].first=arr[i];
number[i].second=i;
}
sort(number, number+n);
// count cycles in the permutation: a cycle of length k needs k-1 swaps
for(int i=0; i<n; i++){
if(visited[i] || number[i].second==i)continue;
x=0;
int j=i;
while(!visited[j]){
visited[j]=1;
j=number[j].second;
x++;
}
cnt+=(x-1);
}
cout<<cnt<<endl;
return 0;
}
| f5ce121d5bfa7ff39603304356d0e3fba5d7b290 | [
"Markdown",
"C++"
] | 5 | C++ | ismail5g/Interveiw-Preparation-Kit | 7caec9b5d22de6a77b966ceaf57e896b4b622a1d | 9c3c34c55d5e6f7f98a047b4b07f0bef57813c3b | |
refs/heads/master | <file_sep>import React, { useState, useEffect } from "react";
import Header from "../header/Header";
import headerBackground from "../../assets/images/Banner.jpg";
import Filter from "../filter/Filter";
import JobCard from "../jobCard/JobCard";
import Footer from "../footer/Footer";
import Layout from "../Layout";
/* const Home = () => {
const [jobCards, setJobCards] = useState([]);
useEffect(() => {
fetch("https://jobs-platzi-master.herokuapp.com/jobs")
.then((response) => response.json())
.then((data) => setJobCards(data.body));
}, []);
console.log("home data: ", jobCards);
return (
<React.Fragment>
<Header bgImage={headerBackground}>
<Filter />
</Header>
<Layout>
<JobCard />
</Layout>
<Footer />
</React.Fragment>
);
}; */
class Home extends React.Component {
constructor(props) {
super(props);
this.state = { data: [] }; // state should be an object; 'data' will hold the fetched jobs
}
componentDidMount() {
fetch("https://jobs-platzi-master.herokuapp.com/jobs")
.then((response) => response.json())
.then((data) => this.setState({ data }));
}
render() {
console.log("Este es el state: ", this.state.data);
return (
<React.Fragment>
<Header bgImage={headerBackground}>
<Filter />
</Header>
<Layout>
<JobCard />
</Layout>
<Footer />
</React.Fragment>
);
}
}
export default Home;
| 54139a8631b4fc4f6ae4877826f09e3978cae5f0 | [
"JavaScript"
] | 1 | JavaScript | andresmorenoj/Test-project | 5641370fec8bc1bcff11a73d898e7f75dc6849e7 | 986838273873671f279f5bbd85e9e72642662bb6 | |
refs/heads/master | <file_sep>// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef SERVICES_ML_MODEL_IMPL_MAC_H_
#define SERVICES_ML_MODEL_IMPL_MAC_H_
#include "common.h"
namespace ml {
class CompilationImplMac;
class ModelImplMac {
public:
ModelImplMac();
~ModelImplMac();
int32_t AddOperand(int32_t type, const std::vector<uint32_t>& dimensions, float scale, int32_t zeroPoint);
int32_t SetOperandValue(uint32_t index, const std::vector<float>& data);
int32_t AddOperation(int32_t type, const std::vector<uint32_t>& inputs, const std::vector<uint32_t>& outputs);
int32_t IdentifyInputsAndOutputs(const std::vector<uint32_t>& inputs, const std::vector<uint32_t>& outputs);
// void Finish(mojom::ModelInfoPtr model_info, FinishCallback callback);
void CreateCompilation();
private:
friend class CompilationImplMac;
std::vector<Operand> operands_;
std::vector<Operation> operations_;
std::map<uint32_t, ValueInfo> values_;
std::vector<uint32_t> inputs_;
std::vector<uint32_t> outputs_;
};
} // namespace ml
#endif // SERVICES_ML_MODEL_IMPL_MAC_H_
<file_sep>//
// main.cpp
// MPSNativeSample
//
// Created by mac-webgl-stable on 1/31/19.
// Copyright © 2019 mac-webgl-stable. All rights reserved.
//
#include <iostream>
#include "test_cases.h"
#include "depthwise_conv_test.h"
#include "resize_bilinear_test.h"
#include "average_pool_test.h"
int main(int argc, const char * argv[]) {
// ml::ConvFloat();
// ml::Depthwise28_28Conv5_5();
//
// ml::Depthwise28_28_528Conv5_5_528();
// ml::ResizeBilinear65_65To513_513();
//
// ml::ResizeBilinear65_65_21To513_513_21();
ml::AveragePool();
return 0;
}
<file_sep>// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef SERVICES_ML_DEPTHWISE_CONV_TEST_H_
#define SERVICES_ML_DEPTHWISE_CONV_TEST_H_
namespace ml {
void DepthwiseConv2dFloatLarge();
void Depthwise28_28Conv5_5();
void Depthwise28_28_528Conv5_5_528();
} // namespace ml
#endif // SERVICES_ML_DEPTHWISE_CONV_TEST_H_
<file_sep>// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "common.h"
#include "constants.h"
namespace ml {
uint32_t product(const std::vector<uint32_t>& dims) {
uint32_t prod = 1;
for (size_t i = 0; i < dims.size(); ++i) prod *= dims[i];
return prod;
}
uint32_t GetRequiredSize(int32_t type, const std::vector<uint32_t>& dimensions) {
if (type == FLOAT32) {
return sizeof(float);
} else if (type == INT32) {
return sizeof(int32_t);
} else if (type == UINT32) {
return sizeof(uint32_t);
} else if (type == TENSOR_FLOAT32) {
return product(dimensions) * sizeof(float);
} else if (type == TENSOR_INT32) {
return product(dimensions) * sizeof(int32_t);
} else if (type == TENSOR_QUANT8_ASYMM) {
return product(dimensions) * sizeof(int8_t);
}
return 0;
}
Operand::Operand() = default;
Operand::~Operand() = default;
Operand::Operand(const Operand&) = default;
uint32_t Operand::requiredSize() const {
return GetRequiredSize(type, dimensions);
}
Operation::Operation() = default;
Operation::~Operation() = default;
Operation::Operation(const Operation&) = default;
//
//OperandInfo::OperandInfo(uint32_t length, void* mapping) :
// length(length), mapping(std::move(mapping)) {}
//
//OperandInfo::~OperandInfo() {}
ValueInfo::ValueInfo() = default;
ValueInfo::~ValueInfo() = default;
ValueInfo::ValueInfo(const ValueInfo&) = default;
int32_t getScalarInt32(const ValueInfo& info, int8_t* memory) {
return info.data[0];
}
// There are no viable overloaded operator[] for type
// 'const std::map<uint32_t, ValueInfo>', so uses 'find' instead of it.
int32_t getScalarInt32(const std::map<uint32_t, ValueInfo>& values,
uint32_t key,
int8_t* memory) {
auto iter = values.find(key);
if (iter == values.end()) {
assert(0);
return -1;
}
return getScalarInt32(iter->second, memory);
}
float getScalarFloat(const ValueInfo& info, int8_t* memory) {
return info.data[0];
}
float getScalarFloat(const std::map<uint32_t, ValueInfo>& values,
uint32_t key,
int8_t* memory) {
auto iter = values.find(key);
if (iter == values.end()) {
assert(0);
return -1.0;
}
return getScalarFloat(iter->second, memory);
}
}
<file_sep>//
// constants.h
// MPSNativeSample
//
// Created by mac-webgl-stable on 1/31/19.
// Copyright © 2019 mac-webgl-stable. All rights reserved.
//
#ifndef constants_h
#define constants_h
// Operand types.
const int32_t FLOAT32 = 0;
const int32_t INT32 = 1;
const int32_t UINT32 = 2;
const int32_t TENSOR_FLOAT32 = 3;
const int32_t TENSOR_INT32 = 4;
const int32_t TENSOR_QUANT8_ASYMM = 5;
// Operation types.
const int32_t ADD = 0;
const int32_t AVERAGE_POOL_2D = 1;
const int32_t CONCATENATION = 2;
const int32_t CONV_2D = 3;
const int32_t DEPTHWISE_CONV_2D = 4;
const int32_t DEPTH_TO_SPACE = 5;
const int32_t DEQUANTIZE = 6;
const int32_t EMBEDDING_LOOKUP = 7;
const int32_t FLOOR = 8;
const int32_t FULLY_CONNECTED = 9;
const int32_t HASHTABLE_LOOKUP = 10;
const int32_t L2_NORMALIZATION = 11;
const int32_t L2_POOL_2D = 12;
const int32_t LOCAL_RESPONSE_NORMALIZATION = 13;
const int32_t LOGISTIC = 14;
const int32_t LSH_PROJECTION = 15;
const int32_t LSTM = 16;
const int32_t MAX_POOL_2D = 17;
const int32_t MUL = 18;
const int32_t RELU = 19;
const int32_t RELU1 = 20;
const int32_t RELU6 = 21;
const int32_t RESHAPE = 22;
const int32_t RESIZE_BILINEAR = 23;
const int32_t RNN = 24;
const int32_t SOFTMAX = 25;
const int32_t SPACE_TO_DEPTH = 26;
const int32_t SVDF = 27;
const int32_t TANH = 28;
const int32_t ATROUS_CONV_2D = 10003;
const int32_t ATROUS_DEPTHWISE_CONV_2D = 10004;
// Fused activation function types.
const int32_t FUSED_NONE = 0;
const int32_t FUSED_RELU = 1;
const int32_t FUSED_RELU1 = 2;
const int32_t FUSED_RELU6 = 3;
// Implicit padding algorithms.
const int32_t PADDING_SAME = 1;
const int32_t PADDING_VALID = 2;
// Execution preferences.
const int32_t PREFER_LOW_POWER = 0;
const int32_t PREFER_FAST_SINGLE_ANSWER = 1;
const int32_t PREFER_SUSTAINED_SPEED = 2;
const int32_t NOT_ERROR = 0;
const int32_t OUT_OF_MEMORY = 1;
const int32_t INCOMPLETE = 2;
const int32_t UNEXPECTED_NULL = 3;
const int32_t BAD_DATA = 4;
const int32_t OP_FAILED = 5;
const int32_t UNMAPPABLE = 5;
const int32_t BAD_STATE = 6;
#endif /* constants_h */
| a39728a532205678e18952bab20d07d2c4d34382 | [
"C",
"C++"
] | 5 | C++ | fujunwei/mps-native-sample | 182176516eca874bc347820e12fbf199e3e8072b | 00fcba414972dbb607b49a430ee0137bfff4fc39 | |
refs/heads/master | <file_sep>from tensorflow.keras.models import load_model
import streamlit as st
import numpy as np
from keras.preprocessing import image
from io import BytesIO
st.set_option('deprecation.showfileUploaderEncoding', False)
classifier = load_model("classifier.h5")
STYLE = """
<style>
img {
max-width: 50%;
}
</style>
"""
pneumonia_html="""
<div style="background-color:#F08080;padding:10px">
<h2 style="color:black;text-align:center;"> Prediction: Pneumonic Lung</h2>
</div>
"""
normal_html="""
<div style="background-color:#74F61D;padding:10px">
<h2 style="color:white;text-align:center;"> Prediction: Normal Lung </h2>
</div>
"""
st.title("Lung X-RAY classification")
st.markdown(STYLE, unsafe_allow_html = True)
file = st.file_uploader("Upload the Lung X-RAY image to be analysed", type= ["PNG", "JPEG","JPG"])
show_file = st.empty()
if not file:
show_file.info("Please upload a file of type: " + ", ".join(["PNG","JPEG"]))
else:
content = file.getvalue()
if isinstance(file, BytesIO):
show_file.image(file)
test_image = image.load_img(file,target_size = (64,64))
test_image = image.img_to_array(test_image)
test_image = np.expand_dims(test_image,axis=0)
result = classifier.predict(test_image)
if (result[0][0]) == 1:
st.markdown(pneumonia_html,unsafe_allow_html=True)
else:
st.markdown(normal_html,unsafe_allow_html=True)
<file_sep>streamlit == 0.64.0
tensorflow ==1.14.0
keras == 2.3.1
numpy == 1.18.2
| aea92d80522cea787b7b0dc63365a99c64061f9c | [
"Python",
"Text"
] | 2 | Python | calm-n-cool/Web_app | 79d16d967ef872ffb7b21d1cff292132e6f2d5b9 | a4e5fed869d69275c806be67cf3a973879032eae | |
refs/heads/master | <repo_name>paranoidq/simple-rpc<file_sep>/rpc-v1/src/main/java/me/test/RpcTest.java
package me.test;
import me.rpc.core.MessageSendExecutor;
import me.rpc.core.RpcSerializeProtocol;
import org.apache.commons.lang3.time.StopWatch;
import java.util.concurrent.CountDownLatch;
/**
* @author paranoidq
* @since 1.0.0
*/
public class RpcTest {
public static void main(String[] args) throws InterruptedException {
final MessageSendExecutor executor = new MessageSendExecutor("127.0.0.1:18888", RpcSerializeProtocol.JDK_SERIALIZE);
int parallel = 1000;
StopWatch sw = new StopWatch();
sw.start();
CountDownLatch signal = new CountDownLatch(1);
CountDownLatch finish = new CountDownLatch(parallel);
for (int index = 0; index < parallel; index++) {
CalcParallelRequestThread client = new CalcParallelRequestThread(
signal, finish, executor, index
);
new Thread(client).start();
}
signal.countDown();
finish.await();
sw.stop();
String tip = String.format("Total RPC invocation time: [%s] ms", sw.getTime());
System.out.println(tip);
executor.stop();
}
}
<file_sep>/rpc-v1/src/main/java/me/rpc/core/MessageSendExecutor.java
package me.rpc.core;
import com.google.common.reflect.Reflection;
/**
* @author paranoidq
* @since 1.0.0
*/
public class MessageSendExecutor {
private RpcServerLoader loader = RpcServerLoader.getInstance();
public MessageSendExecutor(String serverAddress, RpcSerializeProtocol serializeProtocol) {
loader.load(serverAddress, serializeProtocol);
}
public void stop() {
loader.unload();
}
public static <T> T execute(Class<T> rpcInterface) {
return Reflection.newProxy(rpcInterface, new MessageSendProxy(rpcInterface));
}
}
<file_sep>/rpc-v2/src/main/java/me/framework/RpcServerBoot.java
package me.framework;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
* @author paranoidq
* @since 1.0.0
*/
public class RpcServerBoot {
public static void main(String[] args) {
new ClassPathXmlApplicationContext("classpath:rpc-invoke-config-server.xml");
}
}
<file_sep>/rpc-v1/src/main/java/me/rpc/pool/policy/BlockingPolicy.java
package me.rpc.pool.policy;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
/**
* @author paranoidq
* @since 1.0.0
*/
public class BlockingPolicy implements RejectedExecutionHandler {
private static final Logger logger = LoggerFactory.getLogger(BlockingPolicy.class);
private final String threadName;
public BlockingPolicy(String threadName) {
this.threadName = threadName;
}
public BlockingPolicy() {
this(null);
}
/**
* Method that may be invoked by a {@link ThreadPoolExecutor} when
* {@link ThreadPoolExecutor#execute execute} cannot accept a
* task. This may occur when no more threads or queue slots are
* available because their bounds would be exceeded, or upon
* shutdown of the Executor.
* <p>
* <p>In the absence of other alternatives, the method may throw
* an unchecked {@link RejectedExecutionException}, which will be
* propagated to the caller of {@code execute}.
*
* @param r the runnable task requested to be executed
* @param executor the executor attempting to execute this task
* @throws RejectedExecutionException if there is no remedy
*/
@Override
public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
if (!StringUtils.isEmpty(threadName)) {
logger.error("RPC thread pool [{}] is exhausted, executor={}", threadName, executor.toString());
}
if (!executor.isShutdown()) {
try {
executor.getQueue().put(r);
} catch (InterruptedException e) {
}
}
}
}
<file_sep>/rpc-v3/src/main/java/me/framework/rpc/message/kryo/KryoEncoder.java
package me.framework.rpc.message.kryo;
import me.framework.rpc.message.MessageDecoder;
import me.framework.rpc.message.MessageEncoder;
import me.framework.rpc.serialize.support.MessageCodec;
/**
* @author paranoidq
* @since 1.0.0
*/
public class KryoEncoder extends MessageEncoder{
public KryoEncoder(MessageCodec codec) {
super(codec);
}
}
<file_sep>/README.md
# NettyRPC Project
A high-performance Java RPC server based on the Netty framework, using Kryo, Hessian, and Protostuff to support RPC message serialization.

Note: this code is adapted and optimized from Tang Jie's blog and GitHub. Thanks to the original author!
http://www.cnblogs.com/jietang/
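
A minimal client-side usage sketch, based on the `RpcTest` client shipped in `rpc-v1` (the `AddService` interface and `EchoClient` class below are placeholders for illustration, not classes shipped with this repository):

```java
import me.rpc.core.MessageSendExecutor;
import me.rpc.core.RpcSerializeProtocol;

// Placeholder service interface; any RPC interface registered on the server works the same way.
interface AddService {
    int add(int a, int b);
}

public class EchoClient {
    public static void main(String[] args) {
        // Connect to the RPC server and choose a serialization protocol.
        MessageSendExecutor executor =
                new MessageSendExecutor("127.0.0.1:18888", RpcSerializeProtocol.JDK_SERIALIZE);

        // Obtain a dynamic proxy and call it like a local object.
        AddService service = MessageSendExecutor.execute(AddService.class);
        System.out.println(service.add(1, 2));

        executor.stop();
    }
}
```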
----------
## NettyRPC 3.0 Build 2017/09 by Qianwei<file_sep>/rpc-v2/src/main/java/me/framework/rpc/core/client/RpcSendSerializeBinder.java
package me.framework.rpc.core.client;
import io.netty.channel.ChannelPipeline;
import io.netty.handler.codec.LengthFieldBasedFrameDecoder;
import io.netty.handler.codec.LengthFieldPrepender;
import io.netty.handler.codec.serialization.ClassResolvers;
import io.netty.handler.codec.serialization.ObjectDecoder;
import io.netty.handler.codec.serialization.ObjectEncoder;
import io.netty.handler.logging.LoggingHandler;
import me.framework.rpc.message.MessageSerializeBinder;
import me.framework.rpc.message.kryo.KryoDecoder;
import me.framework.rpc.message.kryo.KryoEncoder;
import me.framework.rpc.serialize.support.MessageCodec;
import me.framework.rpc.serialize.support.RpcSerializeProtocol;
import me.framework.rpc.serialize.support.kryo.KryoMessageCodec;
import me.framework.rpc.serialize.support.kryo.KryoPoolFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author paranoidq
* @since 1.0.0
*/
public class RpcSendSerializeBinder implements MessageSerializeBinder {
private static final Logger logger = LoggerFactory.getLogger(RpcSendSerializeBinder.class);
public RpcSendSerializeBinder() {}
@Override
public void bind(RpcSerializeProtocol protocol, ChannelPipeline pipeline) {
switch (protocol) {
case JDK_SERIALIZE: {
logger.info("Use JDK_SERIALIZE");
pipeline.addLast(new LengthFieldBasedFrameDecoder(Integer.MAX_VALUE, 0, MessageCodec.MESSAGE_LENGTH_BYTES, 0, MessageCodec.MESSAGE_LENGTH_BYTES));
pipeline.addLast(new LengthFieldPrepender(MessageCodec.MESSAGE_LENGTH_BYTES));
pipeline.addLast(new ObjectEncoder());
pipeline.addLast(new ObjectDecoder(Integer.MAX_VALUE, ClassResolvers.weakCachingConcurrentResolver(this.getClass().getClassLoader())));
pipeline.addLast(new MessageSendHandler());
break;
}
case KRYO_SERIAZLIZE: {
logger.info("Use KRYO_SERIAZLIZE");
KryoMessageCodec util = new KryoMessageCodec();
pipeline.addLast(new LoggingHandler());
pipeline.addLast(new KryoEncoder(util));
pipeline.addLast(new KryoDecoder(util));
pipeline.addLast(new MessageSendHandler());
break;
}
case HESSIAN_SERIALIZE: {
logger.info("Use HESSIAN_SERIALIZE");
// HessianCodecUtil util = new HessianCodecUtil();
// pipeline.addLast(new HessianEncoder(util));
// pipeline.addLast(new HessianDecoder(util));
// pipeline.addLast(new MessageRecvHandler(handlerMap));
break;
}
}
}
}
<file_sep>/rpc-v3/src/main/java/me/framework/rpc/util/pool/NamedThreadFactory.java
package me.framework.rpc.util.pool;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
/**
* @author paranoidq
* @since 1.0.0
*/
public class NamedThreadFactory implements ThreadFactory {
private static final AtomicInteger threadNumber = new AtomicInteger(1);
private final AtomicInteger mThreadNum = new AtomicInteger(1);
private final String prefix;
private final boolean daemonThread;
private final ThreadGroup threadGroup;
public NamedThreadFactory() {
this("srpc-thread-pool-" + threadNumber.getAndIncrement(), false);
}
public NamedThreadFactory(String prefix) {
this(prefix, false);
}
public NamedThreadFactory(String prefix, boolean daemonThread) {
this.prefix = prefix + "-thread-";
this.daemonThread = daemonThread;
SecurityManager s = System.getSecurityManager();
threadGroup = (s == null) ? Thread.currentThread().getThreadGroup() : s.getThreadGroup();
}
/**
* Constructs a new {@code Thread}. Implementations may also initialize
* priority, name, daemon status, {@code ThreadGroup}, etc.
*
* @param r a runnable to be executed by new thread instance
* @return constructed thread, or {@code null} if the request to
* create a thread is rejected
*/
public Thread newThread(Runnable r) {
String name = prefix + mThreadNum.getAndIncrement();
Thread thread = new Thread(threadGroup, r, name, 0); // stack size 0 means the parameter is ignored and the platform default is used
thread.setDaemon(daemonThread);
return thread;
}
public ThreadGroup getThreadGroup() {
return this.threadGroup;
}
}
<file_sep>/rpc-v3/src/main/java/me/framework/rpc/core/server/MessageRecvExecutor.java
package me.framework.rpc.core.server;
import com.google.common.util.concurrent.*;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import me.framework.rpc.config.RpcSystemConfig;
import me.framework.rpc.model.MessageRequest;
import me.framework.rpc.model.MessageResponse;
import me.framework.rpc.model.ServiceHolder;
import me.framework.rpc.serialize.support.RpcSerializeProtocol;
import me.framework.rpc.spring.NettyRpcRegistry;
import me.framework.rpc.util.nettybuilder.NettyServerBootstrapBuilder;
import me.framework.rpc.util.pool.NamedThreadFactory;
import me.framework.rpc.util.pool.RpcThreadPool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import java.nio.channels.spi.SelectorProvider;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
/**
*
* Since v3 introduced Spring, it is no longer necessary to start the server by calling start() manually.
*
* Startup happens in {@link NettyRpcRegistry#afterPropertiesSet()}:
* that method first reads the corresponding parameters from the configuration file and injects them into the
* {@link MessageRecvExecutor} instance, then calls {@link MessageRecvExecutor#start()} to start the thread pool
* and listen for incoming requests.
*
* The same applies to {@link me.framework.rpc.core.client.MessageSendExecutor}.
*
* @author paranoidq
* @since 1.0.0
*/
public class MessageRecvExecutor implements ApplicationContextAware {
private static final Logger logger = LoggerFactory.getLogger(MessageRecvExecutor.class);
private final static String DELIMITER = ":";
/**
* Stores the RPC service mapping relationships.
* <ClassName --> Object Instance>
*/
private Map<String, Object> handlerMap = new ConcurrentHashMap<>();
/**
* The concrete value is set via Spring injection.
* See: {@link me.framework.rpc.spring.NettyRpcRegistry}
*/
private String serverAddress;
/**
* The concrete value is set via Spring injection.
* See: {@link me.framework.rpc.spring.NettyRpcRegistry}
*/
private RpcSerializeProtocol serializeProtocol = RpcSerializeProtocol.JDK_SERIALIZE;
/**
* The concrete value is set via Spring injection.
* See: {@link me.framework.rpc.spring.NettyRpcRegistry}
*/
private String echoApiPort;
/**
* Executor used for concurrent task execution.
*/
private volatile ListeningExecutorService threadPoolExecutor;
/**
* Number of threads in the thread pool.
*/
private static int threadNums = RpcSystemConfig.SYSTEM_PROPERTY_THREADPOOL_THREAD_NUMS;
/**
* Length of the thread pool task queue.
*/
private static int queueNums = RpcSystemConfig.SYSTEM_PROPERTY_THREADPOOL_QUEUE_NUMS;
private ThreadFactory threadFactory = new NamedThreadFactory("Netty RPC Factory");
private int parallel = Runtime.getRuntime().availableProcessors() * 2;
private EventLoopGroup boss = new NioEventLoopGroup();
private EventLoopGroup worker = new NioEventLoopGroup(parallel,threadFactory, SelectorProvider.provider());
public MessageRecvExecutor() {
handlerMap.clear();
}
private static class Holder {
private static final MessageRecvExecutor instance = new MessageRecvExecutor();
}
public static MessageRecvExecutor getInstance() {
return Holder.instance;
}
/**
* Submits the request task for business processing.
* @param task
* @param ctx
* @param request
* @param response
*/
public void submit(Callable<Boolean> task, ChannelHandlerContext ctx, MessageRequest request, MessageResponse response) {
if (threadPoolExecutor == null) {
synchronized (MessageRecvExecutor.class) {
if (threadPoolExecutor == null) {
threadPoolExecutor =
MoreExecutors.listeningDecorator((ThreadPoolExecutor)
(RpcSystemConfig.isMonitorServerSupport()
? RpcThreadPool.getExecutorWithJmx(threadNums, queueNums)
: RpcThreadPool.getExecutor(threadNums, queueNums)
)
);
}
}
}
ListenableFuture<Boolean> listenableFuture = threadPoolExecutor.submit(task);
// rpc-v2 uses ListenableFuture to remove the v1-style callback, which is simpler and more efficient
Futures.addCallback(listenableFuture, new FutureCallback<Boolean>() {
@Override
public void onSuccess(Boolean result) {
ctx.writeAndFlush(response).addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
logger.info("RPC Server send response. messageId=[{}]", request.getMessageId());
}
});
}
@Override
public void onFailure(Throwable t) {
logger.error("", t);
}
}, threadPoolExecutor);
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
try {
ServiceHolder holder = (ServiceHolder) applicationContext.getBean(Class.forName(
"me.framework.rpc.model.ServiceHolder"
));
Map<String, Object> services = holder.getServices();
for (Map.Entry<String, Object> service : services.entrySet()) {
handlerMap.put(service.getKey(), service.getValue());
}
} catch (ClassNotFoundException e) {
logger.error("ServiceHolder class not found", e);
}
}
public void start() {
try {
ServerBootstrap bootstrap = NettyServerBootstrapBuilder.getInstance(boss, worker)
.setSoBacklog(128)
.build();
bootstrap.childHandler(new MessageRecvChannelInitializer(handlerMap)
.setSerializeProtocol(serializeProtocol));
String[] ipAddr = serverAddress.split(MessageRecvExecutor.DELIMITER);
if (ipAddr.length == 2) {
String host = ipAddr[0];
int port = Integer.parseInt(ipAddr[1]);
ChannelFuture future = bootstrap.bind(host, port).sync();
future.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(final ChannelFuture future) throws Exception {
if (future.isSuccess()) {
System.out.printf("Netty RPC Server started successfully ip:%s port:%d\n", host, port);
// future.channel().closeFuture().sync().addListener(new ChannelFutureListener() {
// @Override
// public void operationComplete(ChannelFuture future) throws Exception {
// threadPoolExecutor.shutdown();
// }
// });
}
}
});
} else {
logger.error("Netty RPC Server failed to start!\n");
}
} catch (InterruptedException e) {
logger.error("Netty RPC Server failed to start!\n", e);
}
}
public void stop() {
worker.shutdownGracefully();
boss.shutdownGracefully();
}
public Map<String, Object> getHandlerMap() {
return handlerMap;
}
public void setHandlerMap(Map<String, Object> handlerMap) {
this.handlerMap = handlerMap;
}
public String getServerAddress() {
return serverAddress;
}
public void setServerAddress(String serverAddress) {
this.serverAddress = serverAddress;
}
public RpcSerializeProtocol getSerializeProtocol() {
return serializeProtocol;
}
public void setSerializeProtocol(RpcSerializeProtocol serializeProtocol) {
this.serializeProtocol = serializeProtocol;
}
public String getEchoApiPort() {
return echoApiPort;
}
public void setEchoApiPort(String echoApiPort) {
this.echoApiPort = echoApiPort;
}
}
<file_sep>/rpc-v1/src/main/java/me/rpc/model/ServiceHolder.java
package me.rpc.model;
import java.util.Map;
/**
* @author paranoidq
* @since 1.0.0
*/
public class ServiceHolder {
private Map<String, Object> services;
public Map<String, Object> getServices() {
return services;
}
public void setServices(Map<String, Object> services) {
this.services = services;
}
}
<file_sep>/rpc-v2/src/main/java/me/framework/rpc/serialize/support/kryo/KryoMessageCodec.java
package me.framework.rpc.serialize.support.kryo;
import com.esotericsoftware.kryo.pool.KryoPool;
import io.netty.buffer.ByteBuf;
import me.framework.rpc.serialize.support.MessageCodec;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
/**
* @author paranoidq
* @since 1.0.0
*/
public class KryoMessageCodec implements MessageCodec {
public KryoMessageCodec() {}
@Override
public void encode(ByteBuf writeBuffer, Object message) throws IOException {
try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) {
KryoSerializer serializer = new KryoSerializer();
serializer.serialize(byteArrayOutputStream, message);
byte[] body = byteArrayOutputStream.toByteArray();
int dataLength = body.length;
writeBuffer.writeInt(dataLength);
writeBuffer.writeBytes(body);
}
}
@Override
public Object decode(byte[] body) throws IOException {
try (ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(body)) {
KryoSerializer serializer = new KryoSerializer();
return serializer.deserialize(byteArrayInputStream);
}
}
}
<file_sep>/rpc-v3/src/main/java/me/framework/rpc/filter/support/SimpleFilter.java
package me.framework.rpc.filter.support;
import me.framework.rpc.filter.Filter;
import me.framework.rpc.logger.AppLoggerInject;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import java.lang.reflect.Method;
/**
* @author paranoidq
* @since 1.0.0
*/
public class SimpleFilter implements Filter {
@AppLoggerInject
private static Logger logger;
@Override
public boolean before(Method method, Object processor, Object[] requestObjects) {
logger.info(StringUtils.center("[SimpleFilter##before]", 48, "*"));
return true;
}
@Override
public void after(Method method, Object processor, Object[] requestObjects) {
logger.info(StringUtils.center("[SimpleFilter##after]", 48, "*"));
}
}
<file_sep>/rpc-v3/src/main/java/me/framework/rpc/core/handler/NettyRpcRecvHandler.java
package me.framework.rpc.core.handler;
import io.netty.channel.ChannelPipeline;
import java.util.Map;
/**
* @author paranoidq
* @since 1.0.0
*/
public interface NettyRpcRecvHandler {
void handle(Map<String, Object> handlerMap, ChannelPipeline pipeline);
}
<file_sep>/rpc-v3/src/main/resources/rpc-server.properties
rpc.server.addr=127.0.0.1:18888
rpc.server.echo.api.port=18889<file_sep>/rpc-v2/src/main/java/me/framework/rpc/core/server/MessageRecvExecutor.java
package me.framework.rpc.core.server;
import com.google.common.util.concurrent.*;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import me.framework.rpc.model.MessageRequest;
import me.framework.rpc.model.MessageResponse;
import me.framework.rpc.model.ServiceHolder;
import me.framework.rpc.serialize.support.RpcSerializeProtocol;
import me.framework.rpc.util.nettybuilder.NettyServerBootstrapBuilder;
import me.framework.rpc.util.pool.NamedThreadFactory;
import me.framework.rpc.util.pool.RpcThreadPool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import java.nio.channels.spi.SelectorProvider;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
/**
* @author paranoidq
* @since 1.0.0
*/
public class MessageRecvExecutor implements ApplicationContextAware, InitializingBean {
private static final Logger logger = LoggerFactory.getLogger(MessageRecvExecutor.class);
private final static String DELIMITER = ":";
private static Map<String, Object> handlerMap = new ConcurrentHashMap<>();
private static volatile ListeningExecutorService threadPoolExecutor;
private String serverAddress;
// JDK serialization is used by default
private RpcSerializeProtocol protocol = RpcSerializeProtocol.JDK_SERIALIZE;
public MessageRecvExecutor(String serverAddress, String protocol) {
this.serverAddress = serverAddress;
this.protocol = Enum.valueOf(RpcSerializeProtocol.class, protocol);
}
/**
* Submits the request task for business processing.
* @param task
* @param ctx
* @param request
* @param response
*/
public static void submit(Callable<Boolean> task, ChannelHandlerContext ctx, MessageRequest request, MessageResponse response) {
if (threadPoolExecutor == null) {
synchronized (MessageRecvExecutor.class) {
if (threadPoolExecutor == null) {
threadPoolExecutor = MoreExecutors.listeningDecorator((ThreadPoolExecutor) RpcThreadPool.getExecutor(16, -1));
}
}
}
ListenableFuture<Boolean> listenableFuture = threadPoolExecutor.submit(task);
// rpc-v2 uses ListenableFuture to remove the v1-style callback, which is simpler and more efficient
Futures.addCallback(listenableFuture, new FutureCallback<Boolean>() {
@Override
public void onSuccess(Boolean result) {
ctx.writeAndFlush(response).addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
logger.info("RPC Server send response. messageId=[{}]", request.getMessageId());
}
});
}
@Override
public void onFailure(Throwable t) {
logger.error("", t);
}
}, threadPoolExecutor);
}
@Override
public void afterPropertiesSet() throws Exception {
ThreadFactory threadFactory = new NamedThreadFactory("Netty RPC Factory");
int parallel = Runtime.getRuntime().availableProcessors() * 2;
EventLoopGroup boss = new NioEventLoopGroup();
EventLoopGroup worker = new NioEventLoopGroup(parallel,threadFactory, SelectorProvider.provider());
try {
ServerBootstrap bootstrap = NettyServerBootstrapBuilder.getInstance(boss, worker)
.setAcceptSocketsMax(128)
.build();
bootstrap.childHandler(new MessageRecvChannelInitializer(handlerMap)
.setSerializeProtocol(protocol));
String[] ipAddr = serverAddress.split(MessageRecvExecutor.DELIMITER);
if (ipAddr.length == 2) {
String host = ipAddr[0];
int port = Integer.parseInt(ipAddr[1]);
ChannelFuture future = bootstrap.bind(host, port);
System.out.printf("Netty RPC Server started success ip:%s port:%d\n", host, port);
future.channel().closeFuture().sync();
} else {
System.out.printf("Netty RPC Server started fail!\n");
}
} finally {
worker.shutdownGracefully();
boss.shutdownGracefully();
}
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
try {
ServiceHolder holder = (ServiceHolder) applicationContext.getBean(Class.forName(
"me.framework.rpc.model.ServiceHolder"
));
Map<String, Object> services = holder.getServices();
for (Map.Entry<String, Object> service : services.entrySet()) {
handlerMap.put(service.getKey(), service.getValue());
}
} catch (ClassNotFoundException e) {
logger.error("", e);
}
}
}
<file_sep>/rpc-v1/src/main/java/me/rpc/core/RpcServerLoader.java
package me.rpc.core;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import me.rpc.pool.RpcThreadPool;
import java.net.InetSocketAddress;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* @author paranoidq
* @since 1.0.0
*/
public class RpcServerLoader {
private static volatile RpcServerLoader instance;
private final static String DELIMITER = ":";
private RpcSerializeProtocol serializeProtocol = RpcSerializeProtocol.JDK_SERIALIZE;
private final static int parallel = Runtime.getRuntime().availableProcessors() * 2;
private EventLoopGroup eventLoopGroup = new NioEventLoopGroup(parallel);
private static ThreadPoolExecutor threadPoolExecutor = (ThreadPoolExecutor) RpcThreadPool.getExecutor(16, -1);
private MessageSendHandler sendHandler;
private Lock lock = new ReentrantLock();
private Condition signal = lock.newCondition();
public static RpcServerLoader getInstance() {
if (instance == null) {
synchronized (RpcServerLoader.class) {
if (instance == null) {
instance = new RpcServerLoader();
}
}
}
return instance;
}
public void load(String serverAddress, RpcSerializeProtocol serializeProtocol) {
String[] ipAddr = serverAddress.split(DELIMITER);
if (ipAddr.length == 2) {
String host = ipAddr[0];
int port = Integer.parseInt(ipAddr[1]);
final InetSocketAddress remoteAddr = new InetSocketAddress(host, port);
threadPoolExecutor.submit(
new MessageSendInitializeTask(eventLoopGroup, remoteAddr, this)
);
}
}
public void unload() {
sendHandler.close();
threadPoolExecutor.shutdown();
eventLoopGroup.shutdownGracefully();
}
public void setMessageSendHandler(MessageSendHandler sendHandler) {
try {
lock.lock();
this.sendHandler = sendHandler;
signal.signalAll();
} finally {
lock.unlock();
}
}
public MessageSendHandler getSendHandler() throws InterruptedException {
try {
lock.lock();
if (sendHandler == null) {
signal.await();
}
return sendHandler;
} finally {
lock.unlock();
}
}
public void setSerializeProtocol(RpcSerializeProtocol serializeProtocol) {
this.serializeProtocol = serializeProtocol;
}
}
<file_sep>/rpc-v3/src/main/java/me/framework/rpc/util/pool/policy/AbortPolicy.java
package me.framework.rpc.util.pool.policy;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
/**
* @author paranoidq
* @since 1.0.0
*/
public class AbortPolicy extends ThreadPoolExecutor.AbortPolicy {
private static final Logger logger = LoggerFactory.getLogger(AbortPolicy.class);
private final String threadName;
public AbortPolicy(String threadName) {
this.threadName = threadName;
}
public AbortPolicy() {
this(null);
}
/**
* Always throws RejectedExecutionException.
*
* @param r the runnable task requested to be executed
* @param e the executor attempting to execute this task
* @throws RejectedExecutionException always
*/
@Override
public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
if (!StringUtils.isEmpty(threadName)) {
logger.error("RPC thread pool [{}] is exhausted, executor={}", threadName, executor.toString());
}
String msg = String.format("Rpc Server[" +
"Thread name: %sm Pool Size: %d (active: %d, netty: %d, max: %d, largest: %d), " +
"Task: %d (completed %d), " +
"Executor status: (isShutdown:%s, isTerminated:%s, isTerminating:%s)]",
threadName, executor.getPoolSize(), executor.getActiveCount(), executor.getMaximumPoolSize(), executor.getLargestPoolSize(),
executor.isShutdown(), executor.isTerminated(), executor.isTerminating());
logger.error(msg);
super.rejectedExecution(r, executor);
}
}
<file_sep>/rpc-v1/src/main/java/me/rpc/pool/RpcThreadPool.java
package me.rpc.pool;
import me.rpc.pool.policy.AbortPolicy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.*;
/**
* SRpc Worker thread pool
*
* 独立出工作线程池,主要考虑对应对复杂的业务操作,不阻塞netty的handler IO线程
* 当然如果业务足够简单,可以将处理逻辑写入netty的handler中也是可以的
*
* @author paranoidq
* @since 1.0.0
*/
public class RpcThreadPool {
private static Logger logger = LoggerFactory.getLogger(RpcThreadPool.class);
public static Executor getExecutor(int threads, int queues) {
String name = "RpcThreadPool";
return new ThreadPoolExecutor(threads, threads, 0, TimeUnit.MILLISECONDS,
queues == 0 ? new SynchronousQueue<>() :
(queues < 0 ? new LinkedBlockingQueue<>() : new LinkedBlockingQueue<>(queues)),
new NamedThreadFactory(name, true),
new AbortPolicy(name)
);
}
}
<file_sep>/rpc-v3/src/main/java/me/framework/rpc/core/client/RpcServerLoader.java
package me.framework.rpc.core.client;
import com.google.common.util.concurrent.*;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import me.framework.rpc.serialize.support.RpcSerializeProtocol;
import me.framework.rpc.util.pool.RpcThreadPool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetSocketAddress;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* @author paranoidq
* @since 1.0.0
*/
public class RpcServerLoader {
private static final Logger logger = LoggerFactory.getLogger(RpcServerLoader.class);
private static volatile RpcServerLoader instance;
private final static String DELIMITER = ":";
private final static int parallel = Runtime.getRuntime().availableProcessors() * 2;
private EventLoopGroup eventLoopGroup = new NioEventLoopGroup(parallel);
private static ListeningExecutorService threadPoolExecutor = MoreExecutors.listeningDecorator(
(ThreadPoolExecutor) RpcThreadPool.getExecutor(16, -1));
private volatile MessageSendHandler sendHandler;
private Lock lock = new ReentrantLock();
private Condition connectStatus = lock.newCondition();
private Condition handlerStatus = lock.newCondition();
public static RpcServerLoader getInstance() {
if (instance == null) {
synchronized (RpcServerLoader.class) {
if (instance == null) {
instance = new RpcServerLoader();
}
}
}
return instance;
}
public void load(String serverAddress, RpcSerializeProtocol serializeProtocol) {
String[] ipAddr = serverAddress.split(DELIMITER);
if (ipAddr.length == 2) {
String host = ipAddr[0];
int port = Integer.parseInt(ipAddr[1]);
final InetSocketAddress remoteAddr = new InetSocketAddress(host, port);
ListenableFuture<Boolean> listenableFuture = threadPoolExecutor.submit(
new MessageSendInitializeTask(eventLoopGroup, remoteAddr, serializeProtocol)
);
Futures.addCallback(listenableFuture, new FutureCallback<Boolean>() {
@Override
public void onSuccess(Boolean result) {
logger.info("连接服务端成功");
try {
lock.lock();
if (sendHandler == null) {
handlerStatus.await();
}
if (result == Boolean.TRUE && sendHandler != null) {
connectStatus.signalAll();
}
} catch (InterruptedException e) {
} finally {
lock.unlock();
}
}
@Override
public void onFailure(Throwable t) {
logger.error("连接失败", t);
}
}, threadPoolExecutor);
}
}
public void unload() {
sendHandler.close();
threadPoolExecutor.shutdown();
eventLoopGroup.shutdownGracefully();
}
public void setMessageSendHandler(MessageSendHandler sendHandler) {
try {
lock.lock();
this.sendHandler = sendHandler;
handlerStatus.signalAll();
} finally {
lock.unlock();
}
}
public MessageSendHandler getSendHandler() throws InterruptedException {
try {
lock.lock();
if (sendHandler == null) {
connectStatus.await();
}
return sendHandler;
} finally {
lock.unlock();
}
}
}
<file_sep>/rpc-v2/src/main/java/me/framework/rpc/message/MessageEncoder.java
package me.framework.rpc.message;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToByteEncoder;
import me.framework.rpc.serialize.support.MessageCodec;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author paranoidq
* @since 1.0.0
*/
public class MessageEncoder extends MessageToByteEncoder<Object> {
private static final Logger logger = LoggerFactory.getLogger(MessageEncoder.class);
private MessageCodec codec;
public MessageEncoder(MessageCodec codec) {
this.codec = codec;
}
@Override
protected void encode(ChannelHandlerContext ctx, Object msg, ByteBuf out) throws Exception {
codec.encode(out, msg);
}
}
<file_sep>/rpc-v3/src/main/resources/log4j.properties
log4j.rootLogger=DEBUG
log4j.additivity.me.framework.rpc = false
log4j.logger.me.framework.rpc=DEBUG,console
log4j.logger.io.netty=DEBUG,console
log4j.logger.org.springframework=DEBUG,console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d [%t] %-5p %c - %m%n
log4j.appender.console.encoding=UTF-8<file_sep>/rpc-v3/src/main/java/me/framework/rpc/core/server/MethodInvoker.java
package me.framework.rpc.core.server;
import me.framework.rpc.model.MessageRequest;
import org.apache.commons.lang3.reflect.MethodUtils;
/**
* @author paranoidq
* @since 1.0.0
*/
public class MethodInvoker {
private Object serviceBean;
public Object getServiceBean() {
return serviceBean;
}
public void setServiceBean(Object serviceBean) {
this.serviceBean = serviceBean;
}
public Object invoke(MessageRequest request) throws Throwable {
String methodName = request.getMethodName();
Object[] parameters = request.getParameterValues();
return MethodUtils.invokeMethod(serviceBean, methodName, parameters);
}
}
<file_sep>/rpc-v3/src/main/java/me/framework/rpc/event/ServerStartEvent.java
package me.framework.rpc.event;
import org.springframework.context.ApplicationEvent;
/**
* @author paranoidq
* @since 1.0.0
*/
public class ServerStartEvent extends ApplicationEvent {
/**
* Create a new ApplicationEvent.
*
* @param source the object on which the event initially occurred (never {@code null})
*/
public ServerStartEvent(Object source) {
super(source);
}
}
<file_sep>/rpc-v3/src/main/java/me/framework/rpc/core/server/RpcRecvSerializeFrame.java
package me.framework.rpc.core.server;
import com.google.common.collect.ClassToInstanceMap;
import com.google.common.collect.MutableClassToInstanceMap;
import io.netty.channel.ChannelPipeline;
import me.framework.rpc.core.handler.JdkNativeRecvHandler;
import me.framework.rpc.core.handler.KryoRecvHandler;
import me.framework.rpc.core.handler.NettyRpcRecvHandler;
import me.framework.rpc.message.RpcSerializeFrame;
import me.framework.rpc.serialize.support.RpcSerializeProtocol;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
/**
* @author paranoidq
* @since 1.0.0
*/
public class RpcRecvSerializeFrame implements RpcSerializeFrame {
private static final Logger logger = LoggerFactory.getLogger(RpcRecvSerializeFrame.class);
private Map<String, Object> handlerMap;
public RpcRecvSerializeFrame(Map<String, Object> handlerMap) {
this.handlerMap = handlerMap;
}
private static ClassToInstanceMap<NettyRpcRecvHandler> handler = MutableClassToInstanceMap.create();
/**
* 可以用注解代替
*/
static {
handler.putInstance(JdkNativeRecvHandler.class, new JdkNativeRecvHandler());
handler.putInstance(KryoRecvHandler.class, new KryoRecvHandler());
// ...
}
/**
* 将组装channelHandler的部分利用策略模式独立出去
* 不同的序列化方式组装的handler不同
* @param protocol
* @param pipeline
*/
@Override
public void select(RpcSerializeProtocol protocol, ChannelPipeline pipeline) {
switch (protocol) {
case JDK_SERIALIZE: {
logger.info("Use KRYO_SERIALIZE");
handler.getInstance(JdkNativeRecvHandler.class).handle(handlerMap, pipeline);
break;
}
case KRYO_SERIALIZE: {
logger.info("Use KRYO_SERIALIZE");
handler.getInstance(KryoRecvHandler.class).handle(handlerMap, pipeline);
break;
}
case HESSIAN_SERIALIZE: {
logger.info("Use HESSIAN_SERIALIZE");
break;
}
}
}
}
<file_sep>/rpc-v3/src/main/java/me/framework/rpc/serialize/support/kryo/KryoSerializer.java
package me.framework.rpc.serialize.support.kryo;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.esotericsoftware.kryo.pool.KryoFactory;
import com.esotericsoftware.kryo.pool.KryoPool;
import me.framework.rpc.serialize.support.RpcSerializer;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
* 是否可以公用Serializer实例 ???
*
* @author paranoidq
* @since 1.0.0
*/
public class KryoSerializer implements RpcSerializer {
private KryoPool pool;
public KryoSerializer() {
pool = KryoPoolFactory.getKryoPoolInstance();
}
@Override
public void serialize(OutputStream outputStream, Object object) throws IOException {
Kryo instance = pool.borrow();
Output output = new Output(outputStream);
try {
instance.writeClassAndObject(output, object);
} finally {
output.close();
pool.release(instance);
}
}
@Override
public Object deserialize(InputStream inputStream) throws IOException {
Kryo instance = pool.borrow();
Input input = new Input(inputStream);
try {
return instance.readClassAndObject(input);
} finally {
input.close();
pool.release(instance);
}
}
}
| cbc7d722e64d8b9ee79d897571dc8c3c70678988 | [
"Markdown",
"Java",
"INI"
] | 25 | Java | paranoidq/simple-rpc | 2e0779ee43b81f6be6f1ab287933453281183ca7 | bf550169ec885d27f547b6626a5caae7c55d6880 | |
refs/heads/master | <file_sep>//
// ViewController.swift
// Gestures
//
// Created by Stacy on 24.04.21.
//
import UIKit
class ViewController: UIViewController {
@IBOutlet weak var gestureIndicator: UILabel!
@IBAction func tap(sender: AnyObject) {
gestureIndicator.text = "Gesture: tap\n Background color: green"
gestureIndicator.backgroundColor = .green
}
@IBAction func pinch(sender: AnyObject) {
gestureIndicator.text = "Gesture: pinch\n Background color: red"
gestureIndicator.backgroundColor = .red
}
@IBAction func rotation(sender: AnyObject) {
gestureIndicator.text = "Gesture: rotation\n Background color: blue"
gestureIndicator.backgroundColor = .blue
}
@IBAction func longPress(sender: AnyObject) {
gestureIndicator.text = "Gesture: long press\n Background color: orange"
gestureIndicator.backgroundColor = .orange
}
@IBAction func swipe(sender: AnyObject) {
gestureIndicator.text = "Gesture: swipe\n Background color: gray"
gestureIndicator.backgroundColor = .gray
}
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
gestureIndicator.isUserInteractionEnabled = true
gestureIndicator.textAlignment = .center
gestureIndicator.numberOfLines = 2
gestureIndicator.text = "Use gestures in this area"
gestureIndicator.backgroundColor = .yellow
}
}
<file_sep>//
// BrushWidth.swift
// Example
//
// Created by Stacy on 24.04.21.
//
import UIKit
enum BrushWidth {
case superSmall
case small
case medium
case large
case superLarge
init?(tag: Int) {
switch tag {
case 0:
self = .superSmall
case 1:
self = .small
case 2:
self = .medium
case 3:
self = .large
case 4:
self = .superLarge
default:
return nil
}
}
var size: CGFloat {
switch self {
case .superSmall:
return 15.0
case .small:
return 20.0
case .medium:
return 30.0
case .large:
return 40.0
case .superLarge:
return 50.0
}
}
}
<file_sep>//
// ViewController.swift
// Graphics
//
// Created by <NAME> on 8.05.21.
//
import UIKit
class ViewController: UIViewController {
@IBOutlet weak var NewView: UIView!
@IBOutlet weak var InfoLable: UILabel!
@IBOutlet weak var Circle: CircleView!
@IBOutlet weak var Triangle: TriangleView!
var isTogether: Bool = false
var TagID: Int = 100
override func viewDidLoad() {
super.viewDidLoad()
NewView.addSubview(Circle)
NewView.addSubview(Triangle)
}
@IBAction func tapAddTo2nd(_ sender: Any) {
if(!isTogether) {
isTogether = true
InfoLable.text = ""
Triangle.center = NewView.center
Circle.center = Triangle.center
}
else {
NewView.bringSubviewToFront(Triangle)
}
}
@IBAction func tapAddTo1st(_ sender: Any) {
if(!isTogether) {
isTogether = true
InfoLable.text = ""
Triangle.center = NewView.center
NewView.bringSubviewToFront(Circle)
Circle.center = Triangle.center
}
else {
NewView.bringSubviewToFront(Circle)
}
}
@IBAction func tapRotateButton(_ sender: Any) {
if(isTogether){
UIView.animate(withDuration: 0.5, animations: {
self.NewView.transform = CGAffineTransform(rotationAngle: CGFloat.pi)
})
UIView.animate(withDuration: 1, delay: 0.45, animations: { self.NewView.transform = CGAffineTransform(rotationAngle: CGFloat.pi * 2)
})
}
else {
InfoLable.text = "Please add figures to each other."
}
}
@IBAction func tapAlphaButton(_ sender: Any) {
if(isTogether){
UIView.animate(withDuration: 1.5, animations: {
self.NewView.alpha = 1.0
self.NewView.alpha = 0
self.NewView.alpha = 1.0
})
}
else {
InfoLable.text = "Please add figures to each other."
}
}
@IBAction func tapMoveButton(_ sender: Any) {
if(isTogether){
UIView.animate(withDuration: 0.5, animations: {
self.NewView.transform = CGAffineTransform(translationX: 0, y: -200)
UIView.animate(withDuration: 1, delay: 0.45, animations: { self.NewView.transform = CGAffineTransform(translationX: 0, y: 0)
})
})
}
else {
InfoLable.text = "Please add figures to each other."
}
}
@IBAction func tapBothButton(_ sender: Any) {
if(isTogether){
tapAlphaButton((Any).self)
if ((Int.random(in: 0..<2)) == 0) {
tapMoveButton((Any).self)
}
else {
tapRotateButton((Any).self)
}
}
else {
InfoLable.text = "Please add figures to each other."
}
}
func removeTr(triangle: TriangleView) {
triangle.removeFromSuperview()
}
@IBAction func tap(sender: AnyObject) {
if(Triangle.center != NewView.center) {
let tr = TriangleView.init(frame: Triangle.frame)
tr.backgroundColor = UIColor.clear
tr.InsideColor = UIColor.blue
tr.OutsideColor = UIColor.green
NewView.addSubview(tr)
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) {
self.removeTr(triangle: tr)
}
}
}
@IBAction func pinch(sender: AnyObject) {
if(Triangle.center != NewView.center) {
let tr = TriangleView.init(frame: Triangle.frame)
tr.backgroundColor = UIColor.clear
tr.InsideColor = UIColor.black
tr.OutsideColor = UIColor.red
NewView.addSubview(tr)
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) {
self.removeTr(triangle: tr)
}
}
}
@IBAction func rotation(sender: AnyObject) {
if(Triangle.center != NewView.center) {
let tr = TriangleView.init(frame: Triangle.frame)
tr.backgroundColor = UIColor.clear
tr.InsideColor = UIColor.systemYellow
tr.OutsideColor = UIColor.orange
NewView.addSubview(tr)
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) {
self.removeTr(triangle: tr)
}
}
}
@IBAction func longPress(sender: AnyObject) {
if(Triangle.center != NewView.center) {
let tr = TriangleView.init(frame: Triangle.frame)
tr.backgroundColor = UIColor.clear
tr.InsideColor = UIColor.black
tr.OutsideColor = UIColor.cyan
NewView.addSubview(tr)
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) {
self.removeTr(triangle: tr)
}
}
}
@IBAction func swipe(sender: AnyObject) {
if(Triangle.center != NewView.center) {
let tr = TriangleView.init(frame: Triangle.frame)
tr.backgroundColor = UIColor.clear
tr.InsideColor = UIColor.magenta
tr.OutsideColor = UIColor.white
NewView.addSubview(tr)
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) {
self.removeTr(triangle: tr)
}
}
}
}
<file_sep>//
// Pensil.swift
// Example
//
// Created by Stacy on 24.04.21.
//
import UIKit
enum Pencil {
case black
case grey
case red
case blue
case darkGreen
case darkOrange
init?(tag: Int) {
switch tag {
case 0:
self = .black
case 1:
self = .blue
case 2:
self = .darkGreen
case 3:
self = .darkOrange
case 4:
self = .grey
case 5:
self = .red
default:
return nil
}
}
var color: UIColor {
switch self {
case .black:
return .black
case .grey:
return UIColor(white: 105/255.0, alpha: 1.0)
case .red:
return UIColor(red: 1, green: 0, blue: 0, alpha: 1.0)
case .blue:
return UIColor(red: 0, green: 0, blue: 1, alpha: 1.0)
case .darkGreen:
return UIColor(red: 102/255.0, green: 204/255.0, blue: 0, alpha: 1.0)
case .darkOrange:
return UIColor(red: 1, green: 102/255.0, blue: 0, alpha: 1.0)
}
}
}
<file_sep>//
// GraphicsCircle.swift
// Graphics
//
// Created by <NAME> on 23.04.21.
//
import UIKit
@IBDesignable
class CircleView: UIView {
@IBInspectable var InsideColor: UIColor = .clear
@IBInspectable var OutsideColor: UIColor = .clear
override func draw(_ rect: CGRect) {
let endRadius = min(frame.height, frame.width) / 2
let colors = [InsideColor.cgColor, OutsideColor.cgColor] as CFArray
let shadowColors = [UIColor.black.cgColor, UIColor.clear.cgColor] as CFArray
let center = CGPoint(x: bounds.size.width / 2, y: bounds.size.height / 2)
let shadowCenter = CGPoint(x: bounds.size.width / 2, y: bounds.size.height / 2 + 50)
guard let gradient = CGGradient(colorsSpace: nil, colors: colors, locations: nil) else { return }
guard let shadow = CGGradient(colorsSpace: nil, colors: shadowColors, locations: nil) else { return }
UIGraphicsGetCurrentContext()!.drawRadialGradient(shadow, startCenter: shadowCenter, startRadius: 10, endCenter: shadowCenter, endRadius: endRadius, options: CGGradientDrawingOptions.drawsBeforeStartLocation)
UIGraphicsGetCurrentContext()!.drawRadialGradient(gradient, startCenter: center, startRadius: 10, endCenter: center, endRadius: endRadius, options: CGGradientDrawingOptions.drawsBeforeStartLocation)
self.backgroundColor = UIColor.clear
}
}
<file_sep>//
// GraphicsTriangle.swift
// Graphics
//
// Created by <NAME> on 24.04.21.
//
import UIKit
@IBDesignable
final class TriangleView: UIView {
@IBInspectable var InsideColor: UIColor = .clear
@IBInspectable var OutsideColor: UIColor = .clear
override func draw(_ rect: CGRect) {
let startPoint = CGPoint(x: bounds.size.width/2, y: 0)
let endPoint = CGPoint(x: bounds.size.width/2, y: bounds.size.height)
let radius = 30.0
let colors = [InsideColor, OutsideColor]
let triangle = UIBezierPath.init(cgPath: createRoundedTriangle(width: bounds.size.width, height: bounds.size.height - 50, radius: CGFloat(radius)))
let shadowColors = [UIColor.black.withAlphaComponent(0.6), UIColor.clear]
let shadow = UIBezierPath.init(cgPath: createRoundedTriangle(width: bounds.size.width, height: bounds.size.height - 50, radius: CGFloat(radius)))
shadow.apply(CGAffineTransform.init(translationX: 0, y: 50))
drawLinearGradient(inside: shadow, start: startPoint, end: endPoint, colors: shadowColors)
drawLinearGradient(inside: triangle, start: startPoint, end: endPoint, colors: colors)
self.backgroundColor = UIColor.clear
}
func drawLinearGradient(inside path:UIBezierPath, start:CGPoint, end:CGPoint, colors:[UIColor])
{
guard let ctx = UIGraphicsGetCurrentContext() else { return }
ctx.saveGState()
defer { ctx.restoreGState() }
path.addClip()
let cgColors = colors.map({ $0.cgColor })
guard let gradient = CGGradient(colorsSpace: nil, colors: cgColors as CFArray, locations: nil)
else { return }
ctx.drawLinearGradient(gradient, start: start, end: end, options: [])
}
private func setUpGradient() -> CAGradientLayer {
let gradient = CAGradientLayer()
gradient.colors = [UIColor.orange.cgColor, UIColor.yellow.cgColor]
return gradient
}
func createRoundedTriangle(width: CGFloat, height: CGFloat, radius: CGFloat) -> CGMutablePath {
let point1 = CGPoint(x: width / 2, y: 0)//-width/2
let point2 = CGPoint(x: width, y: height)
let point3 = CGPoint(x: 0, y: height)
let path = CGMutablePath()
path.move(to: CGPoint(x: 0, y: height))
path.addArc(tangent1End: point1, tangent2End: point2, radius: radius)
path.addArc(tangent1End: point2, tangent2End: point3, radius: radius)
path.addArc(tangent1End: point3, tangent2End: point1, radius: radius)
path.closeSubpath()
return path
}
}
<file_sep>//
// ViewController.swift
// Example
//
// Created by Stacy on 24.04.21.
//
import UIKit
import CoreGraphics
class ViewController: UIViewController {
@IBOutlet var mainImageView: UIImageView!
@IBOutlet weak var tempImageView: UIImageView!
var lastPoint = CGPoint.zero
var color = UIColor.black
var brushWidth: CGFloat = 15.0
var swiped = false
var tapGestureRecognizerNumberPadView : UITapGestureRecognizer?
@IBAction func resetPressed(_ sender: Any) {
mainImageView.image = nil
tempImageView.image = nil
}
@IBAction func saveTapped(_ sender: Any) {
if let image = tempImageView.image {
UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
}
}
@IBAction func widthPressed(_ sender: UIButton) {
guard let width = BrushWidth(tag: sender.tag) else {
return
}
brushWidth = width.size
}
@IBAction func pencilPressed(_ sender: UIButton) {
guard let pencil = Pencil(tag: sender.tag) else {
return
}
color = pencil.color
}
func drawLine(from fromPoint: CGPoint, to toPoint: CGPoint) {
UIGraphicsBeginImageContext(view.frame.size)
guard let context = UIGraphicsGetCurrentContext() else {
return
}
tempImageView.image?.draw(in: view.bounds)
context.move(to: fromPoint)
context.addLine(to: toPoint)
context.setLineCap(.round)
context.setBlendMode(.normal)
context.setLineWidth(brushWidth)
context.setStrokeColor(color.cgColor)
context.strokePath()
tempImageView.image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
}
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
guard let touch = touches.first else {
return
}
swiped = false
lastPoint = touch.location(in: view)
}
override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
guard let touch = touches.first else {
return
}
swiped = true
let currentPoint = touch.location(in: view)
drawLine(from: lastPoint, to: currentPoint)
lastPoint = currentPoint
}
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
if !swiped {
drawLine(from: lastPoint, to: lastPoint)
}
UIGraphicsBeginImageContext(mainImageView.frame.size)
mainImageView.image?.draw(in: view.bounds, blendMode: .normal, alpha: 1.0)
mainImageView.image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
}
}
| e8b968638bf059d298790484e6cbb9dfee9bb122 | [
"Swift"
] | 7 | Swift | grapestacy/TP-Lab8 | 0f9882a8b4afc27d245a8b33daa8aa917d61523a | 64dc8d145e0fb97eb8a1d7fa04d76ef5c016b3f3 | |
refs/heads/master | <repo_name>tjucwb/yolo_relavant<file_sep>/README.md
# yolo_relavant
inlcudes functions regarding yolov3, trying to update
yolo-kmeans is used for create k anchors for yolov3,
the annotation follows CityPersons: A Diverse Dataset for Pedestrian Detection
and to used this code, just modify some path for saving and the name in the annotation file(like aim label in this code to other name)
the idea is inspired by https://github.com/dleam/YOLO-v3-kmeans
<file_sep>/yolo_kmeans.py
import numpy as np
import scipy.io
class kmeans_yolo():
def __init__(self, n_anchors, label_txt_path, anno_mat=None):
"""
:param n_anchors: number of anchors to be clustered
:param label_txt_path: the summary of label path (all labels in one txt file)
(only w and h are saved here)
:param anno_mat: the original label path
mat format, the satndard form of anno_mat follows
can be none if already in standard form
"""
self.n_anchors = n_anchors
self.label_txt_path = label_txt_path
self.anno_mat = anno_mat
self.class_num = 2
def convert_mat_txt(self):
anno_mat = self.anno_mat
label_txt_path = self.label_txt_path
if not anno_mat:
return np.loadtxt(label_txt_path)
label_txt_path = self.label_txt_path
data = scipy.io.loadmat(anno_mat)
data = data['aim_label'][0]
f = open(label_txt_path,'w')
for record in data:
bbxs = record['bbx'][0][0]
for bb in bbxs:
f.write(' '.join([str(int(a)) for a in bb[3:]]) + '\n')
f.close()
_, txt_name, extention = label_txt_path.split(".")
class_path = '.'+txt_name + '_with_class.' + extention
f1 = open(class_path,'w')
for record in data:
bbxs = record['bbx'][0][0]
for bb in bbxs:
f1.write(' '.join([str(int(a)) for a in bb[[0,3,4]]]) + '\n')
f1.close()
return np.loadtxt(label_txt_path), np.loadtxt(class_path)
def get_iou(self, bbxs, clusters):
"""
this function aims to get the iou of each bbx with each cluster
:return: the iou between bbx and clusters
"""
n_anchor = self.n_anchors
n = len(bbxs)
bbxs_w = np.repeat(bbxs[:,0], n_anchor)
bbxs_w = np.reshape(bbxs_w,(n, n_anchor))
bbxs_h = np.repeat(bbxs[:,1], n_anchor)
bbxs_h = np.reshape(bbxs_h, (n, n_anchor))
#cluster_w = np.repeat(clusters[:,0], n)
#cluster_w = np.reshape(cluster_w,(n, n_anchor))
cluster_w = np.tile(clusters[:,0],(n,1))
#cluster_h = np.repeat(clusters[:, 1], n)
#cluster_h = np.reshape(cluster_h, (n, n_anchor))
cluster_h = np.tile(clusters[:,1],(n,1))
cluster_area = np.multiply(clusters[:, 0], clusters[:, 1])
bbx_area = np.multiply(bbxs[:, 0], bbxs[:, 1])
cluster_area = np.tile(cluster_area, (n, 1))
bbx_area = np.tile(bbx_area,(n_anchor, 1))
bbx_area = bbx_area.T
w_min = np.minimum(bbxs_w, cluster_w)
h_min = np.minimum(bbxs_h, cluster_h)
#area_averlap = w_min * h_min
area_averlap = np.multiply(w_min, h_min)
#area_averlap = np.reshape(area_averlap, (n, n_anchor))
area_over = cluster_area + bbx_area - area_averlap
iou = area_averlap/area_over
#print(iou)
return iou
def get_miou(self,bbxs,clusters):
iou = self.get_iou(bbxs,clusters)
#print(np.sum(iou>1))
iou = np.max(iou,axis=1)
return np.mean(iou)
def kmeans(self, bbxs):
"""
this function is used to update the height and width of the clusters
:return:
"""
# initiate the clusters
n_anchors = self.n_anchors
np.random.seed()
bb_num = len(bbxs)
bb_ind = np.random.choice(bb_num, n_anchors, replace=False)
cluster = bbxs[bb_ind, :]
last_max_pos = np.zeros(bb_num)
while True:
iou = self.get_iou(bbxs, cluster)
current_max_pos = np.argmax(iou, axis=1)
if (current_max_pos == last_max_pos).all():
break
else:
for i in range(len(cluster)):
if not bbxs[current_max_pos == i].size:
continue
print(bbxs[current_max_pos == i,:])
cluster[i,:] = (np.median(bbxs[current_max_pos == i,:],axis=0))
last_max_pos = current_max_pos
return cluster
def average_bbx(self,bbx_with_class):
cn = self.class_num
anchor = []
class_list = bbx_with_class[:,0]
for cl in range(cn):
bb1 = None
bb2 = None
cl_data = bbx_with_class[class_list == cl, :]
if (cl_data[:, 1] > cl_data[:, 2]).any():
bb1 = np.median(cl_data[cl_data[:,1] > cl_data[:,2], 1:],axis=0)
if (cl_data[:, 2] > cl_data[:, 1]).any():
bb2 = np.median(cl_data[cl_data[:,2] > cl_data[:,1], 1:],axis=0)
anchor.append(bb1)
anchor.append(bb2)
return anchor
def save_res(self, anchors,miou, path):
with open(path,'w') as f:
for anchor in anchors:
f.write(' '.join([str(a) for a in anchor])+'\n')
f.write('miou is ' + str(miou))
if __name__ == '__main__':
k = kmeans_yolo(6, './label.txt', 'train_anno')
no_class, with_class = k.convert_mat_txt()
anchors = k.kmeans(no_class)
anchors2 = k.average_bbx(with_class)
print(anchors2)
miou = k.get_miou(no_class, anchors)
k.save_res(anchors,miou,'./anchor.txt')
| c6a2b0c46e8957b7ac8902c6de0c3fbbec2870a0 | [
"Markdown",
"Python"
] | 2 | Markdown | tjucwb/yolo_relavant | 107dd961c24217b6cd886263aca522ac8e3a715a | b3f7a8922f422f8e4304feb196947197e8f8baf2 | |
refs/heads/master | <repo_name>thieny1991/MergeSort<file_sep>/main.cpp
#include<iostream>
void mergeSort(int num[],int left,int right);
void merge(int num[],int left, int mid,int right);
using namespace std;
int main() {
int num[] = { 8,4,5,0,1, 9, 3, 9, 6, 2, 7 };
mergeSort(num, 0, 10);
for (int i = 0;i < 11;i++)
cout << num[i] << endl;
system("pause");
return 0;
}
void mergeSort(int num[],int left, int right)
{
if (left < right) {
//find pivot
int mid = (left + right) / 2;
mergeSort(num, left, mid);
mergeSort(num, mid + 1, right);
merge(num, left, mid, right);
}
}
void merge(int num[], int left, int mid, int right)
{
int i = left;
int j = mid + 1;
int k = 0;
int n = right - left + 1;
int *temp = new int[n];
while (i <= mid &&j <= right) {
if (num[i] < num[j]) {
temp[k] = num[i];
k++;
i++;
}
else {
temp[k] = num[j];
j++;
k++;
}
}
while (i <= mid) {
temp[k] = num[i];
k++;
i++;
}
while (j <= right) {
temp[k] = num[j];
k++;
j++;
}
i = left;
for (k = 0;k < n;k++) {
num[i] = temp[k];
i++;
}
delete[] temp;
temp = nullptr;
}
| 7be316fd486ca7cda78cc0f4bf3809d9983885f4 | [
"C++"
] | 1 | C++ | thieny1991/MergeSort | 58e149c9bccc9f8445ca13e4fca65102902a79fd | 265116f04fc07558495a507798f89e0ce3b2fa4c | |
refs/heads/master | <repo_name>TeamWater/TestLogin<file_sep>/app/controllers/index.js
var fb = Alloy.Globals.fbModule;
fb.appid = 1580469318879903;
fb.permissions = ['publish_stream'];
fb.addEventListener('login', function(event) {
if (event.success) {
alert('Logged In');
} else if (event.error) {
alert(event.error);
} else if (event.cancelled) {
alert("Canceled");
}
});
fb.addEventListener('logout', function(event) {
alert('Logged out');
});
$.fbBtn.style = fb.BUTTON_STYLE_WIDE;
$.index.open();
| 6a34f36c57b02609b120b97590add545b7c6e8e3 | [
"JavaScript"
] | 1 | JavaScript | TeamWater/TestLogin | e79a5877b6a5f6f23b6bf1cc7920523b979cbd87 | 28d872536d6ac9bccbcfdd15a1d950bf16cd163d | |
refs/heads/master | <file_sep><?php Response::setMetaDescription('Download/install the latest version of LBRY for Linux.') ?>
<?php ob_start() ?>
<h1>Install LBRY on Linux <span class="icon-linux"></span></h1>
<?php echo View::render('get/alphaNotice') ?>
<div class="meta spacer1 text-center">Choose an install option.</div>
<div class="row-fluid">
<div class="span6">
<h3>For the Efficient and Lazy</h3>
<ol>
<li><code>(mkdir lbry; cd lbry; wget https://raw.githubusercontent.com/lbryio/lbry-setup/master/lbry_setup.sh; chmod 755 lbry_setup.sh; ./lbry_setup.sh; bin/lbry/lbrycrdd -server -daemon)</code></li>
</ol>
</div>
<div class="span6">
<h3>For the Shrewd and Frivolous</h3>
<ol>
<li>Clone and follow the build steps for <a href="https://github.com/lbryio/lbrycrd" class="link-primary">lbrycrd</a>, a miner for LBRY credits.</li>
<li>Clone and follow the build steps for <a href="https://github.com/lbryio/lbry" class="link-primary">lbry</a>, a console based application for using the LBRY protocol.</li>
</ol>
</div>
</div>
<?php $html = ob_get_clean() ?>
<?php echo View::render('get/get-shared', ['installHtml' => $html]) ?>
<file_sep><?php NavActions::setNavUri('/learn') ?>
<?php Response::setMetaImage('http://lbry.io/img/cover-team.jpg') ?>
<?php Response::setMetaDescription('LBRY is founded by a team passionate about connecting producers and consumers and breaking down broken models. Learn more about them.') ?>
<?php echo View::render('nav/header', ['isDark' => false]) ?>
<main>
<div class="content">
<h1>About Us</h1>
</div>
<div class="hero hero-quote hero-img spacer2" style="background-image: url(/img/cover-team.jpg)">
<div class="hero-content-wrapper">
<div class="hero-content">
<blockquote class="blockquote-large">
<p>LBRY is so simple your Grandma can use it. I’m ready to see blockchain technology become useful for regular people.</p>
</blockquote>
<cite><NAME> <em>Technology Evangelist</em></cite>
</div>
</div>
</div>
<div class="content photo-grid spacer2">
<h2>The Team</h2>
<div class="row-fluid">
<div class="span6 spacer2">
<div class="photo-container">
<img src="/img/jeremy-644x450.jpg" alt="photo of <NAME>"/>-
</div>
<h4><NAME> <a href="mailto:<EMAIL>" class="link-primary"><span class="icon icon-envelope"></span></a></h4>
<div class="meta spacer1">Founder, Director</div>
<p>
Jeremy is the founder and CEO of <a href="//usetopscore.com" class="link-primary">TopScore</a>, a startup that
processes millions of dollars monthly in event and activity registrations.
Jeremy attended <a href="//rpi.edu" class="link-primary">Rensselaer Polytechnic Institute</a>, where he received degrees in physics and computer science.
</p>
<p>
Jeremy knows how to build and scale a startup starting from day one. He knows how to deliver usable products and get those products in front of the right people.
</p>
<p>
Jeremy is responsible for the packing, presentation, and strategy of LBRY, as well as some design aspects. He is a longtime BitTorrent community enthusiast.
</p>
</div>
<div class="span6 spacer2">
<div class="photo-container">
<img src="/img/jimmy-644x450.jpg" alt="photo of <NAME>"/>
</div>
<h4>
<NAME>
<a href="mailto:<EMAIL>" class="link-primary"><span class="icon icon-envelope"></span></a>
</h4>
<div class="meta spacer1">Founder, Developer</div>
<p>
Because graduating from RPI with degrees in physics and computer science is the hip thing to do, Jimmy did the same.
After, he found himself mired in government bureaucracy, spending too much time to get too little done.
</p>
<p>
Ready to work on a project he believed in, Jimmy quit his national security job to start LBRY.
Jimmy created the LBRY protocol and the first LBRY application.
</p>
<p>
Jimmy is a Bitcoin fanatic and has been since its early days. He has long been interested in the benefits of decentralization.
</p>
</div>
</div>
<div class="row-fluid">
<div class="span6 spacer2">
<div class="photo-container">
<img src="/img/mike-644x450.jpg" alt="photo of Mike Vine"/>
</div>
<h4><NAME> <a href="mailto:<EMAIL>" class="link-primary"><span class="icon icon-envelope"></span></a></h4>
<div class="meta spacer1">Technology Evangelist</div>
<p>
With a humble BA in Philosophy from <a href="http://tulane.edu/" class="link-primary">Tulane University</a>, Mike has
built a successful financial services marketing company, <a href="http://www.centinel.net/" class="link-primary">Centinel Consulting</a>.
Centinel has helped clients grow from close to nothing to hundreds of thousands of visitors. He manages
email marketing lists and social media accounts of the same size.
</p>
<p>
Mike has been involved with the Bitcoin community since the early days. His friends have launched companies like
<a class="link-primary" href="//lamassu.is">Lamassu BTM</a>,
<a class="link-primary" href="//coinapult.com">Coinapult</a>,
<a class="link-primary" href="//shapeshift.io">Shapeshift</a>.
Now, he wants a turn to help change the world by harnessing blockchain technology.
Mike heads up LBRY’s marketing efforts and serves as an ambassador for our platform to media, investors, and the public.
</p>
</div>
<div class="span6 spacer2">
<div class="photo-container">
<img src="/img/jack-robison-644x450.jpg" alt="photo of <NAME>"/>
</div>
<h4><NAME> <a href="mailto:<EMAIL>" class="link-primary"><span class="icon icon-envelope"></span></a></h4>
<div class="meta spacer1">Core Developer</div>
<p>
Jack's path to developer with LBRY is fairly typical:
<a href="http://www.masslive.com/localbuzz/index.ssf/2009/06/actionreaction_how_one_teens_c.html" class="link-primary">
face sixty years in prison for innocent chemistry experiments</a>; lose interest in chemistry;
<a href="https://www.youtube.com/watch?v=dXZi4UZjiiI&t=10" class="link-primary">program insane electric guitars for Kiss</a>;
decide to revolutionize the internet.
</p>
<p>
Jack was one of the first people to discover LBRY and took to it so fast he may understand more
about it than anyone.
<p>
Jack has Asperger's Syndrome and is actively involved in the autism community. He was a regular on Wrong Planet's
<a href="https://www.youtube.com/user/theWrongPlanet" class="link-primary">Autism Talk TV</a>, has appeared on
<em>National Public Radio</em>, the <em>New York Times</em>, and presents around the country.
</p>
</div>
</div>
<h2>Advisory Team</h2>
<div class="row-fluid">
<?php /*
<div class="span6 spacer2">
<div class="photo-container">
<img src="/img/alex-tabarrok-644x450.jpg" alt="Photo of <NAME>"/>
</div>
<h4><NAME></h4>
<div class="meta spacer1">Advisor, Economics</div>
<p><NAME> is <NAME> Chair in Economics at the <a href="http://mercatus.org/" class="link-primary">Mercatus Center</a>
and a professor of economics at <a href="//gmu.edu" class="link-primary">George Mason University</a>. He specializes in intellectual property reform, the effectiveness of markets, and the justice system.
</p>
<p>Tabarrok is the coauthor, with Mercatus colleague <NAME>, of the popular economics blog <a class="link-primary" href="http://www.marginalrevolution.com/"><em>Marginal Revolution</em></a>
and cofounder of the online educational platform <a class="link-primary" href="http://mruniversity.com/">Marginal Revolution University</a>.
He is the coauthor of
<em><a href="http://www.amazon.com/Modern-Principles-Economics-Tyler-Cowen/dp/1429239972" class="link-primary">Modern Principles of Economics</a></em>,
and author of the recent book
<em><a href="http://www.amazon.com/Launching-The-Innovation-Renaissance-Market-ebook/dp/B006C1HX24" class="link-primary">Launching the Innovation Renaissance</em></a>.
His articles have appeared in the<em> New York Times</em>, the<em> Washington Post</em>, the<em> Wall Street Journal</em>, and many
other prestigious publications.
</p>
<p>Tabarrok received his PhD in economics from
<a class="link-primary" href="http://en.wikipedia.org/wiki/George_Mason_University" title="George Mason University">George Mason University</a>.
</p>
</div>*/ ?>
<div class="span6 spacer2">
<div class="photo-container">
<img src="/img/stephan-644x450.jpg" alt="Photo of <NAME>"/>
</div>
<h4><NAME></h4>
<div class="meta spacer1">Advisor, Legal</div>
<p>
Stephan is a registered patent attorney and has over twenty years’ experience in patent, intellectual property,
and general commercial and corporate law. He is the founder and director of the <a href="http://c4sif.org/" class="link-primary">Center for the Study of Innovative Freedom</a>.
Stephan has published numerous articles and books on intellectual property law and legal topics including
<a href="http://www.amazon.com/International-Investment-Political-Dispute-Resolution/dp/0379215225" class="link-primary">
<em>International Investment, Political Risk, and Dispute Resolution: A Practitioner’s Guide</em>
</a>
and
<a href="https://mises.org/library/against-intellectual-property-0" class="link-primary">
<em>Against Intellectual Property</em>
</a>.
</p>
<p>
He received an LL.M. in international business law from <a href="http://www.kcl.ac.uk/" class="link-primary">King’s College London</a>, a JD from the Paul M. Hebert Law Center at
<a href="//lsu.edu" class="link-primary">Lousiana State University</a>,
as well as BSEE and MSEE degrees. His websites are <a href="stephankinsella.com" class="link-primary">stephankinsella.com</a>
and <a href="kinsellalaw.com" class="link-primary">kinsellalaw.com</a>
</p>
</div>
</div>
<h2>Newest Member</h2>
<div class="row-fluid">
<div class="span3"></div>
<div class="span6">
<img src="/img/spooner-644x450.jpg" alt="photo of you!"/>
<h4>You</h4>
<div class="meta spacer1">Developer, Designer, Economist, Marketer, Investor, ???</div>
<p>
Do you think opening up information would facilitate human flourishing?
Do you want to join a bright core of people with an obsession for upending broken systems?
<a href="mailto:<EMAIL>" class="link-primary">Say hello.</a>
</p>
</div>
</div>
</div>
<?php echo View::render('nav/learnFooter') ?>
</main>
<?php echo View::render('nav/footer') ?>
<file_sep><?php if ($success): ?>
<?php js_start() ?>
ga('send', 'event', 'Sign Up', 'Join List', '<?php echo $listId ?>');
<?php js_end() ?>
<?php endif ?>
<form action="/list-subscribe" method="post" novalidate>
<?php if ($error): ?>
<div class="notice notice-error spacer1"><?php echo $error ?></div>
<?php elseif ($success): ?>
<?php echo View::render('analytics/subTwitter') ?>
<div class="notice notice-success spacer1"><?php echo $success ?></div>
<?php endif ?>
<div class="mail-submit">
<input type="hidden" name="returnUrl" value="<?php echo $returnUrl ?>"/>
<input type="hidden" name="listId" value="<?php echo $listId ?>"/>
<input type="email" value="" name="email" class="required email standard" placeholder="<EMAIL>">
<input type="submit" value="<?php echo isset($submitLabel) ? $submitLabel : 'Subscribe' ?>" name="subscribe" id="mc-embedded-subscribe" class="<?php echo $btnClass ?>">
<?php if (isset($mergeFields)): ?>
<input type="hidden" name="mergeFields" value="<?php echo htmlentities(serialize($mergeFields)) ?>" />
<?php endif ?>
</div>
</form><file_sep><?php Response::setMetaTitle(__('Join LBRY Email List')) ?>
<?php Response::setMetaDescription(__('Join our email list and receive updates about LBRY via email.')) ?>
<?php echo View::render('nav/header', ['isDark' => false ]) ?>
<main>
<div class="content">
<div class="row-fluid">
<div class="span9">
<h1><?php echo __('Join Email List') ?></h1>
<p>
<?php echo __('Join our email list and receive updates about LBRY via email.') ?>
</p>
<?php echo View::render('mail/joinList', [
'submitLabel' => 'Subscribe',
'returnUrl' => '/join-list',
'listId' => Mailchimp::LIST_GENERAL_ID
]) ?>
<div class="meta">
<?php echo __('You will receive 1-2 messages a month, only from LBRY, Inc. and only about LBRY.') ?>
<?php echo __('You can easily unsubscribe at any time.') ?>
</div>
</div>
<div class="span3">
<h3><?php echo __('Also On') ?></h3>
<div class="spacer1">
<a href="//twitter.com/lbryio" class="link-primary"><span class="icon icon-twitter"></span><span class="btn-label">Twitter</span></a>
</div>
<div class="spacer1">
<a href="//www.facebook.com/lbryio" class="link-primary"><span class="icon icon-facebook"></span> <span class="btn-label">Facebook</span></a>
</div>
<div class="spacer1">
<a href="//reddit.com/r/lbry" class="link-primary"><span class="icon icon-reddit"></span><span class="btn-label">Reddit</span></a>
</div>
</div>
</div>
</div>
</main>
<?php echo View::render('nav/footer') ?><file_sep><?php
include $_SERVER['ROOT_DIR'] . '/autoload.php';
i18n::register();
Session::init();
Controller::dispatch(strtok($_SERVER['REQUEST_URI'], '?'));<file_sep><?php Response::setMetaDescription('Download or install the latest version of LBRY.') ?>
<?php echo View::render('nav/header', ['isDark' => false]) ?>
<main>
<div class="content spacer1">
<h1>LBRY for CLI</h1>
<div class="notice notice-info">
<strong>This is a pre-release, alpha version of LBRY.</strong> It is only designed to show what LBRY makes possible.
Expect future releases to involve a full network reboot of both credits and metadata.
</div>
</div>
<div class="content">
<h2>Install</h2>
<div class="row-fluid">
<div class="span6">
<h3><span class="icon-linux"></span> Linux</h3>
<div>
<h4>The Brave and Lazy</h4>
<ol>
<li>Make a folder called <code>lbry</code> where you want everything to reside.</li>
<li>Download and run <a href="https://raw.githubusercontent.com/lbryio/lbry-setup/master/lbry_setup.sh" class="link-primary">this shell script</a> from that folder.</li>
</ol>
<h4>The Shrewd and Frivolous</h4>
<ol>
<li>Clone and follow the build steps for <a href="https://github.com/lbryio/lbrycrd" class="link-primary">lbrycrd</a>, a miner for LBRY credits.</li>
<li>Clone and follow the build steps for <a href="https://github.com/lbryio/lbry" class="link-primary">lbry</a>, a console based application for using the LBRY protocol.</li>
</ol>
</div>
</div>
<div class="span6">
<h3><span class="icon-apple"></span> OS X</h3>
<div>
<h4>OS X Programmers</h4>
<p>You can attempt to follow the Linux build instructions.</p>
<h4>Everyone Else</h4>
<p>Sorry, we do not have an OS X version of LBRY other than source. We promise one will exist sooner rather than later.</p>
</div>
</div>
</div>
<h2>Test</h2>
<p>To ensure LBRY is installed co:</p>
<div class="text-center spacer1">
<code>get wonderfullife</code>
</div>
<p class="meta">In the graphical version, this can accessed by typing "wonderfullife" into the address bar and pressing "Go". In the console version, select "[7] Add a stream from a short name".</p>
<div class="spacer2">
<h2>Feedback</h2>
<p>We've prepared a short form for feedback regarding your LBRY experience, available below.</p>
<p>We're providing 10,000 LBC (~$100) to the first 500 people who download LBRY and submit their feedback.</p>
<p><a href="https://docs.google.com/forms/d/1zqa5jBYQMmrZO1utoF2Ok9ka-gXzXLDZKXNNoprufC8/viewform" class="btn-primary">Provide Your Feedback</a></p>
</div>
</div>
</main>
<?php echo View::render('nav/footer') ?>
<?php /*
*
* <div class="span4">
<h3><span class="icon-windows"></span> Windows</h3>
<p class="meta">
If you have a standard Windows install, it will insinuate several times that you are an idiot for following the steps below.
And perhaps you are, but not because this code is dangerous or will harm your computer in any way. Future releases will involve more reputable install steps.
</p>
<ol>
<li>Download <a href="https://github.com/lbryio/lbry/releases/download/alpha/lbry-windows.zip" class="link-primary">this ZIP</a> file.</li>
<li>There is no installer. Extract the ZIP to wherever you want the program to reside, such as <code>Program Files</code>.</li>
<li>Run lbry.exe.</li>
</ol>
</div>
*/<file_sep><!DOCTYPE html>
<html>
<head prefix="og: http://ogp.me/ns#">
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width">
<?php $title = Response::getMetaTitle() ?: Response::guessMetaTitle($content) ?>
<?php $title = $title ?
$title . (strpos($title, 'LBRY') === false ? ' - LBRY' : '') :
'LBRY' ?>
<title><?php echo $title ?></title>
<link href='//fonts.googleapis.com/css?family=Raleway:600,300' rel='stylesheet' type='text/css'>
<link href="/css/all.css" rel="stylesheet" type="text/css" media="screen,print" />
<link rel="apple-touch-icon" sizes="60x60" href="/img/fav/apple-touch-icon-60x60.png">
<link rel="apple-touch-icon" sizes="114x114" href="/img/fav/apple-touch-icon-114x114.png">
<link rel="apple-touch-icon" sizes="120x120" href="/img/fav/apple-touch-icon-120x120.png">
<link rel="apple-touch-icon" sizes="144x144" href="/img/fav/apple-touch-icon-144x144.png">
<link rel="apple-touch-icon" sizes="180x180" href="/img/fav/apple-touch-icon-180x180.png">
<link rel="icon" type="image/png" href="/img/fav/favicon-32x32.png" sizes="32x32">
<link rel="icon" type="image/png" href="/img/fav/favicon-194x194.png" sizes="194x194">
<link rel="icon" type="image/png" href="/img/fav/favicon-96x96.png" sizes="96x96">
<link rel="icon" type="image/png" href="/img/fav/android-chrome-192x192.png" sizes="192x192">
<link rel="icon" type="image/png" href="/img/fav/favicon-16x16.png" sizes="16x16">
<link rel="manifest" href="/img/fav/manifest.json">
<meta name="description" content="<?php echo Response::getMetaDescription() ?>">
<meta name="msapplication-TileColor" content="#155B4A">
<meta name="msapplication-TileImage" content="/mstile-144x144.png">
<meta name="theme-color" content="#155B4A">
<!-- Twitter Card data -->
<meta name="twitter:site" content="@lbryio">
<meta name="twitter:creator" content="@lbryio">
<!-- Open Graph data -->
<meta property="og:title" content="<?php echo $title ?>" />
<meta property="og:type" content="article" />
<meta property="og:image" content="<?php echo Response::getMetaImage() ?>" />
<meta property="og:description" content="<?php echo Response::getMetaDescription() ?>"/>
<meta property="og:site_name" content="LBRY" />
<base target="_parent" />
</head>
<body>
<?php echo $content ?>
<div class="hide">
<div id="fb-root"></div>
<div id="js">
<?php foreach(Response::getJsAssets() as $src): ?>
<script src="<?php echo $src ?>"></script>
<?php endforeach ?>
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-60403362-1', 'auto');
ga('send', 'pageview');
(function(d, s, id) {
var js, fjs = d.getElementsByTagName(s)[0];
if (d.getElementById(id)) return;
js = d.createElement(s); js.id = id;
js.src = "//connect.facebook.net/en_US/sdk.js#xfbml=1&version=v2.5&appId=1477813539180850";
fjs.parentNode.insertBefore(js, fjs);
}(document, 'script', 'facebook-jssdk'));
window.twttr = (function(d,s,id) {
var js, fjs = d.getElementsByTagName(s)[0],
t = window.twttr || {};
if (d.getElementById(id)) return;
js = d.createElement(s);
js.id = id;
js.src = "//platform.twitter.com/widgets.js";
fjs.parentNode.insertBefore(js, fjs);
t._e = [];
t.ready = function(f) {
t._e.push(f);
};
return t;
}(document, "script", "twitter-wjs"));
</script>
<script>
<?php echo implode("\n", Response::getJsCalls()) ?>
</script>
</div>
</div>
</body>
</html><file_sep><?php Response::addJsAsset('//platform.twitter.com/oct.js') ?>
<?php js_start() ?>
twttr.conversion.trackPid('nty1x', { tw_sale_amount: 0, tw_order_quantity: 0 });
<?php js_end() ?>
<noscript>
<img height="1" width="1" style="display:none;" alt="" src="https://analytics.twitter.com/i/adsct?txn_id=nty1x&p_id=Twitter&tw_sale_amount=0&tw_order_quantity=0" />
<img height="1" width="1" style="display:none;" alt="" src="//t.co/i/adsct?txn_id=nty1x&p_id=Twitter&tw_sale_amount=0&tw_order_quantity=0" />
</noscript> | 23f65ffb8267c523885883986a279bd9d773e374 | [
"PHP"
] | 8 | PHP | teran-mckinney/lbry.io | bc339e7d13a16b2f56ed65eeec26573d88da91a9 | ae42b14c47bd37560d8c3eb89d45825ac6269cd6 | |
refs/heads/master | <repo_name>edlingao/members-only<file_sep>/app/helpers/sessions_helper.rb
module SessionsHelper
def log_in(user)
session[:user_id] = user.id # automatically encrypted
end
def remember(user)
user.remember # user model method
cookies.permanent.signed[:user_id] = user.id
cookies.permanent[:remember_token] = user.remember_token
end
# used to retrieve user on subsequent pages
# because sessions_helper.rb is included in the main app controller,
# it can be called in many different places like:
# <%= current_user.name %> in a view
# redirect_to current_user in a controller
# refactored at 9.9
def current_user
if session[:user_id] #sessions are destroyed after browser closes
@current_user ||= User.find_by(id: session[:user_id])
elsif cookies.signed[:user_id]
user = User.find_by(id: cookies.signed[:user_id])
if user && user.authenticated?(cookies[:remember_token])
log_in user
@current_user = user
end
end
end
def logged_in?
!current_user.nil?
end
def forget(user)
user.forget #model method
cookies.delete(:user_id)
cookies.delete(:remember_token)
end
def log_out
forget(current_user)
session.delete(:user_id)
@current_user = nil
end
end
<file_sep>/app/models/post.rb
class Post < ApplicationRecord
validates :title, presence: true, length: {minimum: 3, maximum: 35}
validates :body, presence: true, length: {minimum: 3, maximum: 500}
validates :user_id, presence: true
belongs_to :user
end
<file_sep>/app/controllers/posts_controller.rb
class PostsController < ApplicationController
before_action :logged_in_user, only: [:new]
def new
@post = Post.new
end
def create
@post = Post.new(post_params)
@post.user_id = current_user.id
if @post.save
flash[:success] = "Post Created!"
redirect_to @post
else
flash.now[:warning] = "There were errors with your post."
render new_post_path
end
end
def index
@posts = Post.all
end
def show
@post = Post.find_by(id: params[:id])
if @post != nil
@user = User.find_by(id: @post.user_id)
end
end
private
def post_params
params.require(:post).permit(:title, :body)
end
end
| 551219e2f907d1621fa6d7eb05ff5da6222b15fe | [
"Ruby"
] | 3 | Ruby | edlingao/members-only | 257dbcf70aa1a28acb2122445c4783f10dbae9cc | cb5650b4f798ad2c959ef3c56b0c078e8bcc49e1 | |
refs/heads/master | <repo_name>jeesun/thymelte<file_sep>/README.md
# thymelte
## 项目简介
这是一个基于Spring Boot+AdminLTE+Thymeleaf的管理端项目,目的是简便管理端的开发。
## 功能列表
- 登录页面:
1. 4种登录方式,用户名+密码,手机号+密码,邮箱+密码,app扫码;
2. 验证码;
- 管理页面:使用Thymeleaf将AdminLTE页面拆分成多个组件,降低耦合;
- 日志保存到文件并按日归档;
- Druid监控功能。访问[http://localhost:8090/druid/index.html](http://localhost:8090/druid/index.html),用户名druid,密码<PASSWORD>。
## 截图



## 使用流程
1. 建表
请参照`oauthserver`模块的说明文档,执行建表语句。
`oauthserver`项目地址:
- [gitee](https://gitee.com/jeesun/oauthserver)
- [github](https://github.com/jeesun/oauthserver)<file_sep>/src/main/java/com/jeesun/thymelte/custom/CustomTokenAuthProvider.java
package com.jeesun.thymelte.custom;
import com.jeesun.thymelte.domain.QrCode;
import com.jeesun.thymelte.domain.UserEntity;
import com.jeesun.thymelte.repository.QrCodeRepository;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.stereotype.Component;
import java.util.Collection;
@Component
public class CustomTokenAuthProvider implements AuthenticationProvider {
private static final Logger logger = Logger.getLogger(CustomTokenAuthProvider.class);
@Autowired
private CustomUserDetailsService userDetailsService;
@Autowired
private QrCodeRepository qrCodeRepository;
@Override
public Authentication authenticate(Authentication authentication) throws AuthenticationException {
logger.error("authenticate");
String username = authentication.getName();
String password = (String) authentication.getCredentials();
UserEntity userEntity = (UserEntity) userDetailsService.loadUserByUsername(username);
if (null == userEntity){
throw new BadCredentialsException("用户名错误");
}
String sid = "";
QrCode qrCode = qrCodeRepository.findBySid(password);
if(password.length() >= 32){
//说明是uuid,此时是扫码登录
sid = qrCode.getSid();
if (username.equals(qrCode.getUsername())){
logger.info("用户名和绑定的一样");
}else{
throw new BadCredentialsException("uuid错误");
}
}
Collection<? extends GrantedAuthority> authorities = userEntity.getAuthorities();
return new UsernameTokenAuthenticationToken(authorities, username, qrCode.getToken(), sid);
}
@Override
public boolean supports(Class<?> aClass) {
return UsernameTokenAuthenticationToken.class.equals(aClass);
}
}
<file_sep>/src/main/resources/static/js/login-qrcode.js
//扫码登录
var qrCode = new QRCode(document.getElementById("qrcode"), {
width : 200,
height : 200
});
var loopVar;
function initQrCode(){
console.log("initQrCode");
setTokenInHeader();
var qrCodeData = $.cookie('qrCodeData');
//console.log(qrCodeData);
if(!qrCodeData || "null" == qrCodeData){
refreshQrCode();
}else{
qrCode.makeCode(qrCodeData);
loop();
}
}
function refreshQrCode() {
$.getJSON("users/uuid", function (data) {
//存cookie
var expiresDate= new Date();
expiresDate.setTime(expiresDate.getTime() + (2 * 60 * 1000));//2分钟过期
$.cookie('qrCodeData', JSON.stringify(data),{
expires: expiresDate
});
qrCode.makeCode(JSON.stringify(data));
loop();
});
}
//循环检查是否被扫码
function loop() {
clearInterval(loopVar);//先清除上次的定时任务(防止用户反复在账号登录和二维码登录的图片间切换造成的多次循环检查问题。)
loopVar = setInterval(function () {
//获取qrCodeData
var qrCodeData = $.cookie('qrCodeData');
//检查qrCodeData是否失效
if(!qrCodeData){
console.log("二维码已失效");
clearInterval(loop);
$("#refreshQrCode").css("visibility","visible");
}else{
var cookieQrCode = JSON.parse(qrCodeData);
var sid = cookieQrCode.sid;
$.getJSON("users/loopCheck/" + sid, function(data){
//console.log(data);
if(data['code'] == 200){
clearInterval(loop);
$.cookie('qrCodeData', null);//赋值"null"字符串
$("#refreshQrCode").css("display", "none");
$("#jumpHint").css("display", "block");
setTimeout(function(){
window.location.href = "/index";
},2000);
}else if(data['code'] == 500){
clearInterval(loopVar);
//console.log(data['message']);
}
});
}
}, 1000);
}
$("#refreshQrCode > a").click(function () {
$("#refreshQrCode").css("visibility","hidden");
initQrCode();
});<file_sep>/src/main/java/com/jeesun/thymelte/config/AuthSuccessHandler.java
package com.jeesun.thymelte.config;
import com.alibaba.fastjson.JSON;
import com.jeesun.thymelte.domain.LogLogin;
import com.jeesun.thymelte.domain.UserEntity;
import com.jeesun.thymelte.repository.LogLoginRepository;
import com.jeesun.thymelte.util.DateUtil;
import com.jeesun.thymelte.util.IpUtil;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.Authentication;
import org.springframework.security.web.authentication.SavedRequestAwareAuthenticationSuccessHandler;
import org.springframework.stereotype.Component;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.IOException;
import java.util.Date;
@Component
public class AuthSuccessHandler extends SavedRequestAwareAuthenticationSuccessHandler {
private static Logger logger = Logger.getLogger(AuthSuccessHandler.class);
@Autowired
private LogLoginRepository logLoginRepository;
@Override
public void onAuthenticationSuccess(HttpServletRequest request, HttpServletResponse response, Authentication authentication) throws IOException, ServletException {
UserEntity userDetails = (UserEntity) authentication.getPrincipal();
LogLogin logLogin = new LogLogin();
logLogin.setUsername(userDetails.getUsername());
logLogin.setCreateTime(new Date());
logLogin.setIp(IpUtil.getIpAddr(request));
logLogin = logLoginRepository.save(logLogin);
logger.info(JSON.toJSONString(logLogin));
HttpSession httpSession = request.getSession();
logger.info("session create time = " + DateUtil.format(httpSession.getCreationTime(), "yyyy-MM-dd HH:mm:ss"));
super.onAuthenticationSuccess(request, response, authentication);
}
}
<file_sep>/src/main/resources/static/js/file_upload.js
/**
*
* User: simon
* Date: 2018/06/06
* Time: 14:45
**/
$(function () {
'use strict';
//必须加这段代码,不然无法上传
var token = $("meta[name='_csrf']").attr("content");
var header = $("meta[name='_csrf_header']").attr("content");
$(document).ajaxSend(function (e, xhr, options) {
xhr.setRequestHeader(header, token);
});
var uploader = $('#file_upload');
uploader.fileupload({
url: "uploadFiles/upload",
dataType: 'json',
type: "post",
multipart: true,
acceptFileTypes: /(\.|\/)(gif|jpe?g|png)$/i,
maxFileSize: 1000 * 1024 * 1024,
maxNumberOfFiles: 50,
change: function (e, data){
$('.preview').empty();
$('.progress-bar').css('width', '0%');
$('.proportion').html('');
$('input[name="imageUrl"]').remove();
},
done: function (e, data) {
var result = data.result;
//done方法就是上传完毕的回调函数,其他回调函数可以自行查看api
//注意data要和jquery的ajax的data参数区分,这个对象包含了整个请求信息
//返回的数据在data.result中,这里dataType中设置的返回的数据类型为json
if(200 == result.code) {
// 上传成功:
$(".preview").append('<div style="margin-top:10px;"><img src="uploadFiles/file/'+result.data[0] + '"></div>');
$(".preview").append("<div><button class='delete_file' type='button'>删除</button></div>");
//设置filePath
$('#file_paths').append('<input type="hidden" name="imageUrl" value="uploadFiles/file/' + result.data[0] + '">');
} else {
// 上传失败:
$(".upstatus").append("<div style='color:red;'>"+result.msg+"</div>");
}
},messages: {
maxFileSize: '文件大小超过限制',
acceptFileTypes: '文件格式不支持'
},progressall: function (e, data) {
var progress = parseInt(data.loaded / data.total * 100, 10);
$(".progress-bar").css("width", progress + "%");
$(".proportion").html("上传总进度:"+progress+"%");
},processfail: function (e, data) {
var currentFile = data.files[data.index];
if (data.files.error && currentFile.error) {
alert(currentFile.error);
}
}
});
$(document).on('click', '.delete_file', function () {
console.log("clicked");
$('.preview').empty();
$('.progress-bar').css('width', '0%');
$('.proportion').html('');
$('input[name="imageUrl"]').remove();
});
});<file_sep>/src/main/resources/static/js/table_hint.js
/**
*
* User: simon
* Date: 2018/06/06
* Time: 19:48
**/
window.hideHint = function () {
$('#hint').css('visibility', 'hidden');
};
window.showHint = function (message) {
$('#hint').text(message);
$('#hint').css('visibility', 'visible');
};<file_sep>/src/main/resources/static/plug-in/buttonLoader/jquery/plugin/jquery.buttonLoader.js
/*A jQuery plugin which add loading indicators into buttons
* By <NAME>
* MIT Licensed.
*/
(function ($) {
$('.has-spinner').attr("disabled", false);
$.fn.buttonLoader = function (action, message) {
var self = $(this);
if (action == 'loading') {
if ($(self).attr("disabled") == "disabled") {
return false;
}
$('.has-spinner').attr("disabled", true);
$(self).attr('data-btn-text', $(self).text());
var text = message;
console.log($(self).attr('data-load-text'));
if($(self).attr('data-load-text') != undefined && $(self).attr('data-load-text') != ""){
text = $(self).attr('data-load-text');
}
$(self).html('<span class="spinner"><i class="fa fa-spinner fa-spin" title="button-loader"></i></span> '+text);
$(self).addClass('active');
}
if (action == 'success') {
$(self).html('<i class="fa fa-check" aria-hidden="true"></i> ' + message);
$(self).removeClass('active');
$('.has-spinner').attr("disabled", false);
$('.has-spinner').removeClass('btn-default btn-primary');
$('.has-spinner').addClass('btn-success');
}
if (action == 'error') {
$(self).html('<i class="fa fa-close" aria-hidden="true"></i> ' + message);
$(self).removeClass('active');
$('.has-spinner').attr("disabled", false);
$('.has-spinner').removeClass('btn-default btn-primary');
$('.has-spinner').addClass('btn-danger');
}
}
})(jQuery);
<file_sep>/pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.jeesun</groupId>
<artifactId>thymelte</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>war</packaging>
<name>thymelte</name>
<description>Demo project for Spring Boot</description>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>1.5.14.RELEASE</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.8</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-oauth2</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-security</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-thymeleaf</artifactId>
</dependency>
<dependency>
<groupId>org.thymeleaf.extras</groupId>
<artifactId>thymeleaf-extras-springsecurity4</artifactId>
<version>2.1.2.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<!--<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-tomcat</artifactId>
</exclusion>
</exclusions>-->
</dependency>
<!-- 打war包时加入此项, 告诉spring-boot tomcat相关jar包用外部的,不要打进去 -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-tomcat</artifactId>
<scope>provided</scope>
</dependency>
<!--<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jetty</artifactId>
</dependency>-->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.security</groupId>
<artifactId>spring-security-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-devtools</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</dependency>
<!-- rabbit mq -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-amqp</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
<dependency>
<groupId>org.mybatis.spring.boot</groupId>
<artifactId>mybatis-spring-boot-starter</artifactId>
<version>1.3.2</version>
</dependency>
<!-- 引入spring boot自带的pagehelper插件 -->
<dependency>
<groupId>com.github.pagehelper</groupId>
<artifactId>pagehelper-spring-boot-starter</artifactId>
<version>1.2.5</version>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatisplus-spring-boot-starter</artifactId>
<version>1.0.5</version>
</dependency>
<!-- 上传图片需要的依赖 -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-hateoas</artifactId>
</dependency>
<dependency>
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.github.ulisesbocchio</groupId>
<artifactId>jasypt-spring-boot-starter</artifactId>
<version>1.8</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-mail</artifactId>
</dependency>
<!-- swagger2依赖start -->
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<version>2.6.1</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
<version>2.6.1</version>
</dependency>
<!-- swagger2依赖end -->
<!--<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-configuration-processor</artifactId>
</dependency>-->
<!--启用不严格检查html-->
<dependency>
<groupId>net.sourceforge.nekohtml</groupId>
<artifactId>nekohtml</artifactId>
<version>1.9.22</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.6</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.7</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.11</version>
</dependency>
<dependency>
<groupId>org.webjars</groupId>
<artifactId>adminlte</artifactId>
<version>2.4.2</version>
</dependency>
<dependency>
<groupId>org.webjars</groupId>
<artifactId>jquery</artifactId>
<version>3.3.1</version>
</dependency>
<dependency>
<groupId>org.webjars</groupId>
<artifactId>bootstrap</artifactId>
<version>3.3.7</version>
</dependency>
<dependency>
<groupId>org.webjars.bower</groupId>
<artifactId>font-awesome</artifactId>
<version>4.7.0</version>
</dependency>
<dependency>
<groupId>org.webjars</groupId>
<artifactId>ionicons</artifactId>
<version>2.0.1</version>
</dependency>
<dependency>
<groupId>org.webjars</groupId>
<artifactId>html5shiv</artifactId>
<version>3.7.3</version>
</dependency>
<dependency>
<groupId>org.webjars</groupId>
<artifactId>respond</artifactId>
<version>1.4.2</version>
</dependency>
<dependency>
<groupId>org.webjars.bower</groupId>
<artifactId>bootstrap-table</artifactId>
<version>1.11.1</version>
</dependency>
<dependency>
<groupId>org.webjars.bower</groupId>
<artifactId>moment</artifactId>
<version>2.21.0</version>
</dependency>
<dependency>
<groupId>org.webjars.bower</groupId>
<artifactId>jquery-validation</artifactId>
<version>1.17.0</version>
</dependency>
<dependency>
<groupId>org.webjars.bower</groupId>
<artifactId>jquery-file-upload</artifactId>
<version>9.18.0</version>
</dependency>
<dependency>
<groupId>org.webjars</groupId>
<artifactId>qrcodejs</artifactId>
<version>1c78ccd</version>
</dependency>
<dependency>
<groupId>org.webjars</groupId>
<artifactId>jquery-cookie</artifactId>
<version>1.4.1-1</version>
</dependency>
<dependency>
<groupId>org.webjars</groupId>
<artifactId>excanvas</artifactId>
<version>3</version>
</dependency>
<dependency>
<groupId>org.webjars.bower</groupId>
<artifactId>echarts</artifactId>
<version>4.0.4</version>
</dependency>
<dependency>
<groupId>org.webjars.bower</groupId>
<artifactId>toastr</artifactId>
<version>2.1.3</version>
</dependency>
<!-- 自定义的jar start -->
<dependency>
<groupId>org.webjars.bower</groupId>
<artifactId>FroalaWysiwygEditor</artifactId>
<version>2.8.1</version>
</dependency>
<!-- 自定义的jar end -->
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>9.4-1206-jdbc42</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>1.1.9</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.47</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.5</version>
</dependency>
<dependency>
<groupId>commons-fileupload</groupId>
<artifactId>commons-fileupload</artifactId>
<version>1.3.3</version>
</dependency>
<!-- Spring Boot打包成war包,需要的jar -->
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>3.1.0</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-joda</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.module</groupId>
<artifactId>jackson-module-parameter-names</artifactId>
</dependency>
<!-- MyBatis start -->
<dependency>
<groupId>org.mybatis.spring.boot</groupId>
<artifactId>mybatis-spring-boot-starter</artifactId>
<version>1.3.2</version>
</dependency>
<dependency>
<groupId>org.mybatis.generator</groupId>
<artifactId>mybatis-generator-maven-plugin</artifactId>
<version>1.3.7</version>
</dependency>
<dependency>
<groupId>org.mybatis.generator</groupId>
<artifactId>mybatis-generator-core</artifactId>
<version>1.3.7</version>
</dependency>
<!-- 分页插件 -->
<dependency>
<groupId>com.github.pagehelper</groupId>
<artifactId>pagehelper-spring-boot-starter</artifactId>
<version>1.2.5</version>
</dependency>
<!-- mybatis-plus插件 -->
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>2.3</version>
</dependency>
<!-- 通用Mapper插件 -->
<dependency>
<groupId>tk.mybatis</groupId>
<artifactId>mapper-spring-boot-starter</artifactId>
<version>2.0.3</version>
</dependency>
<dependency>
<groupId>tk.mybatis</groupId>
<artifactId>mapper-generator</artifactId>
<version>1.0.4</version>
</dependency>
<dependency>
<groupId>org.freemarker</groupId>
<artifactId>freemarker</artifactId>
<version>2.3.27-incubating</version>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-core</artifactId>
<version>4.1.5</version>
</dependency>
<!-- MyBatis end -->
</dependencies>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-dependencies</artifactId>
<version>Camden.SR5</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
<!-- Spring Boot項目打包成war,默认会带版本号。此处设置不带版本号。 -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-war-plugin</artifactId>
<configuration>
<warName>thymelte</warName>
</configuration>
</plugin>
<!-- mybatis generator 自动生成代码插件 -->
<plugin>
<groupId>org.mybatis.generator</groupId>
<artifactId>mybatis-generator-maven-plugin</artifactId>
<version>1.3.6</version>
<configuration>
<configurationFile>${basedir}/src/main/resources/generator/generatorConfig.xml</configurationFile>
<overwrite>true</overwrite>
<verbose>true</verbose>
</configuration>
<executions>
<execution>
<id>Generate MyBatis Artifacts</id>
<goals>
<goal>generate</goal>
</goals>
</execution>
</executions>
<dependencies>
<!-- 配置这个依赖主要是为了等下在配置MG的时候可以不用配置classPathEntry这样的一个属性 -->
<!-- 避免代码的耦合度太高 -->
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>9.4-1206-jdbc42</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>pl.project13.maven</groupId>
<artifactId>git-commit-id-plugin</artifactId>
<configuration>
<dotGitDirectory>${project.basedir}/.git</dotGitDirectory>
</configuration>
</plugin>
</plugins>
<resources>
<!-- 如果mapper.xml是放在src/main/java目录下,需配置以下-->
<!--<resource>
<directory>src/main/java</directory>
<filtering>false</filtering>
<includes>
<include>**/mapping/*.xml</include>
</includes>
</resource>-->
<!-- 解决yml配置文件无法使用@..@占位符引用Maven项目的属性 -->
<resource>
<directory>src/main/resources</directory>
<filtering>true</filtering>
</resource>
</resources>
</build>
</project>
<file_sep>/src/main/resources/messages.properties
usernameNotFound=\u7528\u6237\u540d\u4e0d\u5b58\u5728
accountDisabled=\u60a8\u5df2\u88ab\u5c01\u53f7
passwordError=\<PASSWORD>
tokenWasNotRecognised=\<KEY>
helloWorld=\u4f60\u597d\uff0c\u4e16\u754c
invalidToken=\u<PASSWORD>\u6548\u7684\u<PASSWORD>\u<PASSWORD>\u<PASSWORD>\<PASSWORD>\<PASSWORD>
approve=\u540c\u610f
deny=\u62d2\u7edd
authorize=\u6388\u6743
appApproval=\u5e94\u7528\u6388\u6743
doYouAuthorize=\u4f60\u540c\u610f\u6388\u6743\u5e94\u7528
toAccessYouProtectedResources=\u83b7\u53d6\u60a8\u7684\u53d7\u4fdd\u62a4\u7684\u8d44\u6e90\u5417\uff1f
login=\u767b\u5f55
signIn=\u767b\u5f55
username=\u7528\u6237\u540d
password=\<PASSWORD>
error=\u9519\u8bef
userNotValid=\u7528\u6237\u65e0\u6548
veriCodeInvalid=\u9a8c\u8bc1\u7801\u8fc7\u671f<file_sep>/src/main/java/com/jeesun/thymelte/config/DruidConfiguration.java
package com.jeesun.thymelte.config;
import com.alibaba.druid.support.http.StatViewServlet;
import com.alibaba.druid.support.http.WebStatFilter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.boot.web.servlet.ServletRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
@Configuration
public class DruidConfiguration {
@Autowired
private Environment env;
@Bean
public ServletRegistrationBean statViewServlet(){
ServletRegistrationBean bean = new ServletRegistrationBean(new StatViewServlet(), "/druid/*");
//IP白名单
bean.addInitParameter("allow", env.getProperty("druid.allow"));
//IP黑名单(共同存在时,deny优先于allow)
bean.addInitParameter("deny", env.getProperty("druid.deny"));
//控制台管理用户
bean.addInitParameter("loginUsername", env.getProperty("druid.login-username"));
bean.addInitParameter("loginPassword", <PASSWORD>("<PASSWORD>"));
//是否能够重置数据
bean.addInitParameter("resetEnable", env.getProperty("druid.reset-enable"));
return bean;
}
@Bean
public FilterRegistrationBean statFilter(){
FilterRegistrationBean bean = new FilterRegistrationBean(new WebStatFilter());
//添加过滤规则
bean.addUrlPatterns("/*");
//忽略过滤的格式
bean.addInitParameter("exclusions", "*.js,*.gif,*.svg,*.jpg,*.png,*.css,*.ico,/druid/*");
return bean;
}
}
<file_sep>/src/main/java/com/jeesun/thymelte/repository/UserInfoRepository.java
package com.jeesun.thymelte.repository;
import com.jeesun.thymelte.domain.UserInfo;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
/**
* Created by simon on 2016/8/13.
*/
@Repository
public interface UserInfoRepository extends JpaRepository<UserInfo, Long> {
UserInfo findById(Long id);
UserInfo findByUserId(Long userId);
UserInfo findByPhone(String phone);
UserInfo findByUsername(String username);
UserInfo findByEmail(String email);
}
<file_sep>/src/main/java/com/jeesun/thymelte/repository/LogLoginRepository.java
package com.jeesun.thymelte.repository;
import com.jeesun.thymelte.domain.LogLogin;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
@Repository
public interface LogLoginRepository extends JpaRepository<LogLogin, Long> {
}
<file_sep>/src/main/resources/static/js/register_form.js
/**
*
* User: simon
* Date: 2018/07/08
* Time: 10:01
**/
$().ready(function() {
//createCode();
setTokenInHeader();
jQuery.validator.addMethod("password", function (value, element, param) {
var rex = /^(?![A-Z]+$)(?![a-z]+$)(?!\d+$)(?![\W_]+$)\S{6,16}$/;
return this.optional(element) || (rex.test(value));
}, "必须包含数字、字母或特殊字符");
$("#registerForm").validate({
debug: false,
onkeyup: false,
rules:{
username: {
required: true,
minlength: 1,
remote: {
type: 'POST',
url: 'users/checkExists',
data: {
username: function () {
return $('#username').val();
}
}
}
},
password: {
required: true,
rangelength: [6,20],
password: true
},
verifyPassword: {
required: true,
rangelength: [6,20],
password: <PASSWORD>,
equalTo: '#password'
}
},
messages: {
username:{
required: "用户名必填",
remote: "用户名已存在"
},
password: {
required: '密码必填'
},
verifyPassword: {
equalTo: '密码不相同'
}
},
errorPlacement: function(error, element) {
error.appendTo(element.parent());
},
errorElement: "span"
});
});<file_sep>/src/main/resources/static/js/login.js
$(function(){
$(".switchMode").on({
click:function(e){
if($(this).attr("src").indexOf('qrcode-40x40.png') != -1){
//$(this).attr("src", "img/pc-40x40.png");
//window.location.href = "login?qrcode=true";
var qrcode = $.cookie('qrcode');
console.log("qrcode=" + qrcode);
if(qrcode == 'false' || !qrcode){
console.log("准备执行initQrCode");
$.cookie('qrcode', true);
initQrCode();
}
$('#form-qrcode').css('display', 'inline');
$('#form-normal').css('display', 'none');
}else {
//$(this).attr("src", "img/qrcode-40x40.png");
//window.location.href = "login";
if(qrcode == true || !qrcode){
$.cookie('qrcode', false);
}
$('#form-qrcode').css('display', 'none');
$('#form-normal').css('display', 'inline');
}
}
});
});<file_sep>/src/main/java/com/jeesun/thymelte/domain/Authority.java
package com.jeesun.thymelte.domain;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import org.springframework.security.core.GrantedAuthority;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import java.io.Serializable;
/**
* @author SimonSun
* @create 2018-08-17 23:33:01
**/
@ApiModel(description = "Authority")
@Data
@Entity
@Table(name="authorities")
public class Authority implements GrantedAuthority, Serializable {
private static final long serialVersionUID = 1L;
@Id
@ApiModelProperty(value = "user_id")
@Column(name = "user_id", nullable = false)
private Long userId;
@ApiModelProperty(value = "authority")
@Column(name = "authority", nullable = false)
private String authority;
}<file_sep>/src/main/resources/static/js/login-form.js
//账号密码登录
$().ready(function() {
//createCode();
jQuery.validator.addMethod("password", function (value, element, param) {
var rex = /^(?![A-Z]+$)(?![a-z]+$)(?!\d+$)(?![\W_]+$)\S{6,16}$/;
return this.optional(element) || (rex.test(value));
}, "必须包含数字、字母或特殊字符");
jQuery.validator.addMethod("vericode", function (value, element, param) {
return validate(value);
}, "验证码错误");
$("#loginForm").validate({
rules:{
username: {
required: true,
minlength: 1
},
password: {
required: true,
rangelength: [6,20],
password: <PASSWORD>
},
input_vericode: {
required: true,
minlength: 4,
vericode: true
}
},
errorPlacement: function(error, element) {
error.appendTo(element.parent());
},
errorElement: "span"
});
});<file_sep>/src/main/resources/static/js/util.js
function getUrlParam(name) {
var reg = new RegExp("(^|&)" + name + "=([^&]*)(&|$)");
var r = window.location.search.substr(1).match(reg);
if (r != null)return decodeURI(r[2]);
return null;
}
function getLocalTime(timestamp) {
return new Date(parseInt(timestamp)).toLocaleString().replace(/:\d{1,2}$/, ' ');
}<file_sep>/src/main/resources/static/js/common.js
/**
*
* User: simon
* Date: 2018/06/07
* Time: 13:01
**/
function setTokenInHeader() {
var token = $("meta[name='_csrf']").attr("content");
var header = $("meta[name='_csrf_header']").attr("content");
$(document).ajaxSend(function (e, xhr, options) {
xhr.setRequestHeader(header, token);
});
}
//左侧菜单栏选中事件初始化
$('.sidebar-menu li').each(function () {
$(this).removeClass("active");
});
var pathValue = window.location.href;
var pathName = pathValue.substring(pathValue.lastIndexOf('/') + 1);
$('.sidebar-menu li').each(function () {
if(pathName == '' && $(this).children('a').attr('href').indexOf('index') != -1){
$(this).addClass("active");
$(this).parents(".treeview").addClass("active");
}else if(pathName != ''&& $(this).children('a').attr('href').indexOf(pathName) != -1){
$(this).addClass("active");
$(this).parents(".treeview").addClass("active");
}
});
/*var theme = $.cookie('theme');
console.log(theme);
if(!theme || null == theme){
theme = 'blue';
$.cookie('theme', theme);
}
changeTheme(theme);*/
function changeTheme(themeName) {
/*$('body').removeClass('skin-blue skin-blue-light skin-green skin-green-light skin-red skin-red-light skin-black skin-black-light skin-purple skin-purple-light skin-yellow skin-yellow-light');*/
$('body').removeClass($.cookie('theme'));
$('body').addClass(themeName);
$.cookie('theme', themeName, {expires: 30});
}<file_sep>/src/main/resources/static/js/news_info.js
/**
*
* User: simon
* Date: 2018/06/06
* Time: 19:23
**/
$(function(){
setTokenInHeader();
initTable('table', {
url: 'newsInfos/pageable',
columns: [{
field: 'state',
checkbox: true
},{
field: 'title',
title: '标题',
align: 'center',
searchable: true
},{
field: 'imageUrl',
title: '缩略图',
align: 'center',
formatter: function (value, row, index, field) {
return '<img src="' + value + '" />';
}
},{
field: 'content',
title: '内容',
align: 'center',
formatter: function (value, row, index, field) {
//console.log(decodeURIComponent(value));
return '<div style="max-width:600px; text-align: center">' + decodeURIComponent(value) + '</div>';
}
},{
field: 'publishDate',
title: '发布时间',
align: 'center',
formatter: function (value) {
return new Date(value).format("yyyy-MM-dd hh:mm:ss");
}
}],
ignoreColumn: [0, 2]
});
$('#commit_change').click(function () {
$.ajax({
cache: true,
type: "POST",
url:"newsInfos",
data:$('#form_add').serialize(),// 你的form id
async: true,
error: function(request) {
alert("Connection error:"+request.error);
},
success: function(data) {
$('#myModal').modal('hide');
showHint(data.message);
setTimeout('hideHint()', 2000);
refreshTable('table');
}
});
});
$('#plus').click(function () {
$('#form_add input[name="id"]').val('');
$('#form_add input[name="title"]').val('');
$("#form_add .preview").empty();
$('#file_paths').empty();
$('#form_add input[name="content"]').val('');
$('.preview').empty();
$('.progress-bar').css('width', '0%');
$('.proportion').html('');
$('input[name="imageUrl"]').remove();
$('#myModal').modal('show');
});
$('#edit').click(function () {
var $select = $('#table').bootstrapTable('getSelections');
if($select.length <= 0){
alert('请至少选中一项');
}else if($select.length > 1){
alert('请选择一项');
}else{
$('#form_add input[name="id"]').val($select[0].id);
$('#form_add input[name="title"]').val($select[0].title);
$("#form_add .preview").empty();
$("#form_add .preview").append('<div style="margin-top:10px;"><img src="' +$select[0].imageUrl + '"></div>');
//$('#form_add input[name="content"]').val($select[0].content);//对froalaEditor无效
$('textarea').froalaEditor('html.set', $select[0].content);
//设置filePath
$('#file_paths').empty();
$('#file_paths').append('<input type="hidden" name="imageUrl" value="' + $select[0].imageUrl + '">');
$('#myModal').modal('show');
}
});
$('#minus').click(function () {
var $select = $('#table').bootstrapTable('getSelections');
if ($select.length <= 0) {
alert('请至少选中一项');
} else if ($select.length > 1) {
var i;
var ids = [];
for(i = 0; i < $select.length; i++){
ids[i] = $select[i].id;
}
$.get('newsInfos/delete/ids/' + ids.join(','), function(data){
if(200 == data['code']){
showHint(data.message);
setTimeout('hideHint()', 2000);
}
refreshTable('table');
});
} else {
$.get('newsInfos/delete/id/'+$select[0].id, function(data){
if(200 == data['code']){
showHint(data.message);
setTimeout('hideHint()', 2000);
}
refreshTable('table');
});
}
});
});<file_sep>/src/main/java/com/jeesun/thymelte/domain/NewsTag.java
package com.jeesun.thymelte.domain;
import com.simon.common.utils.SnowflakeGenId;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import org.hibernate.annotations.GenericGenerator;
import tk.mybatis.mapper.annotation.KeySql;
import javax.persistence.*;
import java.io.Serializable;
/**
* @author SimonSun
* @create 2018-08-18 00:41:52
**/
@ApiModel(description = "NewsTag")
@Data
@Entity
@Table(name="news_tag")
public class NewsTag implements Serializable{
private static final long serialVersionUID = 1L;
@Id
@KeySql(genId = SnowflakeGenId.class)
@GeneratedValue(generator = "sequenceId")
@GenericGenerator(name = "sequenceId", strategy = "com.simon.common.utils.snowflake.SequenceId")
private Long id;
@ApiModelProperty(value = "news_info_id")
@Column(name = "news_info_id")
private Long newsInfoId;
@ApiModelProperty(value = "tag_id")
@Column(name = "tag_id")
private Long tagId;
}<file_sep>/src/main/java/com/jeesun/thymelte/domain/LogLogin.java
package com.jeesun.thymelte.domain;
import com.simon.common.utils.SnowflakeGenId;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import org.hibernate.annotations.GenericGenerator;
import tk.mybatis.mapper.annotation.KeySql;
import javax.persistence.*;
import java.io.Serializable;
import java.util.Date;
/**
* @author SimonSun
* @create 2018-08-17 23:33:01
**/
@ApiModel(description = "LogLogin")
@Data
@Entity
@Table(name="log_login")
public class LogLogin implements Serializable{
private static final long serialVersionUID = 1L;
@Id
@KeySql(genId = SnowflakeGenId.class)
@GeneratedValue(generator = "sequenceId")
@GenericGenerator(name = "sequenceId", strategy = "com.simon.common.utils.snowflake.SequenceId")
private Long id;
@ApiModelProperty(value = "create_time")
@Column(name = "create_time")
private Date createTime;
@ApiModelProperty(value = "ip")
@Column(name = "ip")
private String ip;
@ApiModelProperty(value = "username")
@Column(name = "username")
private String username;
}<file_sep>/src/main/java/com/jeesun/thymelte/custom/ErrorPageInterceptor.java
package com.jeesun.thymelte.custom;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.handler.HandlerInterceptorAdapter;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.Arrays;
import java.util.List;
/**
* 错误页面拦截器
*
* @author simon
* @create 2018-06-13 1:39
**/
@Component
public class ErrorPageInterceptor extends HandlerInterceptorAdapter {
private List<Integer> errorCodeList = Arrays.asList(404, 403, 500, 501);
@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws
Exception {
if (errorCodeList.contains(response.getStatus())) {
response.sendRedirect("error/" + response.getStatus());
return false;
}
return super.preHandle(request, response, handler);
}
}
<file_sep>/src/main/java/com/jeesun/thymelte/controller/UploadFileController.java
package com.jeesun.thymelte.controller;
import com.jeesun.thymelte.domain.ResultMsg;
import com.jeesun.thymelte.util.FileUploadUtil;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ResourceLoader;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.multipart.MultipartFile;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
/**
* 上传文件
*
* @author simon
* @create 2018-06-06 2:12
**/
@Controller
@RequestMapping("uploadFiles")
public class UploadFileController {
private static Logger logger = Logger.getLogger(UploadFileController.class);
private final ResourceLoader resourceLoader;
private final String ROOT = "upload";
@Autowired
public UploadFileController(ResourceLoader resourceLoader) {
this.resourceLoader = resourceLoader;
if(!Files.exists(Paths.get(ROOT))){
try{
Files.createDirectories(Paths.get(ROOT));
}catch (IOException e){
logger.error(e);
}
}
}
@RequestMapping(value = "upload", method = RequestMethod.POST)
@ResponseBody
public ResultMsg post(@RequestParam("file") MultipartFile[] files){
String[] savedFiles = FileUploadUtil.saveFiles(files);
if(null == savedFiles || savedFiles.length <= 0){
return new ResultMsg(404, "保存文件失败", null);
}else{
return new ResultMsg(200, "保存文件成功", savedFiles);
}
}
@RequestMapping(value = "uploadFile", method = RequestMethod.POST)
@ResponseBody
public Map<String, Object> uploadTest(@RequestParam("file") MultipartFile[] files){
Map<String, Object> resultMap = new HashMap<>();
String[] savedFiles = FileUploadUtil.saveFiles(files);
if(null == savedFiles || savedFiles.length <= 0){
resultMap.put("link", null);
}else{
StringBuffer filePath = new StringBuffer();
filePath.append("uploadFiles");
filePath.append("/file/");
filePath.append(savedFiles[0]);
resultMap.put("link", filePath);
}
return resultMap;
}
@RequestMapping(value = "/file/**", method = RequestMethod.GET)
@ResponseBody
public ResponseEntity<?> getFile(HttpServletRequest request, HttpServletResponse response) {
String filePath = request.getRequestURI();
//logger.info(filePath);
filePath = filePath.substring(filePath.indexOf("uploadFiles/file/") + "uploadFiles/file/".length());
//logger.info(filePath);
String fileRoot = filePath.substring(0, filePath.lastIndexOf("/"));
//logger.info(fileRoot);
String fileName = filePath.substring(filePath.lastIndexOf("/") + 1);
//logger.info(fileName);
try {
return ResponseEntity.ok(resourceLoader.getResource("file:" + Paths.get(fileRoot, fileName).toString()));
} catch (Exception e) {
return ResponseEntity.notFound().build();
}
}
}
<file_sep>/src/main/resources/static/js/vcode.js
var code = "";
$().ready(function() {
//将函数返回值赋给code
code = createCode();
//点击canvas图片更换验证码
$("#codeimg").click(function () {
code = createCode();
});
/*随机字符函数*/
function rand(){
//去掉i,I,l,o,O等易混淆字母
var str="abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ0123456789";
//将字符串分隔为数组
var arr=str.split("");
//随机字符在[0,56]之间
var ranNum=Math.floor(Math.random()*57);
var captcha=arr[ranNum];
return captcha;
}
/*随机干扰线条函数*/
function drawline(canvas, context) {
//若省略beginPath,则每点击一次验证码会累积干扰线的条数
context.beginPath();
//起点与终点在canvas宽高内随机
context.moveTo(Math.floor(Math.random() * canvas.width), Math.floor(Math.random() * canvas.height));
context.lineTo(Math.floor(Math.random() * canvas.width), Math.floor(Math.random() * canvas.height));
context.lineWidth = 1;
context.strokeStyle = '#275DB3';
context.stroke();
}
/*生成验证码*/
function createCode(){
//每次生成code先将其清空防止叠加
code = "";
var canvas = document.getElementById("codeimg");
if(!canvas){
return null;
}
var context = canvas.getContext("2d");
//清空画布
context.clearRect(0, 0, canvas.width, canvas.height);
context.strokeStyle = "#FFF";
context.strokeRect(0, 0, canvas.width, canvas.height);
//生成干扰线,数量随意
for (var i = 0; i < 10; i++) {
drawline(canvas, context);
}
//循环生成4位验证码
for (var k = 0; k < 4; k++) {
context.font='76px Arial';
//将初始状态保存
context.save();
//获得-1到1的随机数
var rA = 1-Math.random()*2;
//获取随机倾斜角
var angle = rA / 8 ;
var ranNum = rand();
//旋转生成的随机字符
context.rotate(angle);
//把rand()生成的随机数文本依次填充到canvas中,注意x坐标
context.fillText(ranNum,20+45*k,100);
//恢复初始状态,以便下一次循环
context.restore();
code += ranNum;
}
//返回生成的验证码字符串
return code;
}
});
function validate(value) {
return value.toUpperCase() === code.toUpperCase();
}<file_sep>/src/main/resources/messages_en_US.properties
usernameNotFound=username not found
accountDisabled=account was suspended
passwordError=<PASSWORD>
tokenWasNotRecognised=Token was not recognised
helloWorld=hello, world
invalidToken=invalid token
approve=Approve
deny=Deny
authorize=authorize
appApproval=App Approval
doYouAuthorize=Do you authorize app named
toAccessYouProtectedResources=to access your protected resources?
login=login
signIn=sign in
username=username
password=<PASSWORD>
error=error
userNotValid=user not valid
veriCodeInvalid=verification code is invalid | 2810559d3498deb2a639bf4ee8cb5fa01c54ad3e | [
"Markdown",
"JavaScript",
"Maven POM",
"INI",
"Java"
] | 25 | Markdown | jeesun/thymelte | ca4b992f2abb0029b4adf2a4254ca755585439d1 | 5bad0081d9e9b6776b21a0847fdb8df7dc46e013 | |
refs/heads/master | <file_sep>var searchData=
[
['tablestruct_5fmensaje_5f2eproto_73',['TableStruct_mensaje_2eproto',['../struct_table_struct__mensaje__2eproto.html',1,'']]]
];
<file_sep>var searchData=
[
['directmessage_57',['DirectMessage',['../classchat_1_1_direct_message.html',1,'chat']]],
['directmessagedefaulttypeinternal_58',['DirectMessageDefaultTypeInternal',['../classchat_1_1_direct_message_default_type_internal.html',1,'chat']]],
['directmessagerequest_59',['DirectMessageRequest',['../classchat_1_1_direct_message_request.html',1,'chat']]],
['directmessagerequestdefaulttypeinternal_60',['DirectMessageRequestDefaultTypeInternal',['../classchat_1_1_direct_message_request_default_type_internal.html',1,'chat']]],
['directmessageresponse_61',['DirectMessageResponse',['../classchat_1_1_direct_message_response.html',1,'chat']]],
['directmessageresponsedefaulttypeinternal_62',['DirectMessageResponseDefaultTypeInternal',['../classchat_1_1_direct_message_response_default_type_internal.html',1,'chat']]]
];
<file_sep>var searchData=
[
['errorresponse_26',['ErrorResponse',['../classchat_1_1_error_response.html',1,'chat']]],
['errorresponsedefaulttypeinternal_27',['ErrorResponseDefaultTypeInternal',['../classchat_1_1_error_response_default_type_internal.html',1,'chat']]]
];
<file_sep># DOC-ProyectoChat
Documentación del proyecto #1 de la clase de Sistemas Operativos.
<file_sep>var searchData=
[
['myinfoacknowledge_28',['MyInfoAcknowledge',['../classchat_1_1_my_info_acknowledge.html',1,'chat']]],
['myinfoacknowledgedefaulttypeinternal_29',['MyInfoAcknowledgeDefaultTypeInternal',['../classchat_1_1_my_info_acknowledge_default_type_internal.html',1,'chat']]],
['myinforesponse_30',['MyInfoResponse',['../classchat_1_1_my_info_response.html',1,'chat']]],
['myinforesponsedefaulttypeinternal_31',['MyInfoResponseDefaultTypeInternal',['../classchat_1_1_my_info_response_default_type_internal.html',1,'chat']]],
['myinfosynchronize_32',['MyInfoSynchronize',['../classchat_1_1_my_info_synchronize.html',1,'chat']]],
['myinfosynchronizedefaulttypeinternal_33',['MyInfoSynchronizeDefaultTypeInternal',['../classchat_1_1_my_info_synchronize_default_type_internal.html',1,'chat']]]
];
<file_sep>var searchData=
[
['broadcastmessage_1',['BroadcastMessage',['../classchat_1_1_broadcast_message.html',1,'chat']]],
['broadcastmessagedefaulttypeinternal_2',['BroadcastMessageDefaultTypeInternal',['../classchat_1_1_broadcast_message_default_type_internal.html',1,'chat']]],
['broadcastrequest_3',['BroadcastRequest',['../classchat_1_1_broadcast_request.html',1,'chat']]],
['broadcastrequestdefaulttypeinternal_4',['BroadcastRequestDefaultTypeInternal',['../classchat_1_1_broadcast_request_default_type_internal.html',1,'chat']]],
['broadcastresponse_5',['BroadcastResponse',['../classchat_1_1_broadcast_response.html',1,'chat']]],
['broadcastresponsedefaulttypeinternal_6',['BroadcastResponseDefaultTypeInternal',['../classchat_1_1_broadcast_response_default_type_internal.html',1,'chat']]]
];
<file_sep>var searchData=
[
['broadcastmessage_38',['BroadcastMessage',['../classchat_1_1_broadcast_message.html',1,'chat']]],
['broadcastmessagedefaulttypeinternal_39',['BroadcastMessageDefaultTypeInternal',['../classchat_1_1_broadcast_message_default_type_internal.html',1,'chat']]],
['broadcastrequest_40',['BroadcastRequest',['../classchat_1_1_broadcast_request.html',1,'chat']]],
['broadcastrequestdefaulttypeinternal_41',['BroadcastRequestDefaultTypeInternal',['../classchat_1_1_broadcast_request_default_type_internal.html',1,'chat']]],
['broadcastresponse_42',['BroadcastResponse',['../classchat_1_1_broadcast_response.html',1,'chat']]],
['broadcastresponsedefaulttypeinternal_43',['BroadcastResponseDefaultTypeInternal',['../classchat_1_1_broadcast_response_default_type_internal.html',1,'chat']]]
];
<file_sep>var searchData=
[
['myinfoacknowledge_65',['MyInfoAcknowledge',['../classchat_1_1_my_info_acknowledge.html',1,'chat']]],
['myinfoacknowledgedefaulttypeinternal_66',['MyInfoAcknowledgeDefaultTypeInternal',['../classchat_1_1_my_info_acknowledge_default_type_internal.html',1,'chat']]],
['myinforesponse_67',['MyInfoResponse',['../classchat_1_1_my_info_response.html',1,'chat']]],
['myinforesponsedefaulttypeinternal_68',['MyInfoResponseDefaultTypeInternal',['../classchat_1_1_my_info_response_default_type_internal.html',1,'chat']]],
['myinfosynchronize_69',['MyInfoSynchronize',['../classchat_1_1_my_info_synchronize.html',1,'chat']]],
['myinfosynchronizedefaulttypeinternal_70',['MyInfoSynchronizeDefaultTypeInternal',['../classchat_1_1_my_info_synchronize_default_type_internal.html',1,'chat']]]
];
<file_sep>var searchData=
[
['directmessage_20',['DirectMessage',['../classchat_1_1_direct_message.html',1,'chat']]],
['directmessagedefaulttypeinternal_21',['DirectMessageDefaultTypeInternal',['../classchat_1_1_direct_message_default_type_internal.html',1,'chat']]],
['directmessagerequest_22',['DirectMessageRequest',['../classchat_1_1_direct_message_request.html',1,'chat']]],
['directmessagerequestdefaulttypeinternal_23',['DirectMessageRequestDefaultTypeInternal',['../classchat_1_1_direct_message_request_default_type_internal.html',1,'chat']]],
['directmessageresponse_24',['DirectMessageResponse',['../classchat_1_1_direct_message_response.html',1,'chat']]],
['directmessageresponsedefaulttypeinternal_25',['DirectMessageResponseDefaultTypeInternal',['../classchat_1_1_direct_message_response_default_type_internal.html',1,'chat']]]
];
<file_sep>var searchData=
[
['tablestruct_5fmensaje_5f2eproto_36',['TableStruct_mensaje_2eproto',['../struct_table_struct__mensaje__2eproto.html',1,'']]]
];
| f3191fc6dc30a467dde3141292b5813e5558f7f0 | [
"JavaScript",
"Markdown"
] | 10 | JavaScript | chchew/DOC-ProyectoChat | 45993e7b4492dd9c1708bbb75832feb7ebf07b89 | f23b08fa73d6277235c91f282fb6acef3eb31a7e | |
refs/heads/main | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Drawing;
namespace PhotoEnhancer
{
public static class Convertors
{
public static Photo Bitmap2Photo(Bitmap bmp)
{
var result = new Photo(bmp.Width, bmp.Height);
for(var x = 0; x < bmp.Width; x++)
for(var y = 0; y < bmp.Height; y++)
{
var pixel = bmp.GetPixel(x, y);
//result[x, y].R = (double)pixel.R / 255;
//result[x, y].G = (double)pixel.G / 255;
//result[x, y].B = (double)pixel.B / 255;
result[x, y] = new Pixel(
(double)pixel.R / 255,
(double)pixel.G / 255,
(double)pixel.B / 255
);
}
return result;
}
public static Bitmap Photo2Bitmap(Photo photo)
{
var result = new Bitmap(photo.Width, photo.Height);
for(var x = 0; x < photo.Width; x++)
for(var y = 0; y < photo.Height; y++)
{
result.SetPixel(x, y, Color.FromArgb(
(int)(photo[x, y].R * 255),
(int)(photo[x, y].G * 255),
(int)(photo[x, y].B * 255)
));
}
return result;
}
public static double GetPixelHue(Pixel p)
{
Color color = Pixel2Color(p);
return color.GetHue();
}
public static double GetPixelSaturation(Pixel p)
{
var color = Pixel2Color(p);
return color.GetSaturation();
}
public static double GetPixelLightness(Pixel p)
{
var color = Pixel2Color(p);
return color.GetBrightness();
}
public static Color Pixel2Color(Pixel p)
{
return Color.FromArgb((int)(p.R * 255), (int)(p.G * 255), (int)(p.B * 255));
}
public static Pixel HSL2Pixel(double hue, double saturation, double lightness)
{
double q;
if (lightness < 0.5)
q = lightness * (1 + saturation);
else
q = lightness + saturation - lightness * saturation;
double p = 2 * lightness - q;
double h = hue / 360;
var t = new[] { h + 1.0 / 3, h, h - 1.0 / 3 };
for (var i = 0; i < 3; i++)
if (t[i] < 0)
t[i] += 1;
else if (t[i] > 1)
t[i] -= 1;
var rgb = new double[3];
for (var i = 0; i < 3; i++)
if (t[i] < 1.0 / 6)
rgb[i] = p + ((q - p) * 6 * t[i]);
else if (t[i] < 0.5)
rgb[i] = q;
else if (t[i] < 2.0 / 3)
rgb[i] = p + ((q - p) * (2.0 / 3 - t[i]) * 6);
else
rgb[i] = p;
return new Pixel(rgb[0], rgb[1], rgb[2]);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace PhotoEnhancer
{
class ReductionInSizeParameters : IParameters
{
public double ReductionInSizeParameter { get; set; }
public ParameterInfo[] GetDescription()
{
return new[]
{
new ParameterInfo()
{
Name = "Коэффициент уменьшения",
MinValue = 1,
MaxValue = 10,
DefailtValue =1,
Increment = 0.5
}
};
}
public void SetValues(double[] values)
{
ReductionInSizeParameter = values[0];
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace PhotoEnhancer
{
class GammaCorectionColorsParameters : IParameters
{
public double GammaCorectionColorsRByUser { get; set; }
public double GammaCorectionColorsGByUser { get; set; }
public double GammaCorectionColorsBByUser { get; set; }
public ParameterInfo[] GetDescription()
{
return new[]
{
new ParameterInfo() {
Name = "Канал R",
MinValue = 0.2,
MaxValue = 5,
DefailtValue = 1,
Increment = 0.01
},
new ParameterInfo()
{
Name = "Канал G",
MinValue = 0.2,
MaxValue = 5,
DefailtValue = 1,
Increment = 0.01
},
new ParameterInfo()
{
Name = "Канал B",
MinValue = 0.2,
MaxValue = 5,
DefailtValue = 1,
Increment = 0.01
}
};
}
public void SetValues(double[] values)
{
GammaCorectionColorsRByUser = values[0];
GammaCorectionColorsGByUser = values[1];
GammaCorectionColorsBByUser = values[2];
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Variant5
{
public struct Record
{
public int ClientID; // идентификационный номер клиента
public int Year; // год
public int Month; // номер месяца
public int Duration; // продолжительность занятий в данном месяце
// данного года (в часах) }
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace PhotoEnhancer
{
class GammaCorectionColorsFilter : PixelFilter
{
public GammaCorectionColorsFilter() : base(new GammaCorectionColorsParameters()) {}
public override string ToString()
{
return "Гамма-корекция цвета";
}
public override Pixel ProcessPixel(Pixel originalPixel, IParameters parameters)
{
var newR = Math.Pow(originalPixel.R, 1 / (parameters as GammaCorectionColorsParameters).GammaCorectionColorsRByUser);
var newG = Math.Pow(originalPixel.G, 1 / (parameters as GammaCorectionColorsParameters).GammaCorectionColorsGByUser);
var newB = Math.Pow(originalPixel.R, 1 / (parameters as GammaCorectionColorsParameters).GammaCorectionColorsBByUser);
return new Pixel(newR, newG, newB);
}
}
}
<file_sep>using System;
namespace PhotoEnhancer
{
public abstract class ParametrizedFilter : IFilter
{
IParameters parameters;
public ParametrizedFilter(IParameters p)
{
parameters = p;
}
public ParameterInfo[] GetParametersInfo()
{
return parameters.GetDescription();
}
public Photo Process(Photo original, double[] values)
{
parameters.SetValues(values);
return Process(original, parameters);
}
public abstract Photo Process(Photo original, IParameters parameters);
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace Variant5
{
internal class Program
{
public static void Main(string[] args)
{
var text = new[]
{
"Текст. представлен. в виде массива строк, слова в которых; разделены: пробелами-и знаками",
"Новый, массив; должен содержать не: более- n таких слов. Все слова должны быть в нижнем регистре."
};
var words = GetLongestWords(text, 5);
foreach (var word in words)
Console.WriteLine(word);
var strs = File.ReadAllLines("db.txt");
var dataBase = new List<Record>();
foreach (var str in strs)
{
var data = str.Split();
var record = new Record()
{
ClientID = int.Parse(data[0]),
Year = int.Parse(data[1]),
Month = int.Parse(data[2]),
Duration = int.Parse(data[3])
};
dataBase.Add(record);
}
PrintYearsLongestDurationOfMonth(dataBase, 2);
Console.ReadKey();
}
public static string[] GetLongestWords(string[] lines, int n)
{
return lines.SelectMany(x => x.Split(new[] {'-', ':', ',', '.', ' ', ';'}))
.OrderBy(x => x)
.ThenByDescending(x => x.Length)
.Take(n)
.ToArray();
}
public static void PrintYearsLongestDurationOfMonth(List<Record> data, int id)
{
var years = data.Where(x => x.ClientID == id)
.GroupBy(x => x.Year)
.Select(x => (x.Max(y => (y.Duration, y.Month)), x.Key))
.OrderByDescending(x => x.Key)
.ToArray();
if (years.Length > 0)
{
foreach (var year in years)
{
Console.WriteLine(
$"Год: {year.Key}, месяц: {year.Item1.Month}, продолжительность: {year.Item1.Duration}");
}
}
else
{
Console.WriteLine("Нет данных о клиенте");
}
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace PhotoEnhancer
{
public abstract class PixelFilter : ParametrizedFilter
{
public PixelFilter(IParameters p) : base(p) { }
//public abstract ParameterInfo[] GetParametersInfo();
public override Photo Process(Photo original, IParameters parameters)
{
var newPhoto = new Photo(original.Width, original.Height);
for (int x = 0; x < original.Width; x++)
for (int y = 0; y < original.Height; y++)
newPhoto[x, y] = ProcessPixel(original[x, y],
parameters);
return newPhoto;
}
public abstract Pixel ProcessPixel(Pixel originalPixel,
IParameters parameters);
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace PhotoEnhancer
{
class ReductionInSizeTransformers : ITransformer<ReductionInSizeParameters>
{
public Size ResultSize { get; private set; }
Size originalSize;
double TransformParameter;
public void Initialize(Size size, ReductionInSizeParameters parameters)
{
originalSize = size;
TransformParameter = parameters.ReductionInSizeParameter;
ResultSize = new Size(
(int)(size.Width / TransformParameter),
(int)(size.Height / TransformParameter));
if (ResultSize.Width == 0 || ResultSize.Height ==0)
{
ResultSize = size;
TransformParameter = 1;
}
}
public Point? MapPoint(Point point)
{
var x = (int)(point.X * TransformParameter);
var y = (int)(point.Y * TransformParameter);
return new Point(x, y);
}
}
}
<file_sep>using System;
namespace PhotoEnhancer
{
public class LighteningFilter : PixelFilter
{
public LighteningFilter() : base(new LighteningParameters()) { }
public override string ToString()
{
return "Осветление/затемнение";
}
public override Pixel ProcessPixel(Pixel originalPixel,
IParameters parameters)
{
return originalPixel * (parameters as LighteningParameters).Coefficient;
}
}
}
| 15a46e692f389a7d2f2fd380fa09ed393a35b060 | [
"C#"
] | 10 | C# | pmsnik/homeworks | f781fc7c3b4bc3276da986ed6cab8d3fc5bd9b7f | 6586e91397421248d24706ff920a6df5477e8651 | |
refs/heads/master | <repo_name>JASchilz/home<file_sep>/bin-common/mkgithubrepo
#!/bin/sh
curl -u $GITHUB_USERNAME:$GITHUB_CREATE_REPO_API_KEY --header "Content-Type: application/json" --request POST --data "{\"name\": \"$1\", \"description\": \"$2\"}" https://api.github.com/user/repos
<file_sep>/.bashrc
export PATH=$HOME/bin:$HOME/bin-common:$PATH
source $HOME/.secrets/secrets.sh
source $HOME/.local/.bashrc
<file_sep>/README.md
# ~
Common dotfiles, executable scripts, etc., for JASchilz
From [https://developer.atlassian.com/blog/2016/02/best-way-to-store-dotfiles-git-bare-repo/](https://developer.atlassian.com/blog/2016/02/best-way-to-store-dotfiles-git-bare-repo/)
| 40cd580e01f73a4fb1cef8645aba242d4f00e00e | [
"Markdown",
"Shell"
] | 3 | Shell | JASchilz/home | a93c42e631aee10b70385a6684c6b5d8b0d156b7 | b844ef3b1a69029cd2c24efaab3d01990c307ebd | |
refs/heads/main | <repo_name>adilfarizki/uprak-php<file_sep>/uprak_php/formedit.php
<?php
session_start();
if (!isset($_SESSION["login"])) {
header("Location : login.php");
exit;
}
include 'koneksi.php';
$id_produk = $_GET['id_produk'];
$sql = "SELECT * FROM produk WHERE id_produk='$id_produk'";
$query = mysqli_query($connect, $sql);
$pel = mysqli_fetch_assoc($query);
if ( mysqli_num_rows($query) < 1) {
die("data tidak ditemukan...");
}
?>
<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- Bootstrap CSS -->
<link href="https://cdn.jsdelivr.net/npm/[email protected]/dist/css/bootstrap.min.css" rel="stylesheet" integrity="<KEY>" crossorigin="anonymous">
<link rel="stylesheet" href="edit.css">
<!-- google font -->
<link rel="preconnect" href="https://fonts.gstatic.com">
<link href="https://fonts.googleapis.com/css2?family=Roboto&display=swap" rel="stylesheet">
<title>Hello, world!</title>
</head>
<body>
<!-- navbar -->
<nav class="navbar navbar-expand-lg navbar-light fixed-top">
<div class="container-fluid">
<a class="navbar-brand" href="index.php">Online Shop</a>
<button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarNav"
aria-controls="navbarNav" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="nav-link">
<div><a class="me-4" href="produk.php">Produk</a></div>
</div>
</div>
</nav>
<!-- form edit -->
<form action="edit.php" method="post">
<div class="form">
<div class="container fluid">
<div class="card-body">
<div class="modal-body">
<h4 class="card-subtitle mb-4 text-center">Form Edit Barang dan Stock Data Barang</h4>
<div class="form-gorup row">
<label class="col-sm-3 col-form-label" for="id_produk">Id Produk</label>
<div class="col-8">
<input type="text" class="form-control" name="id_produk" value="<?php echo $pel['id_produk']?>">
</div>
</div>
<div class="form-gorup row" style="margin-top: 10px;">
<label class="col-sm-3" for="nama_produk">Nama Produk</label>
<div class="col-8">
<input type="text" class="form-control" name="nama_produk" value="<?php echo $pel['nama_produk']?>">
</div>
</div>
<div class="form-group row" style="margin-top: 10px;">
<label class="col-sm-3" for="harga">Harga Produk</label>
<div class="col-8">
<input type="text" class="form-control" name="harga" value="<?php echo $pel['harga']?>">
</div>
</div>
<div class="form-group row" style="margin-top: 8px;">
<label class="col-sm-3" for="stok">Stock Produk</label>
<div class="col-8">
<input type="text" class="form-control" name="stok" value="<?php echo $pel['stok']?>">
</div>
</div>
<div class="form-group row" style="margin-top: 8px;">
<label class="col-sm-3" for="gambar">Gambar Produk</label>
<div class="col-8">
<input type="url" class="form-control" name="gambar" value="<?php echo $pel['gambar']?>">
</div>
</div>
<div class="form-group row" style="margin-top: 8px;">
<label class="col-sm-3" for="detail">Detail Produk</label>
<div class="col-8">
<input type="text" class="form-control" name="detail" value="<?php echo $pel['detail']?>">
</div>
</div>
</div>
<a href="produk.php" class="btn btn-danger">kembali</a>
<button type="submit" name="simpan" value="simpan" class="btn btn-success">Submit</button>
<!-- <button type="button" class="btn btn-primary mb-1 col-sm-2 container" onclick="datasimpan()"
id="simpan2">Simpan</button> -->
</div>
</div>
</div>
<!-- Option 1: Bootstrap Bundle with Popper -->
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.bundle.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<!-- Option 2: Separate Popper and Bootstrap JS -->
<!--
<script src="https://cdn.jsdelivr.net/npm/@popperjs/[email protected]/dist/umd/popper.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
-->
</body>
</html>
<file_sep>/uprak_php/produk.php
<?php
session_start();
if (!isset($_SESSION["login"])) {
header("Location : login.php");
exit;
}
include 'koneksi.php';
?>
<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- Bootstrap CSS -->
<link href="https://cdn.jsdelivr.net/npm/[email protected]/dist/css/bootstrap.min.css" rel="stylesheet" integrity="<KEY>" crossorigin="anonymous">
<link rel="stylesheet" href="data.css">
<!-- google font -->
<link rel="preconnect" href="https://fonts.gstatic.com">
<link href="https://fonts.googleapis.com/css2?family=Roboto&display=swap" rel="stylesheet">
<title>Hello, world!</title>
</head>
<body>
<!-- navbar -->
<nav class="navbar navbar-expand-lg navbar-light fixed-top">
<div class="container-fluid">
<a class="navbar-brand" href="index.php">Online Shop</a>
<button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarNav"
aria-controls="navbarNav" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="nav-link">
<div><a class="me-4" href="produk.php">Produk</a></div>
<div><a class="me-4" href="cart.php">Cart</a></div>
</div>
</div>
</nav>
<!-- input -->
<div class="container mb-5">
<div class="hero">
<p><span class="ms-2 fw-bold">Data Produk</span></p>
<a href="tambah_produk.php" class="btn btn-primary ms-3">[+] Tambah Baru</a>
<a href="index.php" class="btn btn-green ms-3">Kembali</a>
</div>
<div class="input ms-3 me-3 mt-3 pb-3">
<table class="table table-bordered bg-light btn-dark text-dark text-center">
<tr class="bg-primary table-bordered btn-dark">
<td>NO</td>
<td>GAMBAR</td>
<td>NAMA PRODUK</td>
<td>HARGA PRODUK</td>
<td>STOCK</td>
<td>ACTION</td>
</tr>
<?php
$sql = "SELECT * FROM produk";
$query = mysqli_query($connect,$sql);
while($pel = mysqli_fetch_array($query)){
echo"<tr>";
echo"<td>".$pel['id_produk']."</td>";
echo "<td><img src='".$pel['gambar']."'style='width:80px; height:80px;'></td>";
echo"<td>".$pel['nama_produk']."</td>";
echo"<td>".$pel['harga']."</td>";
echo"<td>".$pel['stok']."</td>";
echo"<td>";
echo "<a class='btn btn-warning' href='formedit.php?id_produk=".$pel['id_produk']."'>Edit</a> ";
echo "<a class='btn btn-danger' href='hapus.php?id_produk=".$pel['id_produk']."'>Hapus</a>";
echo"</td>";
echo"</tr>";
}
?>
</table>
</div>
</div>
<!-- Option 1: Bootstrap Bundle with Popper -->
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.bundle.min.js" integrity="<KEY>U5S9FOnJ0" crossorigin="anonymous"></script>
<!-- Option 2: Separate Popper and Bootstrap JS -->
<!--
<script src="https://cdn.jsdelivr.net/npm/@popperjs/[email protected]/dist/umd/popper.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
-->
</body>
</html><file_sep>/uprak_php/index.php
<?php
session_start();
if (!isset($_SESSION["login"])) {
header("Location: login.php");
exit;
}
include 'koneksi.php';
?>
<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- Bootstrap CSS -->
<link href="https://cdn.jsdelivr.net/npm/[email protected]/dist/css/bootstrap.min.css" rel="stylesheet"
integrity="<KEY>" crossorigin="anonymous">
<link rel="stylesheet" href="style.css">
<!-- google font -->
<link rel="preconnect" href="https://fonts.gstatic.com">
<link href="https://fonts.googleapis.com/css2?family=Roboto&display=swap" rel="stylesheet">
<title>Hello, world!</title>
</head>
<body>
<!-- navbar -->
<nav class="navbar navbar-expand-lg navbar-light fixed-top">
<div class="container-fluid">
<a class="navbar-brand" href="index.php">Online Shop</a>
<button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarNav"
aria-controls="navbarNav" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="nav-link">
<div><a class="me-4" href="index.php">Home</a></div>
<div><a class="me-4" href="produk.php">Produk</a></div>
<div><a class="me-4" href="cart.php">Cart</a></div>
<div><a class="me-4" style="margin-left: 700px;" href="logout.php">Logout</a></div>
</div>
</div>
</nav>
<!-- hero -->
<div id="carouselExampleControls" class="carousel slide gambar" data-bs-ride="carousel">
<div class="carousel-inner">
<div class="carousel-indicators">
<button type="button" data-bs-target="#carouselExampleIndicators" data-bs-slide-to="0" class="active"
aria-current="true" aria-label="Slide 1"></button>
<button type="button" data-bs-target="#carouselExampleIndicators" data-bs-slide-to="1"
aria-label="Slide 2"></button>
<button type="button" data-bs-target="#carouselExampleIndicators" data-bs-slide-to="2"
aria-label="Slide 3"></button>
</div>
<div class="carousel-item active">
<img src="https://s3.bukalapak.com/uploads/content_attachment/8c074ee410e8d762540a8cb5/w-744/12._Raincoat.jpg"
class="d-block w-100" alt="...">
</div>
<div class="carousel-item">
<img src="https://s0.bukalapak.com/uploads/content_attachment/5df4369af5f4585e8bcf5cb5/w-744/14._Jaket_track_top.jpg"
class="d-block w-100" alt="...">
</div>
<div class="carousel-item">
<img src="https://asset.kompas.com/crops/OrdUvsozEEjdX3F-YXwNLyuR1fA=/118x63:1288x843/750x500/data/photo/2020/01/29/5e3104a405ae4.jpg"
class="d-block w-100" alt="...">
</div>
</div>
<button class="carousel-control-prev" type="button" data-bs-target="#carouselExampleControls"
data-bs-slide="prev">
<span class="carousel-control-prev-icon" aria-hidden="true"></span>
<span class="visually-hidden">Previous</span>
</button>
<button class="carousel-control-next" type="button" data-bs-target="#carouselExampleControls"
data-bs-slide="next">
<span class="carousel-control-next-icon" aria-hidden="true"></span>
<span class="visually-hidden">Next</span>
</button>
<div class="button">
</div>
<b>Jaket</b>
<p>No Windage Hold Off, No Windage Hold Off seri MA-1 dan N3-B</p>
</div>
<!-- Produk -->
<div class="pro">
<h1 class="laris ps-5 p-2">Produk Terlaris</h1>
</div>
<div class="produk">
<?php
$id = mysqli_query($connect,'SELECT * FROM produk ORDER BY produk.id_produk');
while ($prdk = mysqli_fetch_array($id)){
$idp = $prdk['id_produk'];
?>
<a href="detail.php?id_produk=<?= $prdk["id_produk"]?>">
<div class="card m-2 me-3" style="width: 18rem;">
<img src="<?php echo $prdk['gambar'] ?>" class="mt-2 ms-3 mb-2 me-3 gambar_produk" alt="...">
<div class="card-body float-end">
<h5 class="card-title"><?php echo $prdk['nama_produk'] ?></h5>
<b>Rp. <?php echo $prdk['harga'] ?></b> <br>
</div>
</div>
</a>
<?php
}
?>
</div>
<!-- produk -->
<div class="duk">
<h1 class="produkkk ps-5 p-2">Product</h1>
</div>
<div class="listproduk">
<?php
$id = mysqli_query($connect,'SELECT * FROM produk ORDER BY produk.id_produk');
while ($prdk = mysqli_fetch_array($id)){
$idp = $prdk['id_produk'];
?>
<a href="detail.php?id_produk=<?= $prdk["id_produk"]?>">
<div class="card m-2 me-3 float-start" style="width: 15rem;">
<img src="<?php echo $prdk['gambar'] ?>" style="width: 200px; height: 200px !important;" class="mt-2 ms-3 mb-2 me-3 gambar_produk" alt="...">
<div class="card-body float-end">
<h5 class="card-title"><?php echo $prdk['nama_produk'] ?></h5>
<b>Rp. <?php echo $prdk['harga'] ?></b> <br>
</div>
</div>
</a>
<?php
}
?>
</div>
<div id="footer">
<footer>Copyright © 2021 Online Shop.com MAF <NAME>.</footer>
</div>
<!-- Option 1: Bootstrap Bundle with Popper -->
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.bundle.min.js"
integrity="<KEY>" crossorigin="anonymous">
</script>
<!-- Option 2: Separate Popper and Bootstrap JS -->
<!--
<script src="https://cdn.jsdelivr.net/npm/@popperjs/[email protected]/dist/umd/popper.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
-->
</body>
</html><file_sep>/uprak_php/hapus.php
<?php
include 'koneksi.php';
if (isset($_GET['id_produk'])) {
header('Location: produk.php');
}
$id_produk = $_GET['id_produk'];
$sql = "DELETE FROM produk WHERE id_produk='$id_produk'";
$query = mysqli_query($connect, $sql);
if ($query){
echo "<meta http-equiv='refresh' content='1;url=produk.php'>";
}else{
header('Location: hapus.php?status=gagal');
}
?><file_sep>/uprak_php/aksilogin.php
<?php
session_start();
include 'koneksi.php';
if ( isset($_POST["submit"])) {
$username = $_POST["username"];
$password = $_POST["<PASSWORD>"];
$result = mysqli_query($connect, "SELECT * FROM user WHERE username = '$username'");
// cek username
if ( mysqli_num_rows($result) === 1) {
// cek password
$row = mysqli_fetch_assoc($result);
if (password_verify($password, $row["password"])) {
// session
$_SESSION["login"] = true;
header("Location: index.php");
exit;
}
}
}
?><file_sep>/uprak_php/koneksi.php
<?php
$host ="localhost";
$user ="root";
$password ="";
$database ="uprak_2";
$connect =mysqli_connect($host,$user,$password,$database) or die("Gagal Menghubungkan");
?>
<?php
function registerasi($data) {
global $connect;
$username = stripslashes($data["username"]);
$email = stripslashes($data["email"]);
$password = mysqli_real_escape_string($connect, $data["password"]);
$password2 = mysqli_real_escape_string($connect, $data["password2"]);
if( $password !== $password2) {
echo "<script> alert('konfirmasi password tidak sama') </script>";
return false;
}
$password = password_hash($password, PASSWORD_DEFAULT);
// cek username
$result = mysqli_query($connect,"SELECT username FROM user WHERE username ='$username' ");
if (mysqli_fetch_assoc($result)) {
echo "<script> alert('username sudah ada!')</script>";
return false;
}
$querry = mysqli_query($connect, "INSERT INTO user VALUES('','$username','$email','$password')");
return mysqli_affected_rows($connect);
}
?><file_sep>/README.md
# uprak-php
progres_uprak
<file_sep>/uprak_php/detail.php
<?php
include 'koneksi.php';
$id_produk = $_GET['id_produk'];
$sql = "SELECT * FROM produk WHERE id_produk='$id_produk'";
$query = mysqli_query($connect, $sql);
$pel = mysqli_fetch_assoc($query);
if ( mysqli_num_rows($query) < 1) {
die("data tidak ditemukan...");
}
?>
<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- Bootstrap CSS -->
<link href="https://cdn.jsdelivr.net/npm/[email protected]/dist/css/bootstrap.min.css" rel="stylesheet"
integrity="<KEY>" crossorigin="anonymous">
<link rel="stylesheet" href="detail.css">
<!-- google font -->
<link rel="preconnect" href="https://fonts.gstatic.com">
<link href="https://fonts.googleapis.com/css2?family=Roboto&display=swap" rel="stylesheet">
<link rel="preconnect" href="https://fonts.gstatic.com">
<link href="https://fonts.googleapis.com/css2?family=Roboto:ital,wght@0,100;0,300;0,400;0,500;0,700;0,900;1,100;1,300;1,400;1,500;1,700&display=swap" rel="stylesheet">
<title>Hello, world!</title>
</head>
<body>
<!-- navbar -->
<nav class="navbar navbar-expand-lg navbar-light fixed-top">
<div class="container-fluid">
<a class="navbar-brand" href="index.php">Online Shop</a>
<button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarNav"
aria-controls="navbarNav" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
</div>
</nav>
<!-- Produk -->
<div class="produk">
<div class="container">
<div class="row">
<div class="col-6">
<img src="<?php echo $pel['gambar']?>" alt="">
</div>
<div class="col-6">
<p><?php echo $pel['nama_produk']?></p>
<p>Rp. <?php echo $pel['harga']?></p>
<p>Stock : <?php echo $pel['stok']?></p>
<h1>Detail Product</h1>
<p class="detail">Detail : <?php echo $pel['detail']?></p>
<button class="btn btn-primary">Buy</button>
</div>
</div>
</div>
</div>
<!-- Option 1: Bootstrap Bundle with Popper -->
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.bundle.min.js"
integrity="<KEY>" crossorigin="anonymous">
</script>
<!-- Option 2: Separate Popper and Bootstrap JS -->
<!--
<script src="https://cdn.jsdelivr.net/npm/@popperjs/[email protected]/dist/umd/popper.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
-->
</body>
</html><file_sep>/uprak_php/uprak_2.sql
-- phpMyAdmin SQL Dump
-- version 5.0.3
-- https://www.phpmyadmin.net/
--
-- Host: 1192.168.3.11
-- Generation Time: Mar 25, 2021 at 08:30 AM
-- Server version: 10.4.14-MariaDB
-- PHP Version: 7.4.11
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `uprak_2`
--
-- --------------------------------------------------------
--
-- Table structure for table `login`
--
CREATE TABLE `login` (
`id` int(30) NOT NULL,
`username` varchar(255) NOT NULL,
`password` varchar(255) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `login`
--
INSERT INTO `login` (`id`, `username`, `password`) VALUES
(1, 'admin', '<PASSWORD>');
-- --------------------------------------------------------
--
-- Table structure for table `produk`
--
CREATE TABLE `produk` (
`id_produk` int(30) NOT NULL,
`nama_produk` varchar(255) NOT NULL,
`harga` int(255) NOT NULL,
`stok` int(255) NOT NULL,
`gambar` varchar(255) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `produk`
--
INSERT INTO `produk` (`id_produk`, `nama_produk`, `harga`, `stok`, `gambar`) VALUES
(1, 'd&f Jaket pria Bomber with mustard strip - Navy', 189900, 10, 'https://id-test-11.slatic.net/p/11a306f5f738b8c5d78299614846ddfe.jpg_400x400q90.jpg_.webp'),
(2, 'M.RDJ.JAKET - SANDWASH - JAKET SANDWASH BLACK ', 88000, 10, 'https://id-test-11.slatic.net/p/ab54716cbf50a03d4c26f2732e4f06b3.jpg_400x400q90.jpg_.webp'),
(3, '17SEVEN Jaket Bomber Polos Pria Terpopuler', 149000, 10, 'https://id-test-11.slatic.net/p/a5927313930f44fb5c6eb221fb7abfa0.jpg_400x400q90.jpg_.webp'),
(4, 'JAKET PRIA BAHAN JN IMPORT Mala punye', 126000, 10, 'https://id-test-11.slatic.net/p/836887c2898abaa92b1b9de678c2b19b.jpg_400x400q90.jpg_.webp'),
(5, 'Jaket Jeans Denim Hitam Pekat Pria ukuran M-XXL', 86100, 10, 'https://id-live-01.slatic.net/original/2cb6d776b23edc70a80e04e8b0f5c579.jpg_400x400q90.jpg_.webp');
-- --------------------------------------------------------
--
-- Table structure for table `user`
--
CREATE TABLE `user` (
`id` int(11) NOT NULL,
`username` varchar(50) NOT NULL,
`email` varchar(255) NOT NULL,
`password` varchar(255) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `user`
--
INSERT INTO `user` (`id`, `username`, `email`, `password`) VALUES
(1, 'adilfarizki', '<EMAIL>', <PASSWORD>'),
(2, 'LarasPradita', '<EMAIL>', <PASSWORD>');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `login`
--
ALTER TABLE `login`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `produk`
--
ALTER TABLE `produk`
ADD PRIMARY KEY (`id_produk`);
--
-- Indexes for table `user`
--
ALTER TABLE `user`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `login`
--
ALTER TABLE `login`
MODIFY `id` int(30) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=2;
--
-- AUTO_INCREMENT for table `produk`
--
ALTER TABLE `produk`
MODIFY `id_produk` int(30) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=6;
--
-- AUTO_INCREMENT for table `user`
--
ALTER TABLE `user`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep>/uprak_php/tambah_produk.php
<?php
include 'koneksi.php';
?>
<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- Bootstrap CSS -->
<link href="https://cdn.jsdelivr.net/npm/[email protected]/dist/css/bootstrap.min.css" rel="stylesheet" integrity="<KEY>" crossorigin="anonymous">
<link rel="stylesheet" href="input.css">
<title>Hello, world!</title>
</head>
<body>
<!-- nav-bar -->
<form action="simpan.php" method="post">
<div class="card container">
<div class="card-body">
<div class="modal-body">
<h4 class="card-subtitle mb-4 text-center">Input Data Barang/Produk</h4>
<div class="form-gorup row">
<label class="col-sm-3 col-form-label" for="id_produk">Id Produk</label>
<div class="col-8">
<input type="text" class="form-control" name="id_produk">
</div>
</div>
<div class="form-gorup row" style="margin-top: 10px;">
<label class="col-sm-3" for="nama">Nama Produk</label>
<div class="col-8">
<input type="text" class="form-control" name="nama_produk">
</div>
</div>
<div class="form-group row" style="margin-top: 10px;">
<label class="col-sm-3" for="harga">Harga Produk</label>
<div class="col-8">
<input type="text" class="form-control" name="harga">
</div>
</div>
<div class="form-group row" style="margin-top: 8px;">
<label class="col-sm-3" for="stock">Stock Produk</label>
<div class="col-8">
<input type="text" class="form-control" name="stok">
</div>
</div>
<div class="form-group row" style="margin-top: 8px;">
<label class="col-sm-3" for="gambar">Gambar Produk</label>
<div class="col-8">
<input type="url" class="form-control" name="gambar">
</div>
</div>
<div class="form-group row" style="margin-top: 8px;">
<label class="col-sm-3" for="detail">Detail Produk</label>
<div class="col-8">
<input type="text" class="form-control" name="detail">
</div>
</div>
</div>
<input type="submit" value="Simpan" name="simpan" class="btn btn-primary">
<a href="index.php" class="btn btn-success ms-3">Kembali</a>
</div>
</div>
<!-- Option 1: Bootstrap Bundle with Popper -->
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.bundle.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<!-- Option 2: Separate Popper and Bootstrap JS -->
<!--
<script src="https://cdn.jsdelivr.net/npm/@popperjs/[email protected]/dist/umd/popper.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
-->
</body>
</html><file_sep>/uprak_php/register.php
<?php
include 'koneksi.php';
if (isset($_POST["submit"])) {
if (registerasi($_POST) > 0) {
echo "<script> alert('user baru telah ditambahkan')</script>";
}else{
echo mysqli_error($connect);
}
}
?>
<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- Bootstrap CSS -->
<link href="https://cdn.jsdelivr.net/npm/[email protected]/dist/css/bootstrap.min.css" rel="stylesheet"
integrity="<KEY>" crossorigin="anonymous">
<link rel="stylesheet" href="login.css">
<!-- google font -->
<link rel="preconnect" href="https://fonts.gstatic.com">
<link href="https://fonts.googleapis.com/css2?family=Roboto:wght@100;300;400;500;700;900&display=swap" rel="stylesheet">
<title>Register</title>
</head>
<body>
<div id="cardd">
<div id="card-content">
<div id="card-title">
<h2>REGISTER</h2>
<div class="underline-title2"></div>
</div>
</div>
<!-- form -->
<form action="" method="post" class="form">
<label for="user-name" style="padding-top:-5px; margin-left:20px"> Username</label>
<input id="user-name" class="form-content" type="name" name="username" autocomplete="on" required />
<div class="form-border"></div>
<label for="user-email" style="padding-top:13px; margin-left:20px"> Email</label>
<input id="user-email" class="form-content" type="email" name="email" autocomplete="on" required />
<div class="form-border"></div>
<label for="user-password" style="padding-top:13px; margin-left:20px"> Password</label>
<input id="user-password" class="form-content" type="<PASSWORD>" name="password" required />
<div class="form-border"></div>
<label for="user-password2" style="padding-top:13px; margin-left:20px"> Confirm Password</label>
<input id="user-confirmpassword" class="form-content" type="<PASSWORD>" name="password2" required />
<div class="form-border"></div>
<input id="submit-btn" type="submit" name="submit" value="REGISTER" />
</form>
</div>
<!-- Option 1: Bootstrap Bundle with Popper -->
< src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.bundle.min.js"
integrity="<KEY>" crossorigin="anonymous">
</>
<!-- Option 2: Separate Popper and Bootstrap JS -->
<!--
< src="https://cdn.jsdelivr.net/npm/@popperjs/[email protected]/dist/umd/popper.min.js" integrity="<KEY>" crossorigin="anonymous"></>
< src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.min.js" integrity="<KEY>" crossorigin="anonymous"></>
-->
</body>
</html> | 33c006ad1aa3a925c3c0b20fa7ebc8cd7b706900 | [
"Markdown",
"SQL",
"PHP"
] | 11 | PHP | adilfarizki/uprak-php | 6b006c80f2ec9c66e0f53a9a525da3ed76aea610 | 25b1d65a0171febce88b0c49f372defb2f1154dd | |
refs/heads/master | <file_sep>潜在変数モデルと学習法に関して
================================================================================
________________________________________________________________________________
はじめに
--------------------------------------------------------------------------------
### 自己紹介
はじめまして!いしはたです!
### 前置き
この記事は Machine Learning Advent Calendar の 12月8日 の記事として書かれています。
思いの外長くなった割に内容が無いという悲惨な状況ですがご容赦ください。
### 内容
今日の内容ですが、生成的確率モデリングに関して述べたいと思います。
具体的には潜在変数(隠れ変数)を含む確率モデルの学習アルゴリズムについて簡単に解説します。
### 想定する読者
対象は「__機械学習はじめてみよう!__」とか「__確率モデルで遊んでみたい!__」というビギナー向けのつもりです。
難易度としては高校時代の同級生が「へー」って言ってくれるレベルを目指します。
場合によってはあえて正確な表現を避け、簡単な言い回しを用いる予定です。
なので玄人の方が読むと「__何を今更…__」とか「__それは違うだろ!__」となると思いますがご容赦ください。
________________________________________________________________________________
潜在変数モデル
--------------------------------------------------------------------------------
### 確率モデルと潜在変数
まず「__潜在変数モデル__」というものについて、簡単な実例を交えて簡単に解説します。
(簡単を繰り替えしてごめんなさい。)
確率モデルとはざっくり言えば、__同時分布__ です。
__生成的__ 確率モデルとはざっくり言えば、
__データを生成できる(サンプリングできる)__ 確率モデルです。
潜在変数モデルとはざっくり言えば、生成的確率モデルのうち、
観測可能である __観測変数 x__ と観測不能である __潜在変数 z__ を持つものです。
ここでモデルの同時分布を具体的に決めるために、モデルは __パラメータ θ__ を持つとします。
そしてその同時分布を p(x, z | θ)と書くことにします。
生成的確率モデルの素敵なところは、データの生成過程を想像できるので、解釈しやすいところです。
潜在変数モデルの素敵なところは、複雑なデータが大量にあっても、
それらを潜在変数という不思議なものでなんとなく説明してくれるところです。
潜在変数モデルの使い道としてもっともポピュラーなものがクラスタリングです。
データがいっぱいあるけど、実はそれらは潜在的にグループに分かれているという解釈です。
例えば、テレビの視聴履歴を集めたとします。
すると潜在グループとして、__オタク__ or __リア充__ などが考えられます。
データはその潜在グループに依存して生成されると考えます。
例えばオタク野郎はアニメばっかり観てて、リア充様はドラマとかスポーツとか観てるんじゃねえの?(激怒)
この潜在変数を推定するといろいろ便利なんです。
例えば Amazon の購入履歴を考えます。
クラスタリングの結果「こいつらは潜在的に萌豚えだな」と分かれば、
アニメBD売りたい放題とかなるわけです。
では具体的にはどうやってその潜在変数を推定するのでしょうか。
### 学習と推論
潜在変数モデル p(x, z | θ) では x は既知で、 z と θ は未知とします。
よって x から z, θ を推定する必要があります。
ここで推定ってなんやねん!となることがあります。
ここでは単純に z と θ の具体的な値を知りたいという意味で「推定」と言っています。
しかし個人的な感覚だと z を推定することを推論、
θ を推定する事を学習と呼ぶ気もします。
なぜ同じ未知の変数なのに扱いが違うのか。それはそれらが別のタイプの変数だからです。
潜在変数 z は確率変数であるのに対して、θはパラメータであり、単なる変数なのです。
前置きはまぁ置いといて推定してみましょう。
#### 準備
一気に両方推定するのは難しそうなので1つずつ推定しましょう。
まず x, z が既知のとき、θを推定することを考えます。
これはモデル中の全確率変数が観測可能であるときに、 そのパラメータを推定することに対応します。
このようなデータを __完全データ__ と呼びます。
よってもはや x,z と区別することなく、p(x | θ) と書いても良いのです。
完全データからのパラメータの推定は実は中学生くらいでもできます。
例えばコインを用意し、100回振ります。そしたら表 70 回、裏 30 回でした。
するとこのコインのパラメータは 表が出る確率 70 % , 裏が出る確率 30 % としたくなります。
表が55回で裏が45回なら表55%,裏45%としたくなります。
これは __最尤推定__ と呼ばれるれっきとした推定法なのです。
具体的には p(x | θ) を最大化するように θ を決めるのです。
要するに今起きたことは、『最も尤もらしい出来事』だったと考えるのです。
最尤推定はまぁ素敵な性質がいっぱいあるのですがここでは割愛します。
次に x, θ が既知のとき、z を推定することを考えます。
これは同時分布 p(x, z | θ) が既知であるときに z を推定することに対応します。
先のコインの例で言えば、コインの表裏の出る確率がわかっている時に、
今から振るコインの面を推定することに対応します。
例えば表がでる確率が 99 % で裏の確率が 1 % だったとします。
次にどっちが出ると思う?と聞かれたらまぁ「表」と答えるでしょう。
では表55%, 裏45%ではどうでしょう?まぁ一つ選べと言われれば「表」でしょう。
つまり、p(x, z | θ) を最大化するように z を決めるのです。
さっきの最尤推定と同じアイディアですね。
しかし、多くの人は 表55%,裏45% の状況で「表!」と叫ぶことを躊躇すると思います。
それに対し、θを推定するときに表55%,裏45%とすることはあまり抵抗がなかったと思います。
この違いは z が確率変数であることを知っているからなのです。
これがさっき z と θ の推定を推論と学習と言いわけたい気持ちの答えであり、
同時に今日のメインテーマでもあります。
とりあえずこの問題は棚上げし、z,θ を同時に推定する方法を考えましょう。
今、x,z から θ を、そして x,θ からzを推定する方法を知っています。
では x から z,θ を推定するにはどうすればいいか。
答えは簡単。交互に推定するのです!
#### MM algorithm (Viterbi Training, K-means etc)
まず適当に θ を決めます。
すると x,θ がわかるので z を p(x,z | θ) を最大化するように推定します。
すると x,z がわかるので θ を p(x,z | θ) を最大化するように推定します。
これを繰り返すといつか z,θ は変化しなくなり、その z,θ が推定結果となります。
「え?これでいいの?」って感じですがいいのです。
これは Viterbi Training と呼ばれたりする方法ですが、ここでは MM algorithm と呼びます。
なぜなら z を p(x,z | θ) を最大化(Maximize)するように決め、
次に θ を p(x,z | θ) を最大化(Maximize)するように決めるからです。
k-means というクラスタリング手法はこの一例です。
初心者の人でも「これならできそうだ!」と思ったでしょう。
できます。暇な時にやってください。
この手法では z を推定するとき思い切りを持って p(x, z | θ) を最大化するように決めました。
しかし、表55%,裏45%のときに「表」と答えるのを躊躇する方は納得出来ないでしょう。
そこで z をもっとソフトに推定する方法があります。
それがかの有名な __EM algorithm__ なのです!
#### EM algorithm
表55%,裏45%のときになんて答えればしっくりくるのか。
きっとその回答の一つが「表の期待値0.55(どやぁ」です。
麻雀でもパチンコでも競馬でも皆さん期待値で考えますよね?
「期待値が1を超えないギャンブルをする奴はクズ」とかいう人もいますが彼らは夢を買っているのです(多分。
つまり、z を推定するときに p(x, z | θ) を最大化(Maximize)するのでなく、
期待値(Expectation)を取ればいいんじゃないの?
それでいいんです。
まず適当に θ を決めます。
すると p(z |x, θ) がわかるので z の期待値を出します。
すると x と z の期待値がわかるので、それを用いて θ を更新します。
これを繰り返すと p(x | θ) がどんどん増加していきます。
これが EM algorithm なのです。
「なんでp(x | θ)が増えるんや!」という声が聞こえますが、Markdown で数式書きたくないので割愛します。
コインの例で言えば、
表回数 55, 裏回数 45のとき、表55%,裏45%と計算したのと同様、
表期待値 0.55, 裏期待値 0.45 のとき、表55%,裏45%と計算できますよね?
これで「表」と叫ぶことが不安な方々も学習できるようになりました。
しかし次の問題も生まれます。
「z の期待値考えるなら、θも期待値考えたくない?」
MM, EM algorithm がああるなら ME, EE algorithm もあるよね?
貪欲ですね〜。
でもθは確率変数じゃないんです。
ではどうするか。
θも確率変数としちゃおうよ!
これがかの有名な「ベイズ」の始まりである…。
#### ME algorithm と EE algorithm
__ベイズ的な__ 確率モデルとはモデルパラメータ θ に __事前分布 p(θ)__ を導入することで、
θ も確率変数として扱うモデルのことです。
これにより潜在変数モデルは p(x, z, θ) = p(x, z | θ)p(θ) はとなります。
(ベイズは奥が深いのでここでは詳細は述べません。)
この拡張により θ も p(x, z | θ) を最大化(Maximize)するのではなく、
p(θ | x) で期待値 (Expectation) を取れるようになります。
これによって MM, EM algorithm と同様に ME, EE algorithm が構成できますね。
しかし実際は、ベイズ的なモデルで p(z | x), p(θ | x) を計算することは難しく、
それらの近似分布 q(z), q(θ) を変分法で求める __変分ベイズ法__ が使われます。
これはとても説明がめんどくせいので興味のある人は自分で調べてください。
### まとめ
登場した4つの学習法をまとめると、
1. MM algorithm : z を最大化, θ も最大化
2. EM algorithm : z を期待値, θ は最大化
3. ME algorithm : z を最大化, θ は期待値
4. EE algorithm : z は期待値, θ も期待値
となります。
ではどれがいいの?
難しい質問です。
答えは「場合による」です。
一般的に、以下の特徴が挙げられます。
* M* algorithm は収束が早い。
* M* algorithm は初期値依存性が強い。
* E*, *E algorithm は汎化性能が高い。
* E* algorithm は遅い。
* *E algorithm は近似誤差が含まれる。
上の特徴を踏まえ、データやモデルから適切に学習法を選ぶ必要があります。
これらを適切に使い分けられるのが今流行りの __データサイエンティスト__ なんじゃないかなぁ(しらね。
________________________________________________________________________________
実験
================================================================================
ここでは実際に紹介した4手法を利用してクラスタリングを行ってみます。
### データ
今回は本をクラスタリングしてみます。
データは私が趣味で作っている[本推薦 AI bot][book_rec_ai] のデータを利用します。
まず bot がフォローしているユーザの本棚中の出現頻度 [Top 200][titles] の本を持ってきます。
次にユーザのうち、それらの本のうち大体半分くらいはもっているユーザ 65 人を取ってきます。
最後にユーザと本の関係を行列で表現します。
縦軸を本 i、横軸をユーザ j、行列の値 xij を
- 0:本を持ってない
- 1:本を持ってる
- 2:本を持ってる、かつ、高評価
の3値で表現します。
[この行列][relation]の各列をデータと思って本をクラスタリングしてみます。
クラスタ数は 10 として各学習法を 10 回ずつ初期値を変えて実行し、
もっとも尤度の高い結果をクラスタリング結果としました。
### モデル
今回はクラスタリングに naive Bayes model を用います。
本 i に関するデータは xi = {xi1,...,xi65} によって表現されます。
この本 i のクラスを zi とすれば、naive bayes model は
p(xi | zi, θ) = Πj p(xij | zi, θ) と分解することを許します。
つまりクラスが与えられたとき、各属性は条件付き独立と仮定するモデルです。
naive Baeys model に対する MM, EM, ME, EE の[実装][nbc]も一応載っけておきます。
この実装は「遅い、長い、汚い」の三拍子そろっていますが、唯一の見どころは、
z, θ の更新ルーチンに渡す関数を変えるだけで MM, EM, ME, EE が実現できるところでしょうか。
ようするにこれら4つの手法は紙一重ということが伝えたかったのです(フヒヒ、サーセンw
### 結果
各学習法のクラスタリング結果です。
1. [MM][mm]
2. [EM][em]
3. [ME][me]
4. [EE][ee]
ぶっちゃけおおきな違いはないですね。
よくよく見ると、漫画の扱いで個性が出ています。
MM が聖☆おにいさんを3月のライオンやよつばと!とくっつけているのに対して、
EE は聖☆おにいさんとテルマエロマエをくっつけてますね。
漫画好きの私としては EE の分け方に賛成です。
まぁこのように微妙に結果変わるので適切にアルゴリズム選んでねって話です。
________________________________________________________________________________
最後に
================================================================================
長々と書きましたが、後半だれてきてるのが目に見えますね。すいません。
この記事では潜在変数モデルの学習法がまぁいろいろあって、それらがどういう関係にあるかを述べました。
みなさんいろんなモデルを作るのが好きなようですが、MMは実装しやすく、早いのでお勧めです。
もし最後まで読んでくださった方がいれば、大変お疲れ様でした&ありがとうございました。
またどこかでお会いしましょう。
[book_rec_ai]: https://twitter.com/book_rec_ai "book_rec_ai"
[relation]: https://github.com/masakazu-ishihata/advent2013/blob/master/relation.dat "relation.dat"
[titles]: https://github.com/masakazu-ishihata/advent2013/blob/master/titles.dat "titles.dat"
[nbc]: https://github.com/masakazu-ishihata/advent2013/blob/master/nbc.rb "nbc.rb"
[mm]: https://github.com/masakazu-ishihata/advent2013/blob/master/mm.txt "mm.txt"
[em]: https://github.com/masakazu-ishihata/advent2013/blob/master/em.txt "em.txt"
[me]: https://github.com/masakazu-ishihata/advent2013/blob/master/me.txt "me.txt"
[ee]: https://github.com/masakazu-ishihata/advent2013/blob/master/ee.txt "ee.txt"
<file_sep>#!/usr/bin/env ruby
# -*- coding: utf-8 -*-
require "optparse"
require './nbc.rb'
################################################################################
# default
################################################################################
@ifile = "relation.dat" # 入力ファイル
@ofile = "result.dat" # 出力ファイル
@m = 0 # 学習法
@a = 1.1
@b = 1.1
@k = 5
@r = 1
@@mm, @@em, @@me, @@ee = [0, 1, 2, 3]
################################################################################
# Arguments
################################################################################
OptionParser.new { |opts|
# options
opts.on("-h","--help","Show this message") {
puts opts
exit
}
opts.on("-i [INPUT]"){ |f|
@ifile = f
}
opts.on("-o [OUTPUT]"){ |f|
@ofile = f
}
opts.on("-k [# clasters]"){ |f|
@k = f.to_i
}
opts.on("-r [# restart]"){ |f|
@r = f.to_i
}
opts.on("-a [alpha]"){ |f|
@a = f.to_f
}
opts.on("-b [beta]"){ |f|
@b = f.to_f
}
opts.on("-1", "--mm"){
@m = @@mm
}
opts.on("-2", "--em"){
@m = @@em
}
opts.on("-3", "--me"){
@m = @@me
}
opts.on("-4", "--ee"){
@m = @@ee
}
# parse
opts.parse!(ARGV)
}
################################################################################
# load data
################################################################################
class MyData
#### new ####
attr_reader :d # データの次元
attr_reader :n # n[d] = d次元の属性値の種類
attr_reader :data # データ
def initialize(_file)
# データの読み取り
# データ形式 : i a_i,1 ... a_i,d
@raw_data = []
open(_file).read.split("\n").each do |line|
ary = line.split(" ")
name = ary.shift # 名前
attr = ary # 属性
@raw_data.push([name, attr])
end
# データの加工
@N = @raw_data.size # データ数
@d = @raw_data[0][1].size # データの次元
@ns = Array.new(@N) # 各データ点の名前
@vs = Array.new(@d){|d| Hash.new(nil)} # 各次元の値
@data = []
for i in 0..@N-1
name, attr = @raw_data[i]
# 名前
@ns[i] = name
# 属性
a = Array.new(@d)
for d in 0..@d-1
v = attr[d]
@vs[d][v] = @vs[d].size if @vs[d][v] == nil
a[d] = @vs[d][v]
end
@data.push(a)
end
@n = Array.new(@d){|d| @vs[d].size}
end
#### clastering ####
def clastering(_file, opt = {})
opt = {:m => 0, :a => 1.1, :b => 1.1, :k => 5, :r => 1}.merge(opt)
p opt
# init
m = NaiveBayesModel.new(opt[:k], @n)
m.a = opt[:a]
m.b = opt[:b]
m.restart = opt[:r]
@data.each{|a| m.add(a) }
# learn
m.rand_p
case opt[:m]
when @@mm then m.mm_learn
when @@em then m.em_learn
when @@me then m.me_learn
when @@ee then m.ee_learn
end
# result
f = open(_file, "w")
m.result.sort{|a,b| b.size <=> a.size}.each do |c|
f.puts "#{c.map{|i| @ns[i]}.join(" ")}"
end
f.close
end
end
################################################################################
# main
################################################################################
d = MyData.new(@ifile)
d.clastering(@ofile, {:m => @m, :a => @a, :b => @b, :k => @k, :r => @r})
<file_sep>#!/usr/bin/env ruby
# -*- coding: utf-8 -*-
require './samplers.rb'
################################################################################
# 便利なやつら
################################################################################
#### Array まわり ####
class Array
def product; inject(:*); end
def sum; inject(:+); end
def normalize
s = self.sum
s == 0.0 || s == 1.0 ? self.clone : map{|i| i / s.to_f}
end
end
#### 各種関数 ####
def exp(x); Math.exp(x); end
def log(x); Math.log(x); end
def delta(x, y); x == y ? 1 : 0; end
# digamma 関数 (テイラー近似)
# copied from https://github.com/csauper/content-attitude
def dig(x)
x = x + 6
p = 1.0 / (x * x)
return (((0.004166666666667*p-0.003968253986254)*p+
0.008333333333333)*p-0.083333333333333)*p+
Math.log(x) - 0.5 / x - 1.0 / (x-1) - 1.0 / (x-2) -
1.0 / (x-3) - 1.0 / (x-4) - 1.0 / (x-5) - 1.0 / (x-6)
end
#### 収束判定用 : 平均二乗誤差 ####
def mse(_a, _b)
e = 0
for i in 0.._a.size-1
e += (_a[i] - _b[i]) ** 2
end
e
end
################################################################################
# Naive Bayes Model
################################################################################
class NaiveBayesModel
#### for each instacne / class / dimension / value ####
def each_i; for i in [email protected]; yield i; end; end
def each_k; for k in 0..@k-1; yield k; end; end
def each_d; for d in 0..@d-1; yield d; end; end
def each_v(_d); for v in 0..@n[_d]-1; yield v; end; end
########################################
# 初期化
########################################
#### new ####
attr_reader :d, :n, :k # モデル定数
attr_accessor :a, :b # ハイパーパラメータ
attr_reader :pk, :pvgk # パラメータ
attr_reader :x, :z # 確率変数
attr_accessor :restart, :iter, :radius # 学習設定
def initialize(_k, _n)
# モデル定数
@d = _n.size # 観測ベクトルの次元数
@n = _n.clone # 各次元の取りうる値
@k = _k # クラスタ数
# ハイパーパラメータ
@a = 1.0
@b = 1.0
# パラメータ
# @pk[k] = p(k)
# @pvgk[k][d][v] = p(a_d = v | k)
@pk = Array.new(@k){|k| 1 / @k.to_f}
@pvgk = Array.new(@k){|k| Array.new(@d){|d| Array.new(@n[d]){|v| 1/@n[d].to_f}}}
# 確率変数
@x = [] # 観測変数集合 [ a_i = [a_i,1,...,a_i,@d],... ]
@z = [] # 潜在変数集合 [ k_i,...]
# 学習用期待値 (もしくはカウント)
@ek = Array.new(@k){|k| @a-1}
@evgk = Array.new(@k){|k| Array.new(@d){|d| Array.new(@n[d]){|v| @b-1}}}
# 学習設定
@restart = 1 # ランダムリスタート
@iter = 100 # 繰り返し数
@radius = 1e-5 # 収束半径
end
#### パラメータの初期化 ####
# rand_p ランダムに初期化
# max_p 最頻値に初期化
def rand_p
@pk = Dirichlet.new_simple(@k, @a).sample
each_k{|k| each_d{|d| @pvgk[k][d] = Dirichlet.new_simple(@n[d], @b).sample } }
end
def max_p
@pk = @ek.normalize
each_k{|k| each_d{|d| @pvgk[k][d] = @evgk[k][d].normalize } }
end
#### 潜在変数の初期化 ####
# rand_z ランダムに初期化
# max_z 最頻値に初期化
def rand_z; each_i{|i| @z[i] = Categorical.new(@pk).sample}; end
def max_z; each_i{|i| @z[i] = max_k(@x[i])}; end
def max_k(_a)
mk, mp = [nil, nil]
each_k{|k| mk, mp = [k, pkga(k, _a)] if mk == nil || mp < pkga(k, _a) }
mk
end
#### 期待値の初期化 ####
# ハイパーパラメータを考慮した擬似カウント
def init_e
each_k{|k| @ek[k] = @a-1}
each_k{|k| each_d{|d| each_v(d){|v| @evgk[k][d][v] = @b-1}}}
end
########################################
# データ生成、入力
########################################
#### sample ####
def sample; sample_with_class[1]; end
def sample_with_class
k = Categorical.new(@pk).sample
a = Array.new(@d){|d| Categorical.new(@pvgk[k][d]).sample }
[k, a]
end
#### add ####
def add(_a); add_with_class(-1, _a); end
def add_with_class(_k, _a); @z.push(_k); @x.push(_a); end
########################################
# 確率値 (場合によっては期待値)
########################################
# pka : p(_k, _a)
# pkga : p(_k | _a)
def pka(_k, _a); @pk[_k] * pagk(_a, _k); end
def pkga(_k, _a); pka(_k, _a) / pa(_a); end
# pa : p(_a)
# pagk : p(_a | _k)
def pa(_a); Array.new(@k){|k| pka(k, _a) }.sum; end
def pagk(_a, _k); Array.new(@d){|d| @pvgk[_k][d][_a[d]] }.product; end
# export/import parameters
def export_par; [@pk, @pvgk].flatten; end
def import_par(_ary)
# load pk
each_k do |k|
@pk[k] = _ary.shift
end
# load pvgk
each_k do |k|
each_d do |d|
each_v(d) do |v|
@pvgk[k][d][v] = _ary.shift
end
end
end
end
########################################
# 学習 : z, p をそれぞれ e, m で推定
########################################
#### z の推定 ####
# zm_step : z を最頻値で推定
# ze_step : z を期待値で推定
def zm_step; max_z; z_step( lambda{|i, k| delta(k, @z[i])} ); end
def ze_step; z_step( lambda{|i, k| pkga(k, @x[i])} ); end
def z_step(_f)
init_e
each_i do |i|
each_k do |k|
# ek
@ek[k] += _f.call(i, k)
# evgk
each_d do |d|
@evgk[k][d][ @x[i][d] ] += _f.call(i, k)
end
end
end
end
#### p の推定 ####
# pm_step : p を最頻値で推定
# pe_step : p を期待値で推定
def pm_step; p_step( lambda{|x| log(x)} ); end
def pe_step; p_step( lambda{|x| dig(x)} ); end
def p_step(_f)
sum_ek = @ek.sum
each_k do |k|
# pk
@pk[k] = exp( _f.call(@ek[k]) - _f.call(sum_ek) )
# pvgk
each_d do |d|
sum_evgk = @evgk[k][d].sum
each_v(d) do |v|
@pvgk[k][d][v] = exp( _f.call(@evgk[k][d][v]) - _f.call(sum_evgk) )
end
end
end
end
#### 学習本体 ####
def mm_learn; puts "MM"; learn{zm_step; pm_step}; end
def em_learn; puts "EM"; learn{ze_step; pm_step}; end
def me_learn; puts "ME"; learn{zm_step; pe_step}; end
def ee_learn; puts "EE"; learn{ze_step; pe_step}; end
def learn
best_p, best_l = [nil, nil]
for r in 1..@restart
puts "Try #{r}"
# パラメータの初期化
rand_p
p0 = export_par
# _n 回 | 収束するまで繰り返す
for i in 1..@iter
# learn
t1 = Time.now
yield
t2 = Time.now
# 収束判定
p1 = export_par
e = mse(p0, p1)
printf "%5d %.5e %.5e\n", i, e, t2-t1
break if e < @radius
p0 = p1
end
# prediction
max_p
max_z
# loglikelihood
l = loglikelihood
printf "LL = %e\n", l
best_p, best_l = [p1, l] if best_l == nil || best_l < l
end
# best result
import_par(best_p)
max_z
end
#### log likelihood ####
def loglikelihood
ll = 0
each_i{|i| ll += log( pa( @x[i] ) )}
ll
end
def loglikelihood_with_class
ll = 0
each_i{|i| ll += log( pagk(@x[i], @z[i]) ) }
ll
end
#### clastering result ####
def result
cs = Array.new(@k){|k| []}
each_i{|i| cs[ @z[i] ].push(i) }
cs
end
########################################
# show
########################################
def show
show_cons
show_hpar
show_par
show_var
show_exp
end
#### constants ####
def show_cons
puts "Model Constants"
puts "D = #{@d}, N = #{@n}, K = #{@k}"
end
#### hyper parameters ####
def show_hpar
puts "Hyper Parameters"
puts "a = #{@a}"
each_d{|d| puts "b = #{@b}"}
end
#### parameters ####
def show_par
puts "Parameters"
puts "p(k) = #{@pk}"
each_k do |k|
each_d do |d|
puts "p(a_#{d} | k = #{k}) = #{@pvgk[k][d]}"
end
end
end
#### variables ####
def show_var
puts "Variables"
each_i do |i|
puts "#{@z[i]}, #{@x[i]}"
end
end
#### expectations ####
def show_exp
puts "Expectations"
puts "ek = #{@ek}"
each_k do |k|
each_d do |d|
puts "ea[#{k}][#{d}] = #{@evgk[k][d]}"
end
end
end
end
| 4c4ffc6ef9c0ec3b1422f21ddbdeef7dcb74a8f7 | [
"Markdown",
"Ruby"
] | 3 | Markdown | muimuixxx/advent2013 | 1ef00ad3e98e8b83611aba0b7f3e6d9f9c318f0d | 417dd02fe9104832371a23b66947a10ec1069a8f | |
refs/heads/master | <repo_name>HuguesGithub/hj-zombicide-v2<file_sep>/core/bean/KeywordBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
 * KeywordBean class
* @author Hugues
* @since 1.0.00
* @version 1.04.27
*/
class KeywordBean extends LocalBean
{
/**
* Class Constructor
* @param Keyword $Keyword
*/
public function __construct($Keyword='')
{
parent::__construct();
$this->Keyword = ($Keyword=='' ? new Keyword() : $Keyword);
$this->EquipmentKeywordServices = new EquipmentKeywordServices();
}
/**
   * @param string $tBodyButtons Template for the trailing action buttons of the row
* @return string
*/
public function getRowForAdminPage($tBodyButtons)
{
$Keyword = $this->Keyword;
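    // Count the Equipment cards linked to this Keyword (shown in the row).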
$arrF = array(self::FIELD_KEYWORDID=>$Keyword->getId());
$Equipments = $this->EquipmentKeywordServices->getEquipmentKeywordsWithFilters($arrF);
$nb = count($Equipments);
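    // Build the admin edit and trash URLs for this Keyword.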
$queryArg = array(
self::CST_ONGLET=>'parametre',
self::CST_POSTACTION=>'edit',
'table'=>'keyword',
'id'=>$Keyword->getId()
);
$urlEdit = $this->getQueryArg($queryArg);
$queryArg[self::CST_POSTACTION] = 'trash';
$urlTrash = $this->getQueryArg($queryArg);
$args = array(
$nb.' Carte'.($nb>1?'s':'').' Equipement',
$urlEdit,
$urlTrash,
);
$tBody = '<tr><td>'.$Keyword->getId().'</td><td>'.$Keyword->getName().'</td><td>'.$Keyword->getDescription().'</td>';
return $tBody.vsprintf($tBodyButtons, $args).'</tr>';
}
}
<file_sep>/core/bean/OrigineBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
 * OrigineBean class
* @author Hugues
* @since 1.07.25
* @version 1.07.25
*/
class OrigineBean extends LocalBean
{
/**
   * @param Origine $Origine
*/
public function __construct($Origine=null)
{
parent::__construct();
$this->Origine = ($Origine==null ? new Origine() : $Origine);
$this->OrigineServices = new OrigineServices();
}
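  /**
   * Static convenience wrapper: builds a temporary OrigineBean and delegates to getSelect().
   * @param string $name Name attribute of the <select> element
   * @param int $selId Id of the Origine to preselect (0 for none)
   * @return string
   */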
public static function getStaticSelect($name, $selId=0)
{
$OrigineBean = new OrigineBean();
return $OrigineBean->getSelect($name, $selId);
}
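  /**
   * Builds the <select> element listing every Origine, with an "all" option first
   * and the entry matching $selId preselected.
   * @param string $name
   * @param int $selId
   * @return string
   */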
public function getSelect($name, $selId)
{
$Origines = $this->OrigineServices->getOriginesWithFilters();
$strOptions = $this->getBalise(self::TAG_OPTION, 'Toutes Origines', array(self::ATTR_VALUE=>0));
while (!empty($Origines)) {
$Origine = array_shift($Origines);
$args = array(self::ATTR_VALUE=>$Origine->getId());
if ($Origine->getId()==$selId) {
$args[self::ATTR_SELECTED] = self::CST_SELECTED;
}
$strOptions .= $this->getBalise(self::TAG_OPTION, $Origine->getName(), $args);
}
return $this->getBalise(self::TAG_SELECT, $strOptions, array(self::ATTR_NAME=>$name, self::ATTR_ID=>'filter-by-'.$name));
}
}
<file_sep>/core/bean/WpPageEquipmentsBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
 * WpPageEquipmentsBean class
* @author Hugues
* @since 1.04.15
* @version 1.04.28
*/
class WpPageEquipmentsBean extends WpPageBean
{
protected $urlTemplate = 'web/pages/public/wppage-equipments.php';
/**
* Class Constructor
* @param WpPage $WpPage
*/
public function __construct($WpPage='')
{
parent::__construct($WpPage);
$this->EquipmentServices = new EquipmentServices();
$this->EquipmentExpansionServices = new EquipmentExpansionServices();
$this->ExpansionServices = new ExpansionServices();
}
/**
* @return string
*/
public function getContentPage()
{
/////////////////////////////////////////////////////////////////////////////
    // Build the drop-down list of the Expansions that have Equipment cards.
    $Expansions = $this->ExpansionServices->getExpansionsWithFilters(array(), self::FIELD_DISPLAYRANK);
    $strExpansions = '';
    $EquipmentCardsToDisplay = array();
foreach ($Expansions as $Expansion) {
$id = $Expansion->getId();
$arrFilters = array(self::FIELD_EXPANSIONID=>$id);
$EquipmentExpansions = $this->EquipmentExpansionServices->getEquipmentExpansionsWithFilters($arrFilters);
      // If no Equipment card is attached, there is no need to display this expansion.
if (empty($EquipmentExpansions)) {
continue;
}
      // The Expansion can be added to the filter menu.
$strExpansions .= $this->getBalise(self::TAG_OPTION, $Expansion->getName(), array(self::ATTR_VALUE=>'set-'.$id));
/////////////////////////////////////////////////////////////////////////////
      // Fetch all the cards of this expansion.
foreach ($EquipmentExpansions as $EquipmentExpansion) {
$EquipmentCard = $this->EquipmentServices->selectEquipment($EquipmentExpansion->getEquipmentCardId());
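        // Sortable key: nice name + card id + expansion id (the expansion id is read back when rendering).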
$niceName = $EquipmentCard->getNiceName().'-'.$EquipmentCard->getId().'-'.$id;
$EquipmentCard->setExpansionId($id);
        // However, since we rely on the join table, the cards cannot be sorted directly here.
        // So they are stored temporarily.
$EquipmentCardsToDisplay[$niceName] = $EquipmentCard;
}
}
/////////////////////////////////////////////////////////////////////////////
    // Build the list of cards to display.
$strCartes = '';
if (!empty($EquipmentCardsToDisplay)) {
      // Sort the cards by name.
ksort($EquipmentCardsToDisplay);
foreach ($EquipmentCardsToDisplay as $name => $EquipmentCard) {
list(, , $id) = explode('-', $name);
$EquipmentBean = new EquipmentBean($EquipmentCard);
$strCartes .= $EquipmentBean->displayCard($id);
}
}
/////////////////////////////////////////////////////////////////////////////
    // Build the drop-down list for the "keyword" filters (and a few other categories).
$arr = array(
'weapon'=>'Armes',
'melee'=>'Armes de Mêlée',
'ranged'=>'Armes A distance',
'pimp'=>'Armes Pimp',
'dual'=>'Armes Dual',
'starter'=>'Equipement de départ'
);
$strCategories = '';
foreach ($arr as $key => $value) {
$strCategories .= $this->getBalise(self::TAG_OPTION, $value, array(self::ATTR_VALUE=>$key));
}
//////////////////////////////////////////////////////////////////
    // Fill in the template, then return it.
$args = array(
      // List of cards - 1
      $strCartes,
      // Expansion selection options - 2
      $strExpansions,
      // Category selection options - 3
      $strCategories,
'', '', '', '', '',
);
return $this->getRender($this->urlTemplate, $args);
}
/**
* @param array $post
*/
public function setFilters($post=null)
{ parent::setBeanFilters($post, self::FIELD_NAME); }
}
<file_sep>/core/domain/MissionRule.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
 * MissionRule class
* @author Hugues.
* @since 1.04.08
* @version 1.04.28
*/
class MissionRule extends LocalDomain
{
/**
   * Technical id of the record
* @var int $id
*/
protected $id;
/**
   * Technical id of the Mission
* @var int $missionId
*/
protected $missionId;
/**
   * Technical id of the Rule
* @var int $ruleId
*/
protected $ruleId;
/**
   * Title of the rule
* @var string $title
*/
protected $title;
/**
* @return int
*/
public function getId()
{ return $this->id; }
/**
* @return int
*/
public function getMissionId()
{ return $this->missionId; }
/**
* @return int
*/
public function getRuleId()
{ return $this->ruleId; }
/**
* @return string
*/
public function getTitle()
{ return $this->title; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id = $id; }
/**
* @param int $missionId
*/
public function setMissionId($missionId)
{ $this->missionId = $missionId; }
/**
* @param int $ruleId
*/
public function setRuleId($ruleId)
{ $this->ruleId = $ruleId; }
/**
* @param string $title
*/
public function setTitle($title)
{ $this->title = $title; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('MissionRule'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return MissionRule
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new MissionRule(), self::getClassVars(), $row); }
}
<file_sep>/core/bean/SkillBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
 * SkillBean class
* @author Hugues
* @since 1.00.00
* @version 1.05.12
*/
class SkillBean extends LocalBean
{
protected $urlRowAdmin = 'web/pages/admin/fragments/skill-row.php';
protected $urlRowPublic = 'web/pages/public/fragments/skill-row.php';
/**
* @param Skill $Skill
*/
public function __construct($Skill=null)
{
parent::__construct();
$this->Skill = ($Skill==null ? new Skill() : $Skill);
$this->SurvivorSkillServices = new SurvivorSkillServices();
}
//////////////////////////////////////////////////////////////////////////
  // Different presentation modes
/**
* @return string
*/
public function getRowForAdminPage()
{
///////////////////////////////////////////////////////////////
    // Fill in the template, then return it.
$args = array(
      // Id of the Skill - 1
      $this->Skill->getId(),
      // Code of the Skill - 2
      $this->Skill->getCode(),
      // Edit URL of the WpPost - 3
      $this->Skill->getWpPostEditUrl(),
      // Database edit URL - 4
      $this->Skill->getEditUrl(self::CST_SKILL),
      // Public URL of the post - 5
      $this->Skill->getWpPostUrl(),
      // Name of the Skill - 6
      $this->Skill->getName(),
      // Description of the Skill - 7
      $this->Skill->getDescription(),
      // Official or not? - 8
      ($this->Skill->getExpansion()->isOfficial() ? 'Oui' : 'Non'),
);
///////////////////////////////////////////////////////////////
    // Then return it.
return $this->getRender($this->urlRowAdmin, $args);
}
/**
* @return string
*/
public function getRowForPublicPage()
{
///////////////////////////////////////////////////////////////
    // Fill in the template, then return it.
$args = array(
      // Front URL of the Skill - 1
      $this->Skill->getWpPostUrl(),
      // Name of the Skill - 2
      $this->Skill->getName(),
      // Number of Skills owned by a Survivor, a Zombivant or an Ultimate, per level - 3
      $this->getSkillCartouches(),
      // Description of the Skill - 4
      $this->Skill->getDescription(),
      // Id of the Skill - 5
      $this->Skill->getId(),
);
return $this->getRender($this->urlRowPublic, $args);
}
private function getSkillCartouches()
{
///////////////////////////////////////////////////////////////
    // Build the elements of the Level column.
$strSkillsCartouches = '';
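    // Tag-level ids grouped by danger-level colour (used for the badge CSS class).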
$arrTags = array(
self::LVL_BLUE => array(10, 11),
self::LVL_YELLOW => array(20),
self::LVL_ORANGE => array(30, 31),
self::LVL_RED => array(40, 41, 42),
);
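    // Survivor type ids and the letter shown on the badge (S, Z, U, UZ).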
$arrLvls = array(1=>'S', 2=>'Z', 3=>'U', 4=>'UZ');
foreach ($arrTags as $key => $value) {
foreach ($arrLvls as $k => $v) {
$nb = $this->getNbSkillsByTag($value, $k);
if ($nb!=0) {
$strSkillsCartouches .= $this->getBalise(self::TAG_SPAN, $v.' : '.$nb, array(self::ATTR_CLASS=>'badge badge-'.$key.'-skill'));
}
}
}
return $strSkillsCartouches;
}
/**
   * @param array $arrTags List of tag-level ids for which we want the number of SurvivorSkill pairs
   * @param int $type Survivor type id (see $arrLvls in getSkillCartouches)
* @return int
*/
private function getNbSkillsByTag($arrTags, $type=1)
{
$arrFilters = array(
self::FIELD_SKILLID => $this->Skill->getId(),
self::FIELD_SURVIVORTYPEID => $type,
);
$nb = 0;
    // For each tag, query the database and add up the returned count.
while (!empty($arrTags)) {
$arrFilters[self::FIELD_TAGLEVELID] = array_shift($arrTags);
$SurvivorSkills = $this->SurvivorSkillServices->getSurvivorSkillsWithFilters($arrFilters);
$nb += count($SurvivorSkills);
}
return $nb;
}
}
<file_sep>/core/domain/MissionExpansion.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
 * MissionExpansion class
* @author Hugues.
* @since 1.04.27
* @version 1.05.02
*/
class MissionExpansion extends LocalDomain
{
/**
   * Technical id of the join record
* @var int $id
*/
protected $id;
/**
   * Technical id of the Mission
* @var int $missionId
*/
protected $missionId;
/**
   * Technical id of the Expansion
* @var int $expansionId
*/
protected $expansionId;
/**
* @return int
*/
public function getId()
{return $this->id; }
/**
   * @return int
*/
public function getMissionId()
  { return $this->missionId; }
/**
   * @return int
*/
public function getExpansionId()
{ return $this->expansionId; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id=$id; }
/**
* @param int $missionId
*/
public function setMissionId($missionId)
{ $this->missionId = $missionId; }
/**
* @param int $expansionId
*/
public function setExpansionId($expansionId)
{ $this->expansionId = $expansionId; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('MissionExpansion'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return MissionExpansion
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new MissionExpansion(), self::getClassVars(), $row); }
/**
* @return array EquipmentExpansion
*/
public function getEquipmentExpansions()
{
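    // Lazy-load: fetch the EquipmentExpansion rows linked to this expansion only once.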
if ($this->EquipmentExpansions == null) {
$arrFilters = array(self::FIELD_EXPANSIONID=>$this->expansionId);
$this->EquipmentExpansions = $this->EquipmentExpansionServices->getEquipmentExpansionsWithFilters($arrFilters);
}
return $this->EquipmentExpansions;
}
/**
* @param Expansion $Expansion
*/
public function setExpansion($Expansion)
{ $this->Expansion=$Expansion; }
}
<file_sep>/core/services/WeaponProfileServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
 * WeaponProfileServices class
* @author Hugues.
* @since 1.04.27
* @version 1.04.27
*/
class WeaponProfileServices extends LocalServices
{
/**
   * The Dao object used to run the queries
   * @var WeaponProfileDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new WeaponProfileDaoImpl();
}
}
<file_sep>/core/domain/Tile.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
 * Tile class
* @author Hugues.
* @since 1.04.07
* @version 1.07.22
*/
class Tile extends LocalDomain
{
/**
   * Technical id of the record
* @var int $id
*/
protected $id;
/**
   * Technical id of the Expansion
* @var int $expansionId
*/
protected $expansionId;
/**
   * Code of the Tile
* @var string $code
*/
protected $code;
protected $coordPoly;
protected $zoneType;
/**
   * Probably to be removed...
* @var string $zoneAcces
*/
protected $zoneAcces;
/**
   * Is the Tile active?
   * @var int $activeTile
*/
protected $activeTile;
protected $oCode;
protected $side_top;
protected $side_right;
protected $side_bottom;
protected $side_left;
/**
* @return int
*/
public function getId()
{return $this->id; }
/**
* @return string
*/
public function getCode()
{ return $this->code; }
/**
* @return int
*/
public function getExpansionId()
{ return $this->expansionId; }
/**
* @return string
*/
public function getZoneAcces()
{ return $this->zoneAcces; }
public function getOCode()
{ return $this->oCode; }
public function getSideTop()
{ return $this->side_top; }
public function getSideRight()
{ return $this->side_right; }
public function getSideBottom()
{ return $this->side_bottom; }
public function getSideLeft()
{ return $this->side_left; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id=$id; }
/**
* @param string $code
*/
public function setCode($code)
{ $this->code=$code; }
/**
* @param int $expansionId
*/
public function setExpansionId($expansionId)
{ $this->expansionId=$expansionId; }
/**
* @param string $zoneAcces
*/
public function setZoneAcces($zoneAcces)
{ $this->zoneAcces=$zoneAcces; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('Tile'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return Tile
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new Tile(), self::getClassVars(), $row); }
/**
* @return string
*/
public function getImgUrl()
{ return '/wp-content/plugins/hj-zombicide/web/rsc/img/tiles/'.$this->code.'-500px.png'; }
/**
* @return string
*/
public function getDimensions()
{
list($width, $height, ,) = getimagesize('http://zombicide.jhugues.fr'.$this->getImgUrl());
return $width.'px x '.$height.'px';
}
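  /*
   * Note : getimagesize() est appelé ici sur l'URL distante de l'image ; chaque affichage de cette
   * information déclenche donc une requête HTTP vers le site.
   */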
/**
* @param array $row
* @return Tile
*/
public static function convertElementFromPost($row)
{
$Obj = new Tile();
$vars = get_class_vars('Tile');
if (!empty($vars)) {
foreach ($vars as $key => $value) {
$Obj->setField($key, str_replace("\\", '', $row[$key]));
}
if ($row['active']=='on') {
$Obj->setField('active', 1);
}
}
return $Obj;
}
}
<file_sep>/core/actions/ExpansionActions.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* ExpansionActions
* @author Hugues
* @since 1.04.30
* @version 1.08.01
*/
class ExpansionActions extends LocalActions
{
/**
* Constructeur
*/
public function __construct($post=array())
{
parent::__construct();
$this->post = $post;
$this->ExpansionServices = new ExpansionServices();
$this->SurvivorServices = new SurvivorServices();
$this->WpPostServices = new WpPostServices();
}
/**
* Point d'entrée des méthodes statiques.
* @param array $post
* @return string
**/
public static function dealWithStatic($post)
{
$returned = '';
$Act = new ExpansionActions($post);
switch ($post[self::CST_AJAXACTION]) {
case self::AJAX_EXPANSIONVERIF :
$returned = $Act->dealWithExpansionVerif(true);
break;
case self::AJAX_GETEXPANSIONS :
$returned = $Act->dealWithGetExpansions();
break;
default :
        $returned = 'Erreur dans ExpansionActions > dealWithStatic, '.$post[self::CST_AJAXACTION].' inconnu.';
break;
}
return $returned;
}
/**
   * Récupération du contenu de la page via une requête Ajax.
   * @return string
*/
public function dealWithGetExpansions()
{
$Bean = new WpPageExpansionsBean();
$Bean->setFilters($this->post);
return $this->jsonString($Bean->getListContentPage(), self::PAGE_EXTENSION, true);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
// Bloc de gestion de vérifications des Extensions en Home Admin
/**
* @param boolean $isVerif
* @return string
*/
public function dealWithExpansionVerif($isVerif=false)
{
// On récupère les articles d'extensions
$args = array(
self::WP_CAT => self::WP_CAT_EXPANSION_ID,
self::WP_TAXQUERY => array(),
self::WP_POSTSTATUS => self::WP_PUBLISH.', future, pending',
);
$this->WpPostExpansions = $this->WpPostServices->getArticles($args);
$nbWpPostExpansions = count($this->WpPostExpansions);
// Et les extensions en base
$this->Expansions = $this->ExpansionServices->getExpansionsWithFilters();
$nbExpansions = count($this->Expansions);
if ($isVerif) {
$this->checkExpansions();
$strBilan = $this->jsonString($this->strBilan, self::AJAX_EXPANSIONVERIF, true);
} elseif ($nbWpPostExpansions!=$nbExpansions) {
$strBilan = "Le nombre d'articles ($nbWpPostExpansions) ne correspond pas au nombre d'extensions en base ($nbExpansions).<br>";
$strBilan .= "Une vérification est vivement conseillée.";
} else {
$strBilan = "Le nombre d'articles ($nbWpPostExpansions) correspond au nombre d'extensions en base.";
}
return $strBilan;
}
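  /*
   * Résumé du déroulé de checkExpansions() ci-dessous :
   *   1. les Expansion en base sont indexées par code ;
   *   2. pour chaque article WpPost : création de l'entrée manquante, sinon comparaison des champs via checkExpansion() ;
   *   3. les entrées restées en base sans article correspondant sont listées dans le bilan.
   */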
private function checkExpansions()
{
$hasErrors = false;
$strErrors = '';
$this->strBilan = "Début de l'analyse des données relatives aux Extensions.<br>";
$this->strBilan .= "Il y a ".count($this->WpPostExpansions)." articles d'Extensions.<br>";
$this->strBilan .= "Il y a ".count($this->Expansions)." entrées en base.<br>";
/////////////////////////////////////////////////////////////////////
// On va réorganiser les Expansions pour les retrouver facilement
$arrExpansions = array();
while (!empty($this->Expansions)) {
$Expansion = array_shift($this->Expansions);
if (isset($arrExpansions[$Expansion->getCode()])) {
$strErrors .= "Le code <em>".$Expansion->getCode()."</em> semble être utilisé deux fois dans la base de données.<br>";
$hasErrors = true;
}
$arrExpansions[$Expansion->getCode()] = $Expansion;
}
/////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////
while (!empty($this->WpPostExpansions)) {
// On regarde les articles créés et on vérifie les données en base, si elles existent et si elles sont cohérentes entre elles.
// On récupère le WpPost et ses données
$this->WpPost = array_shift($this->WpPostExpansions);
$code = $this->WpPost->getPostMeta(self::FIELD_CODE);
if (!isset($arrExpansions[$code])) {
        // A priori, l'article n'a pas de code associé en base. Il faut donc en créer un qui corresponde.
$Expansion = new Expansion();
$Expansion->setCode($code);
$name = $this->WpPost->getPostTitle();
$Expansion->setName($name);
$Expansion->setDisplayRank($this->WpPost->getPostMeta(self::FIELD_DISPLAYRANK));
$Expansion->setOfficial($this->WpPost->getPostMeta(self::FIELD_OFFICIAL));
// On insère la donnée et on log dans le bilan
$this->ExpansionServices->insertExpansion($Expansion);
$this->strBilan .= "L'article <em>".$name."</em> a été créé en base.<br>";
continue;
}
$Expansion = $arrExpansions[$code];
unset($arrExpansions[$code]);
$this->checkExpansion($Expansion);
}
/////////////////////////////////////////////////////////////////////
// On vérifie que la totalité des Extensions en base ont été utilisées. Si ce n'est pas le cas, il faut créer des articles correspondants.
if (!empty($arrExpansions)) {
$this->strBilan .= "On a des données en base qui n'ont pas d'article correspondant.<br>";
while (!empty($arrExpansions)) {
$Expansion = array_shift($arrExpansions);
$this->strBilan .= '<br>Article à créer pour une Extension : '.$Expansion->getName().' ['.$Expansion->toJson().'].<br>';
}
}
/////////////////////////////////////////////////////////////////////
$this->strBilan .= "Fin de l'analyse des données relatives aux Extensions.<br>";
if ($hasErrors) {
$this->strBilan .= "Anomalies constatées :<br>".$strErrors;
} else {
$this->strBilan .= "Aucune anomalie constatée.";
}
}
private function checkExpansion($Expansion)
{
$doUpdate = false;
// On initialise les données de l'article
$name = $this->WpPost->getPostTitle();
$displayRank = $this->WpPost->getPostMeta(self::FIELD_DISPLAYRANK);
$official = $this->WpPost->getPostMeta(self::FIELD_OFFICIAL);
// On vérifie si la donnée en base correspond à l'article.
$strError = '';
if ($Expansion->getName()!=$name) {
$Expansion->setName($name);
$strError .= "Le Nom a été mis à jour.<br>";
$doUpdate = true;
}
if ($Expansion->getDisplayRank()!=$displayRank) {
$Expansion->setDisplayRank($displayRank);
$strError .= "Le Rang d'affichage a été mis à jour.<br>";
$doUpdate = true;
}
if ($Expansion->isOfficial()!=$official) {
$Expansion->setOfficial($official);
$strError .= "Le statut Officiel a été mis à jour.<br>";
$doUpdate = true;
}
    // On peut aussi envisager de mettre à jour les champs nbSurvivants et nbMissions...
$Survivors = $this->SurvivorServices->getSurvivorsWithFilters(array(self::FIELD_EXPANSIONID=>$Expansion->getId()));
if ($Expansion->getNbSurvivants()!=count($Survivors)) {
$Expansion->setNbSurvivants(count($Survivors));
$strError .= "Le Nombre de Survivants a été mis à jour.<br>";
$doUpdate = true;
}
// Vérifions le nombre de Dalles
$Tiles = $Expansion->getTiles();
    // On exclut les sets de Dalles, parce qu'une Dalle est rattachée à une Extension et que les sets de Dalles proposent les mêmes Dalles pour une autre Extension.
$doNotCheckTilesForExpansions = array(59, 60, 61, 62, 40);
if ($Expansion->getNbDalles()!=count($Tiles) && !in_array($Expansion->getId(), $doNotCheckTilesForExpansions)) {
$this->strBilan .= "Le nombre de Dalles pour l'Extension <em>".$name."</em> ne correspond pas. Nb dans le champ : ".$Expansion->getNbDalles().". Nb donnés par la requête : ".count($Tiles).".<br>";
// Note : On ne met à jour que si la requête renvoie plus que la donnée du champ.
if ($Expansion->getNbDalles()<count($Tiles)) {
$Expansion->setNbDalles(count($Tiles));
$doUpdate = true;
}
}
// Vérifions le nombre de Missions
// TODO : Données à mettre à jour dans les articles. Les champs "Dalles" des WpPost de type Missions doivent être renseignés
$Missions = $Expansion->getMissions();
if ($Expansion->getNbMissions()!=count($Missions) && $Expansion->getNbMissions()<count($Missions)) {
//$this->strBilan .= "Le nombre de Missions pour l'Extension <em>".$name."</em> ne correspond pas. Nb dans le champ : ".$Expansion->getNbMissions().". Nb donnés par la requête : ".count($Missions).".<br>";
// Note : On ne met à jour que si la requête renvoie plus que la donnée du champ.
$Expansion->setNbMissions(count($Missions));
$doUpdate = true;
}
if ($doUpdate) {
// Si nécessaire, on update en base.
$this->ExpansionServices->updateExpansion($Expansion);
$this->strBilan .= "Les données de l'Extension <em>".$name."</em> ont été mises à jour.<br>".$strError;
}
}
// Fin du bloc relatif à la vérification d'extensions sur la Home Admin.
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
}
<file_sep>/core/bean/LocalBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe LocalBean
* @author Hugues
* @since 1.00.00
* @version 1.07.21
*/
class LocalBean extends UtilitiesBean implements ConstantsInterface
{
/**
* @param array $addArg
* @param array $remArg
* @return string
*/
public function getQueryArg($addArg, $remArg=array())
{
$addArg['page'] = 'hj-zombicide/admin_manage.php';
$remArg[] = 'form';
$remArg[] = 'id';
return add_query_arg($addArg, remove_query_arg($remArg, 'http://zombicide.jhugues.fr/wp-admin/admin.php'));
}
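  /*
   * Esquisse d'utilisation (illustratif, les valeurs "edit" et 12 sont fictives) :
   *   $url = $this->getQueryArg(array('form'=>'edit', 'id'=>12));
   * "form" et "id" sont d'abord retirés de l'URL d'admin, puis le paramètre "page" et ceux
   * passés en argument y sont ajoutés via add_query_arg().
   */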
/**
* @param array $addArg
* @param array $remArg
* @param string $url
* @return string
*/
public function getFrontQueryArg($addArg, $remArg=array(), $url='http://zombicide.jhugues.fr/')
{ return add_query_arg($addArg, remove_query_arg($remArg, $url)); }
/**
* @return bool
*/
public static function isAdmin()
{ return current_user_can('manage_options'); }
/**
* @return bool
*/
public static function isLogged()
{ return is_user_logged_in(); }
/**
* @return int
*/
public static function getWpUserId()
{ return get_current_user_id(); }
/**
* @param string $id
* @param string $default
* @return mixed
*/
public function initVar($id, $default='')
{
if (isset($_POST[$id])) {
return $_POST[$id];
}
if (isset($_GET[$id])) {
return $_GET[$id];
}
return $default;
}
public function getPublicBalise($balise, $label='', $attributes=array())
{ return $this->getBalise($balise, $label, $attributes); }
}
<file_sep>/core/domain/Duration.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe Duration
* @author Hugues.
* @since 1.0.00
* @version 1.04.28
*/
class Duration extends LocalDomain
{
/**
* Id technique de la donnée
* @var int $id
*/
protected $id;
/**
* Durée minimale estimée
* @var int $minDuration
*/
protected $minDuration;
/**
   * Durée maximale estimée (nulle si une seule durée donnée)
* @var int $maxDuration
*/
protected $maxDuration;
/**
* @return int
*/
public function getId()
{ return $this->id; }
/**
* @return int
*/
public function getMinDuration()
{ return $this->minDuration; }
/**
* @return int
*/
public function getMaxDuration()
{ return $this->maxDuration; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id = $id; }
/**
* @param int $minDuration
*/
public function setMinDuration($minDuration)
{ $this->minDuration = $minDuration; }
/**
* @param int $maxDuration
*/
public function setMaxDuration($maxDuration)
{ $this->maxDuration = $maxDuration; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('Duration'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return Duration
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new Duration(), self::getClassVars(), $row); }
/**
* @return string
*/
public function getStrDuree()
{ return $this->minDuration.($this->maxDuration == 0 ? '' : ' à '.$this->maxDuration).' minutes'; }
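  /*
   * Exemples illustratifs (valeurs fictives) :
   *   minDuration=30, maxDuration=0  => "30 minutes"
   *   minDuration=45, maxDuration=90 => "45 à 90 minutes"
   */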
/**
* @return DurationBean
*/
public function getBean()
{ return new DurationBean($this); }
}
<file_sep>/core/bean/WpPageTilesBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPageTilesBean
* @author Hugues
* @since 1.08.30
*/
class WpPageTilesBean extends WpPageBean
{
protected $urlTemplate = 'web/pages/public/wppage-tiles.php';
/**
* Class Constructor
* @param WpPage $WpPage
*/
public function __construct($WpPage='')
{
parent::__construct($WpPage);
$this->ExpansionServices = new ExpansionServices();
}
/**
* @return string
*/
public function getContentPage()
{ return $this->getListContentPage(); }
/**
* Retourne la liste des dalles
* @return string
*/
public function getListContentPage()
{
/////////////////////////////////////////////////////////////////////////////
// On récupère la liste de toutes les Extensions
$Expansions = $this->ExpansionServices->getExpansionsWithFilters(array(), self::FIELD_DISPLAYRANK);
$strFilters = '';
$strSpawns = '';
while (!empty($Expansions)) {
$Expansion = array_shift($Expansions);
// Si l'extension n'a pas de dalles, on passe à l'extension suivante.
if ($Expansion->getNbDalles()==0) {
continue;
}
// On en profite aussi pour construire le bloc de filtres.
$strFilters .= $this->getBalise(self::TAG_OPTION, $Expansion->getName(), array(self::ATTR_VALUE => 'set-'.$Expansion->getId()));
}
//////////////////////////////////////////////////////////////////
    // On enrichit le template puis on le restitue.
$args = array(
// Les filtres disponibles - 1
$strFilters,
);
return $this->getRender($this->urlTemplate, $args);
}
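  /*
   * Note : chaque option a une valeur préfixée "set-" (ex. "set-3", id illustratif) ; côté client,
   * zombicide.js (addPageTileAjaxActions) retire ce préfixe via substr(4) avant l'appel Ajax getTiles.
   */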
}
<file_sep>/core/domain/Expansion.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe Expansion
* @author Hugues.
* @since 1.04.00
* @version 1.07.22
*/
class Expansion extends WpPostRelais
{
/**
* Id technique de la donnée
* @var int $id
*/
protected $id;
/**
* Code de la donnée
* @var string $code
*/
protected $code;
/**
* Nom de la donnée
* @var string $name
*/
protected $name;
/**
* Rang d'affichage
* @var int $displayRank
*/
protected $displayRank;
/**
* Nombre de Survivants
* @var int $nbSurvivants
*/
protected $nbSurvivants;
/**
* Nombre de Missions
* @var int $nbMissions
*/
protected $nbMissions;
/**
* Nombre de Dalles
* @var int $nbDalles
*/
protected $nbDalles;
/**
* Est officielle ?
* @var boolean $official;
*/
protected $official;
/**
* Getter Id
* @return int
*/
public function getId()
{return $this->id; }
/**
* Getter Code
* @return string
*/
public function getCode()
{ return $this->code; }
/**
* Getter Name
* @return string
*/
public function getName()
{ return $this->name; }
/**
* Getter displayRank
* @return int
*/
public function getDisplayRank()
{ return $this->displayRank; }
/**
* Getter nbSurvivants
* @return int
*/
public function getNbSurvivants()
{ return $this->nbSurvivants; }
/**
* Getter nbMissions
* @return int
*/
public function getNbMissions()
{ return $this->nbMissions; }
/**
* Getter nbDalles
* @return int
*/
public function getNbDalles()
{ return $this->nbDalles; }
/**
* Getter official
* @return boolean
*/
public function isOfficial()
{ return ($this->official==1); }
/**
* @param int $id
*/
public function setId($id)
{ $this->id=$id; }
/**
* @param string $code
*/
public function setCode($code)
{ $this->code=$code; }
/**
* @param string $name
*/
public function setName($name)
{ $this->name=$name; }
/**
* @param int $displayRank
*/
public function setDisplayRank($displayRank)
{ $this->displayRank=$displayRank; }
/**
* @param int $nbSurvivants
*/
public function setNbSurvivants($nbSurvivants)
{ $this->nbSurvivants = $nbSurvivants; }
/**
* @param int $nbMissions
*/
public function setNbMissions($nbMissions)
{ $this->nbMissions = $nbMissions; }
/**
* @param int $nbDalles
*/
public function setNbDalles($nbDalles)
{ $this->nbDalles = $nbDalles; }
/**
* @param boolean $official
*/
public function setOfficial($official)
{ $this->official = $official; }
///////////////////////////////////////////////////////////////
/**
* Retourne les attributs de la classe
* @return array
*/
public function getClassVars()
{ return get_class_vars('Expansion'); }
/**
* @param array $row
* @param string $a
* @param string $b
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new Expansion(), self::getClassVars(), $row); }
/**
* @return ExpansionBean
*/
public function getBean()
{ return new ExpansionBean($this); }
///////////////////////////////////////////////////////////////
/**
* @return string
*/
public function getWpPost()
{ return $this->getMainWpPost(self::FIELD_CODE, $this->code, self::WP_CAT_EXPANSION_ID); }
/**
* @param array $row
*/
public static function convertElementFromPost($row)
{
$Obj = new Expansion();
$vars = get_class_vars('Expansion');
if (!empty($vars)) {
foreach ($vars as $key => $value) {
$Obj->setField($key, $row[$key]);
}
if ($row['officielle']=='on') {
$Obj->setField('officielle', 1);
}
if ($row['active']=='on') {
$Obj->setField('active', 1);
}
}
return $Obj;
}
public function getTiles()
{
if ($this->Tiles==null) {
$this->Tiles = $this->TileServices->getTilesWithFilters(array(self::FIELD_EXPANSIONID=>$this->getId()));
}
return $this->Tiles;
}
public function getMissions()
{
if ($this->Missions==null) {
$this->Missions = $this->MissionServices->getMissionsByExpansionId($this->getId());
}
return $this->Missions;
}
}
<file_sep>/core/bean/WpPageSkillsBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPageSkillsBean
* @author Hugues
* @since 1.00.00
* @version 1.08.01
*/
class WpPageSkillsBean extends WpPageBean
{
protected $urlTemplate = 'web/pages/public/wppage-skills.php';
/**
* Class Constructor
* @param WpPage $WpPage
*/
public function __construct($WpPage='')
{
parent::__construct($WpPage);
$this->SkillServices = new SkillServices();
}
/**
* On vérifie si on est ici pour traiter la page des compétences, ou une compétence en particulier.
* Pour le cas d'une compétence, on retourne une WpPostSkillBean.
* @return string
*/
public function getContentPage()
{
// On récupère l'éventuel paramètre FIELD_SKILLID
$skillId = $this->initVar(self::FIELD_SKILLID, -1);
if ($skillId==-1) {
// S'il n'est pas défini, on affiche la liste des compétences
$this->setFilters();
return $this->getListContentPage();
} else {
// S'il est défini, on affiche la compétence associée.
$Bean = new WpPostSkillBean($skillId);
return $Bean->getContentPage();
}
}
/**
* @return string
*/
public function getListContentPage()
{
/////////////////////////////////////////////////////////////////////////////
// On récupère la liste de compétences puis les éléments nécessaires à la pagination.
$Skills = $this->SkillServices->getSkillsWithFilters($this->arrFilters, $this->colSort, $this->colOrder);
$this->nbElements = count($Skills);
$this->nbPages = ceil($this->nbElements/$this->nbperpage);
// On slice la liste pour n'avoir que celles à afficher
$displayedSkills = array_slice($Skills, $this->nbperpage*($this->paged-1), $this->nbperpage);
// On construit le corps du tableau
$strBody = '';
if (!empty($displayedSkills)) {
foreach ($displayedSkills as $Skill) {
$strBody .= $Skill->getBean()->getRowForPublicPage();
}
}
/////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////
// Affiche-t-on le filtre ?
$showFilters = (isset($this->arrFilters[self::FIELD_DESCRIPTION])&&$this->arrFilters[self::FIELD_DESCRIPTION]!='');
/////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////
    // On enrichit le template puis on le restitue.
$args = array(
// Les lignes du tableau - 1
$strBody,
// On affiche le dropdown par pages - 2
$this->getDropdownNbPerPages(),
// On affiche la pagination - 3
$this->getNavPagination(),
// Affiche ou non le bloc filtre - 4
$showFilters ? 'block' : 'none',
// Filtre sur la Description - 5
$this->arrFilters[self::FIELD_DESCRIPTION],
);
return $this->getRender($this->urlTemplate, $args);
}
/**
* @param array $post
*/
public function setFilters($post=null)
{
$pattern = "/description=(.*)/";
    if (isset($post[self::CST_FILTERS]) && preg_match($pattern, $post[self::CST_FILTERS], $matches)) {
$post[self::CST_FILTERS] .= '&name='.$matches[1];
}
parent::setBeanFilters($post, self::FIELD_NAME);
}
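  /*
   * Exemple illustratif : si la chaîne de filtres reçue vaut "description=Sniper", elle devient
   * "description=Sniper&name=Sniper" avant l'appel à setBeanFilters(), ce qui permet a priori
   * d'appliquer le même critère au nom et à la description.
   */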
}
<file_sep>/web/rsc/zombicide.js
/*
* @version 1.04.26
*/
var $hj = jQuery;
$hj(document).ready(function(){
/***************
*** #01 - Home
*** Si on est sur la Homepage
***************/
if ($hj('#homeSectionArticles').length!=0 ) {
$hj('#more_news').click(function() {
var offset = $hj('#homeSectionArticles article').length;
addMoreNews(offset);
});
addPageSurvivantAjaxActions();
}
/***************
*** #02 - Compétences
*** Si on est sur la page du Listing des Compétences
***************/
if ($hj('#page-competences').length!=0 ) {
addPageCompetenceAjaxActions();
}
/***************
*** #03 - Missions
*** Si on est sur la page du Listing des Missions
***************/
if ($hj('#page-missions').length!=0 ) {
addPageMissionAjaxActions();
}
/***************
*** #04 - Survivants
*** Si on est sur la page du Listing des Survivants
***************/
if ($hj('#page-survivants').length!=0 ) {
addPageSurvivantAjaxActions();
}
/***************
*** #05 - Cartes Equipement
*** Si on est sur la page du Listing des Cartes Equipement
***************/
if ($hj('#page-equipements').length!=0 ) {
addPageCardEquipmentAjaxActions();
}
/***************
*** #06 - Page Tools
*** Si on est sur une des sous pages Tools
***************/
if ($hj('#page-tools').length!=0 ) {
addPageToolsAjaxActions();
}
/***************
*** #07 - Extensions
*** Si on est sur la page du Listing des Extensions
***************/
if ($hj('#page-extensions').length!=0 ) {
addPageExpansionAjaxActions();
}
/***************
*** #08 - Dalles
*** Si on est sur la page du Listing des Dalles
***************/
if ($hj('#page-dalles').length!=0 ) {
addPageTileAjaxActions();
}
});
/********
* Ajax Actions - HomePage
********/
function addMoreNews(offset) {
var obj;
var data = {'action': 'dealWithAjax', 'ajaxAction': 'addMoreNews', 'value': offset};
$hj.post(
ajaxurl,
data,
function(response) {
try {
obj = JSON.parse(response);
if (obj['homeSectionArticles'] != '' ) {
$hj('#homeSectionArticles').append(obj['homeSectionArticles']);
if ($hj('#homeSectionArticles>article').length%6 != 0 ) {
$hj('#more_news').remove();
}
} else {
$hj('#more_news').remove();
}
addPageSurvivantAjaxActions();
} catch (e) {
console.log("error: "+e);
console.log(response);
}
}
);
}
function addPageTileAjaxActions() {
$hj('button[data-ajaxaction="reset"]').unbind().click(function(){
$hj('#filter-expansionId').val('');
$hj('#tile-container').html("");
});
$hj('#filter-expansionId').unbind().change(function(){
var idSet = $hj(this).val().substr(4);
if (idSet=='') {
$hj('#tile-container').html("");
return false;
} else {
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getTiles', 'idSet': idSet};
resolveCallAjax(data, 'tile-container');
}
});
}
function addPageExpansionAjaxActions() {
// On ajoute une Action sur les actions Ajax
$hj('#page-extensions .ajaxAction').unbind().click(function(){
resolvePageExpansionAjaxActions($hj(this));
return false;
});
// On ajoute une Action sur le changement de nombre d'éléments à afficher par page
$hj('#displayedRows').change(function(){
resolvePageExpansionAjaxActions($hj(this));
return false;
});
// On ajoute une Action pour afficher/cacher le formulaire de filtre
$hj('i.fa-filter').unbind().click(function(){
$hj('#filters input').val('');
$hj('#filters').toggle();
});
}
function addPageCompetenceAjaxActions() {
// On ajoute une Action sur les actions Ajax
$hj('#page-competences .ajaxAction').unbind().click(function(){
resolvePageCompetenceAjaxActions($hj(this));
return false;
});
// On ajoute une Action sur le changement de nombre d'éléments à afficher par page
$hj('#displayedRows').change(function(){
resolvePageCompetenceAjaxActions($hj(this));
return false;
});
// On ajoute une Action pour afficher/cacher le formulaire de filtre
$hj('i.fa-filter').unbind().click(function(){
$hj('#filters input').val('');
$hj('#filters').toggle();
});
}
function addPageMissionAjaxActions() {
// On ajoute une Action sur les actions Ajax
$hj('#page-missions .ajaxAction').unbind().click(function(){
resolvePageMissionAjaxActions($hj(this));
return false;
});
// On ajoute une Action sur le changement de nombre d'éléments à afficher par page
$hj('#displayedRows').change(function(){
resolvePageMissionAjaxActions($hj(this));
return false;
});
// On ajoute une Action pour afficher/cacher le formulaire de filtre
$hj('i.fa-filter').unbind().click(function(){
$hj('#filters input').val('');
$hj('#filters').toggle();
});
}
function addPageSurvivantAjaxActions() {
// On ajoute une Action sur les actions Ajax
$hj('#page-survivants .ajaxAction').unbind().click(function(){
resolvePageSurvivantAjaxActions($hj(this));
return false;
});
// On ajoute une Action sur le changement de nombre d'éléments à afficher par page
$hj('#displayedRows').change(function(){
resolvePageSurvivantAjaxActions($hj(this));
return false;
});
// On ajoute un action sur les cases à cocher des Profils.
$hj('.publicSurvivorRow .changeProfile').unbind().click(function(){
addPageSurvivantLocalActions($hj(this));
return false;
});
// On ajoute une Action pour afficher/cacher le formulaire de filtre
$hj('i.fa-filter').unbind().click(function(){
$hj('#filters input').val('');
$hj('#filters').toggle();
});
}
function addPageSurvivantLocalActions(clicked) {
var type = clicked.data('type');
clicked.parent().parent().parent().parent().parent().removeClass('survivant zombivant ultimate ultimatez').addClass(type);
clicked.parent().siblings().removeClass('active');
clicked.parent().addClass('active');
}
function addPageCardEquipmentAjaxActions() {
$hj('#filters select').unbind().change(function(){
var set = $hj('#filter-expansionId').val();
var cType = $hj('#filter-typeId').val();
console.log(set);
console.log(cType);
$hj('#card-container .card').each(function(){
if ((set=='' || set!=''&&$hj(this).hasClass(set)) && (cType==undefined || cType=='' || cType!=''&&$hj(this).hasClass(cType))) {
$hj(this).css('display', 'inline-block');
} else {
$hj(this).css('display', 'none');
}
});
});
$hj('button[data-ajaxaction="reset"]').unbind().click(function(){
$hj('#filter-expansionId').val('');
$hj('#filter-typeId').val('');
$hj('#card-container .card').each(function(){
$hj(this).css('display', 'inline-block');
});
return false;
});
}
function addPageToolsAjaxActions() {
addSelectionSurvivantActions();
addThrowDiceActions();
addGenerationMapActions();
}
function addDownloadMap() {
$hj('.fa-camera').unbind().click(function(){
$hj('.overlay').addClass('loading');
$hj('.fa-unlock').removeClass('fa-unlock').addClass('fa-lock');
var params = lauchBuildingMapv2();
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getImageMap', 'params': params};
$hj.post(
ajaxurl,
data,
function(response) {
try {
var win = window.open('http://zombicide.jhugues.fr'+response, '_blank');
console.log('http://zombicide.jhugues.fr'+response);
if (win) {
win.focus();
}
} catch (e) {
console.log("error: "+e);
console.log(response);
}
}
).done(
function() {
$hj('.overlay').removeClass('loading');
}
);
});
}
function turnFaSquareToCheckBox() {
$hj('.displayMap i.fakeCb').unbind().click(function(){
$hj(this).toggleClass('fa-square fa-check-square');
});
}
function addLockUnlockEffect() {
$hj('.displayMap i.fakeLock').unbind().click(function(){
$hj(this).toggleClass('fa-unlock fa-lock');
});
}
function addCogAction() {
$hj('.displayMap i.fa-cog').unbind().click(function(){
var params = '';
// Liste des extensions sélectionnées
var expansionIds = $hj('#expansionIds').val();
if (expansionIds==undefined) {
params += '&version=2';
expansionIds = '';
$hj('.btn-expansion i.fa-check-square').each(function(){
if (expansionIds!='') {
expansionIds += ',';
}
expansionIds += $hj(this).parent().parent().data('expansion-id');
});
}
params += '&expansionIds='+expansionIds;
// Liste des Dalles déjà placées
var locks = '';
$hj('.displayMap i.fakeLock').each(function() {
if (locks!='') {
locks += ',';
}
locks += $hj(this).attr('data-lock');
});
params += '&locks='+locks;
// Et on ajoute la case courante
var current = $hj(this).siblings('.fakeLock').data('lock');
    params += '&current='+current;
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getNonUsedTiles', 'params': params};
$hj('.overlay').addClass('loading');
var idPage = 'page-generation-map';
$hj.post(
ajaxurl,
data,
function(response) {
try {
var obj = JSON.parse(response);
if (obj[idPage] != '' ) {
$hj('#'+idPage).prepend(obj[idPage]);
$hj('.proposals img').unbind().click(function(){
var row = $hj(this).data('row')*1;
var col = $hj(this).data('col')*1;
var orientation = $hj(this).data('orientation');
var src = $hj(this).data('src');
var code = $hj(this).data('code');
$hj('.displayMap .row:nth-child('+(row+1)+') .cell:nth-child('+(col+1)+') img').attr('class', orientation).attr('src', src);
$hj('.displayMap .row:nth-child('+(row+1)+') .cell:nth-child('+(col+1)+') .fakeLock').attr('data-lock', 'cell_'+row+'_'+col+'_'+code+'_'+orientation);
$hj('.proposals').remove();
})
}
} catch (e) {
console.log("error: "+e);
console.log(response);
}
}
).done(
function() {
$hj('.overlay').removeClass('loading');
}
);
});
}
function addRotateAction() {
$hj('.displayMap i.fa-undo').unbind().click(function(){
var orientation = $hj(this).parent().siblings('img').attr('class');
var newOrientation = '';
switch (orientation) {
case 'top' : newOrientation = 'left'; break;
case 'left' : newOrientation = 'bottom'; break;
case 'bottom' : newOrientation = 'right'; break;
case 'right' : newOrientation = 'top'; break;
default : newOrientation = orientation; break;
}
var strToReplace = $hj(this).siblings('.fakeLock').data('lock');
$hj(this).siblings('.fakeLock').attr('data-lock', strToReplace.replace(orientation, newOrientation));
$hj(this).parent().siblings('img').attr('class', newOrientation);
});
$hj('.displayMap i.fa-redo').unbind().click(function(){
var orientation = $hj(this).parent().siblings('img').attr('class');
var newOrientation = '';
switch (orientation) {
case 'top' : newOrientation = 'right'; break;
case 'left' : newOrientation = 'top'; break;
case 'bottom' : newOrientation = 'left'; break;
case 'right' : newOrientation = 'bottom'; break;
default : newOrientation = orientation; break;
}
var strToReplace = $hj(this).siblings('.fakeLock').data('lock');
$hj(this).siblings('.fakeLock').attr('data-lock', strToReplace.replace(orientation, newOrientation));
$hj(this).parent().siblings('img').attr('class', newOrientation);
});
}
function lauchBuildingMapv2() {
var params = '';
// Largeur et Hauteur
params = 'width='+$hj('#width').val();
params += '&height='+$hj('#height').val();
// Liste des extensions sélectionnées
var expansionIds = $hj('#expansionIds').val();
if (expansionIds==undefined) {
params += '&version=2';
expansionIds = '';
$hj('.btn-expansion i.fa-check-square').each(function(){
if (expansionIds!='') {
expansionIds += ',';
}
expansionIds += $hj(this).parent().parent().data('expansion-id');
});
}
params += '&expansionIds='+expansionIds;
// Liste des CheckBoxes non sélectionnées
var cells = '';
$hj('.displayMap i.fa-square').each(function() {
if (cells!='') {
cells += ',';
}
cells += $hj(this).data('cell');
});
params += '&cells='+cells;
// Liste des Dalles déjà placées
var locks = '';
$hj('.displayMap i.fa-lock').each(function() {
if (locks!='') {
locks += ',';
}
locks += $hj(this).attr('data-lock');
});
params += '&locks='+locks;
return params;
}
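/*
 * Exemple illustratif de chaîne renvoyée (valeurs fictives) :
 *   "width=3&height=2&expansionIds=1,4&cells=cell_0_2&locks=cell_1_1_2C_left"
 * (le paramètre "&version=2" est ajouté lorsque #expansionIds n'existe pas dans le DOM).
 */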
function addChangeMapSize() {
if ($hj('.displayMap').length==0) {
$hj('#proceedRandomMap').trigger('click');
} else {
var actualHeight = $hj('.displayMap .row').length;
var actualWidth = $hj('.displayMap .row:first-child .cell').length;
var height = $hj('#height').val();
var width = $hj('#width').val();
var cellModel = $hj('.cellModel').clone();
$hj('.displayMap').removeClass('map'+actualWidth+'x'+actualHeight+' mapWidth'+actualWidth);
$hj('.displayMap').addClass('map'+width+'x'+height+' mapWidth'+width);
if (height<actualHeight) {
$hj('.displayMap .row:last-child').remove();
} else if (width<actualWidth) {
$hj('.displayMap .row .cell:last-child').remove();
} else if (height>actualHeight) {
$hj('.displayMap').append('<div class="row"></div>');
for (var cpt=0; cpt<width; cpt++) {
cellModel = $hj('.cellModel').clone();
cellModel.removeClass('hidden cellModel');
cellModel.find('i.fakeCb').attr('data-cell', 'cell_'+(height-1)+'_'+cpt);
cellModel.find('i.fakeLock').attr('data-lock', 'cell_'+(height-1)+'_'+cpt);
$hj('.displayMap .row:last-child').append(cellModel);
}
} else if (width>actualWidth) {
for (cpt=0; cpt<height; cpt++) {
cellModel = $hj('.cellModel').clone();
cellModel.removeClass('hidden cellModel');
cellModel.find('i.fakeCb').attr('data-cell', 'cell_'+cpt+'_'+(width-1));
cellModel.find('i.fakeLock').attr('data-lock', 'cell_'+cpt+'_'+(width-1));
$hj('.displayMap .row:nth-child('+(cpt+1)+')').append(cellModel);
}
}
}
}
function addGenerationMapActions() {
addDownloadMap();
$hj('#proceedRandomMap').unbind().click(function() {
var params = lauchBuildingMapv2();
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getRandomMap', 'params': params};
resolveCallAjax(data, 'page-generation-map');
});
$hj('#width').unbind().change(function(){
addChangeMapSize();
});
$hj('#height').unbind().change(function(){
addChangeMapSize();
});
/*
$hj('#width').unbind().change(function(){
var actualWidth = $hj('.displayMap .row .cell').length;
var width = $hj(this).val();
var height = $hj('#height').val();
if ($hj('.displayMap').length==0) {
$hj('#proceedRandomMap').trigger('click');
} else if (width<actualWidth) {
$hj('.displayMap').removeClass('map'+actualWidth+'x'+height+' mapWidth'+actualWidth).addClass('map'+width+'x'+height+' mapWidth'+width);
for (var cpt=0; cpt<height; cpt++) {
$hj('.displayMap .row .cell:last-child').remove();
}
} else {
$hj('.overlay').addClass('loading');
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getEmptyCell'};
$hj.post(
ajaxurl,
data,
function(response) {
try {
$hj('.displayMap').removeClass('map'+actualWidth+'x'+height+' mapWidth'+actualWidth).addClass('map'+width+'x'+height+' mapWidth'+width);
var obj = JSON.parse(response);
var emptyCell = obj['empty-cell'];
for (var cpt=0; cpt<height; cpt++) {
var newCell = emptyCell.replace('cell_0_0', 'cell_'+cpt+'_'+(width-1));
newCell = newCell.replace('cell_0_0', 'cell_'+cpt+'_'+(width-1));
$hj('.displayMap .row:nth-child('+(cpt+1)+')').append(newCell);
}
} catch (e) {
console.log("error: "+e);
console.log(response);
}
}
).done(
function() {
$hj('.overlay').removeClass('loading');
turnFaSquareToCheckBox();
addLockUnlockEffect();
addCogAction();
}
);
}
});
$hj('#height').unbind().change(function(){
var actualHeight = $hj('.displayMap .row').length;
var width = $hj('#width').val();
var height = $hj(this).val();
if ($hj('.displayMap').length==0) {
$hj('#proceedRandomMap').trigger('click');
} else if (height<actualHeight) {
$hj('.displayMap').removeClass('map'+width+'x'+actualHeight).addClass('map'+width+'x'+height);
$hj('.displayMap .row:last-child').remove();
} else {
$hj('.overlay').addClass('loading');
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getEmptyCell'};
$hj.post(
ajaxurl,
data,
function(response) {
try {
$hj('.displayMap').removeClass('map'+width+'x'+actualHeight).addClass('map'+width+'x'+height);
$hj('.displayMap').append('<div class="row"></div>');
var obj = JSON.parse(response);
var emptyCell = obj['empty-cell'];
for (var cpt=0; cpt<width; cpt++) {
var newCell = emptyCell.replace('cell_0_0', 'cell_'+(height-1)+'_'+cpt);
newCell = newCell.replace('cell_0_0', 'cell_'+(height-1)+'_'+cpt);
$hj('.displayMap .row:last-child').append(newCell);
}
} catch (e) {
console.log("error: "+e);
console.log(response);
}
}
).done(
function() {
$hj('.overlay').removeClass('loading');
turnFaSquareToCheckBox();
addLockUnlockEffect();
addCogAction();
}
);
}
});
*/
}
function addThrowDiceActions() {
$hj('#proceedThrowDice').unbind().click(function() {
var params = '';
params = 'nbDice='+$hj('#diceNumber').val();
params += '&seuil='+$hj('#accuracy').val();
params += '&modif='+$hj('#plusAuDe').val();
params += '&surunsix='+$hj('#surUnSix').val();
params += '&dual=0';
params += '&barbauto=0';
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getThrowDice', 'params': params};
resolveCallAjax(data, 'page-piste-de-des');
});
}
function addSelectionSurvivantActions() {
// Actions sur les boutons de sélections du nombre de survivants.
$hj('#nbSurvSel button').unbind().click(function(){
$hj(this).siblings().removeClass('active');
$hj(this).addClass('active');
return false;
});
// Actions sur les regroupements d'extensions
$hj('#page-tools .btn-expansion-group').unbind().click(function(){
$hj(this).nextUntil('.btn-expansion-group').toggleClass('hidden');
$hj(this).find('i').toggleClass('fa-chevron-circle-down fa-chevron-circle-right');
return false;
});
// Actions sur les boutons d'Extensions pour afficher ou non les Survivants associés.
$hj('#page-tools .btn-expansion').unbind().click(function(){
// S'il a la classe fa-square, on doit tous les afficher et les sélectionner.
var expansionId= $hj(this).data('expansion-id');
if ($hj(this).find('i').hasClass('fa-check-square')) {
$hj(this).find('i').addClass('fa-square').removeClass('fa-check-square');
$hj('#listing button[data-expansion-id="'+expansionId+'"]').each(function(){
$hj(this).addClass('hidden');
$hj(this).find('i').addClass('fa-square').removeClass('fa-check-square');
});
} else {
$hj(this).find('i').removeClass('fa-square').addClass('fa-check-square');
$hj('#listing button[data-expansion-id="'+expansionId+'"]').each(function(){
$hj(this).removeClass('hidden');
$hj(this).find('i').removeClass('fa-square').addClass('fa-check-square');
});
}
$hj('#nbSelected').html($hj('#listing .fa-check-square').length);
var nb = 0;
$hj('#listing button.btn-survivor').each(function(){ if ($hj(this).is(':visible')) nb++;});
$hj('#nbDisplayed').html(nb);
return false;
});
// Actions sur la case à cocher du bouton d'un Survivant.
$hj('#page-tools .btn-survivor').unbind().click(function(){
// On inverse le statut de la case à cocher
$hj(this).find('i').toggleClass('fa-square fa-check-square');
// On récupère l'extension Parent
var expansionId = $hj(this).data('expansion-id');
var parentNode = $hj('#page-tools .btn-expansion[data-expansion-id="'+expansionId+'"]');
// On va parcourir les enfants du Parent pour dénombrer les statuts.
var children = $hj('#listing button[data-expansion-id="'+expansionId+'"]');
var childNb = children.length;
var checkedNb = 0;
children.each(function(){
if ($hj(this).find('i').hasClass('fa-check-square') ) { checkedNb++; }
});
if (checkedNb==0 ) {
// Si aucun n'est coché
parentNode.find('i').removeClass('fa-check-square fa-minus-square').addClass('fa-square');
} else if (checkedNb==childNb ) {
// S'ils sont tous cochés
parentNode.find('i').removeClass('fa-square fa-minus-square').addClass('fa-check-square');
} else {
// Si une partie est cochée.
parentNode.find('i').removeClass('fa-check-square fa-square').addClass('fa-minus-square');
}
$hj('#nbSelected').html($hj('#listing .fa-check-square').length);
});
// Si on clic sur le bouton Générer.
$hj('#proceedBuildTeam').unbind().click(function(){
// On récupère la liste des Survivants sélectionnés.
var selection = '';
var nbSurv = 0;
$hj('#page-tools .btn-survivor').each(function(){
if ($hj(this).find('i').hasClass('fa-check-square') ) {
selection += (selection==''?'':',')+$hj(this).data('survivor-id');
nbSurv++;
}
});
// Et le nombre de Survivants souhaités
var nbSurvSel = $hj('#nbSurvSel button.active').data('nb');
// Si le nombre sélectionné est inférieur à celui souhaité, même pas la peine de chercher...
if (nbSurv<nbSurvSel) {
console.log('Impossible');
} else {
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getRandomTeam', 'nbSurvSel': nbSurvSel, 'value': selection};
resolveCallAjax(data, 'page-selection-survivants');
}
});
$hj('#loadOwnTeam').unbind().click(function(){
// On supprime toute sélection précédente
$hj('#listing button.btn-survivor').addClass('hidden');
$hj('#listing button.btn-survivor i').removeClass('fa-check-square').addClass('fa-square');
// On récupère la sélection soumise
var teamLoader = $hj('#teamLoader').val();
var arr = teamLoader.split(',');
var nb = arr.length;
    // On parcourt la liste pour cocher les Survivants correspondants
for (var i=0; i<nb; i++) {
var subArr = arr[i].split('-');
if (subArr.length==1) {
// un Survivant spécifique
$hj('button[data-survivor-id="'+subArr[0]+'"]').removeClass('hidden').click();
} else {
for (var j=subArr[0]; j<=subArr[1]; j++) {
// Un intervalle de Survivants
$hj('button[data-survivor-id="'+j+'"]').removeClass('hidden').click();
}
}
}
nb = 0;
$hj('#listing button.btn-survivor').each(function(){ if ($hj(this).is(':visible')) nb++;});
$hj('#nbDisplayed').html(nb);
});
$hj('#saveOwnTeam').unbind().click(function(e){
$hj('#saveOwnTeam .tooltip').addClass('visible');
var strSave = '';
$hj('#listing i.fa-check-square').each(function(){
strSave += $hj(this).parent().attr('data-survivor-id')+',';
});
var textArea = document.createElement("textarea");
textArea.value = strSave.substring(0,strSave.length-1);
// Avoid scrolling to bottom
textArea.style.top = "0";
textArea.style.left = "0";
textArea.style.position = "fixed";
document.body.appendChild(textArea);
textArea.focus();
textArea.select();
try {
var successful = document.execCommand('copy');
var msg = successful ? 'successful' : 'unsuccessful';
console.log('Fallback: Copying text command was ' + msg);
} catch (err) {
console.error('Fallback: Oops, unable to copy', err);
}
document.body.removeChild(textArea);
});
$hj('#saveOwnTeam .tooltip').hover(function(e) {
}, function(e) {
$hj('#saveOwnTeam .tooltip').removeClass('visible');
});
}
function resolvePageExpansionAjaxActions(clicked) {
var ajaxaction = clicked.data('ajaxaction');
var callAjax = true;
// On initialise les données de tri et de filtres.
var colsort = 'name';
var colorder = 'asc';
var paged = 1;
var nbPerPages = $hj('#displayedRows').val();
var filters = 'name='+$hj('#filter-name').val();
switch (ajaxaction) {
// On change le nombre d'éléments affichés
case 'display' :
case 'filter' :
break;
// On change la page affichée
case 'paged' :
paged = clicked.data('paged');
break;
default :
callAjax = false;
break;
}
if (callAjax) {
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getExpansions', 'colsort': colsort, 'colorder': colorder, 'nbperpage': nbPerPages, 'paged': paged, 'filters': filters};
resolveCallAjax(data, 'page-extensions');
}
}
function resolvePageCompetenceAjaxActions(clicked) {
var ajaxaction = clicked.data('ajaxaction');
var callAjax = true;
// On initialise les données de tri et de filtres.
var colsort = 'name';
var colorder = 'asc';
var paged = 1;
var nbPerPages = $hj('#displayedRows').val();
var filters = '';
if ($hj('#filter-description').val()!=undefined) {
filters = 'description='+$hj('#filter-description').val();
}
switch (ajaxaction) {
// On change le nombre d'éléments affichés
case 'display' :
case 'filter' :
break;
// On change la page affichée
case 'paged' :
paged = clicked.data('paged');
break;
default :
callAjax = false;
break;
}
if (callAjax) {
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getSkills', 'colsort': colsort, 'colorder': colorder, 'nbperpage': nbPerPages, 'paged': paged, 'filters': filters};
resolveCallAjax(data, 'page-competences');
}
}
function resolvePageMissionAjaxActions(clicked) {
var ajaxaction = clicked.data('ajaxaction');
var callAjax = true;
// On initialise les données de tri et de filtres.
var colsort = 'title';
var colorder = 'asc';
var paged = 1;
var nbPerPages = $hj('#displayedRows').val();
var filters = 'title='+$hj('#filter-title').val();
filters += '&levelId='+$hj('#filter-levelId').val();
filters += '&playerId='+$hj('#filter-playerId').val();
filters += '&durationId='+$hj('#filter-durationId').val();
filters += '&origineId='+$hj('#filter-origineId').val();
if ($hj('#filter-expansionId').val()!=null) {
filters += '&expansionId='+$hj('#filter-expansionId').val();
}
switch (ajaxaction) {
// On change le nombre d'éléments affichés
case 'display' :
case 'filter' :
break;
// On change la page affichée
case 'paged' :
paged = clicked.data('paged');
break;
default :
callAjax = false;
break;
}
if (callAjax) {
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getMissions', 'colsort': colsort, 'colorder': colorder, 'nbperpage': nbPerPages, 'paged': paged, 'filters': filters};
resolveCallAjax(data, 'page-missions');
}
}
function resolvePageSurvivantAjaxActions(clicked) {
var ajaxaction = clicked.data('ajaxaction');
var callAjax = true;
// On initialise les données de tri et de filtres.
var colsort = 'name';
var colorder = 'asc';
var paged = 1;
var nbPerPages = $hj('#displayedRows').val();
var filters = 'name='+$hj('#filter-name').val();
if ($hj('#filter-expansionId').val()!=null) {
filters += '&expansionId='+$hj('#filter-expansionId').val();
}
if ($hj('#filter-blue-skill').val()!=null) {
filters += '&blue-skillId='+$hj('#filter-blue-skill').val();
}
if ($hj('#filter-yellow-skill').val()!=null) {
filters += '&yellow-skillId='+$hj('#filter-yellow-skill').val();
}
if ($hj('#filter-orange-skill').val()!=null) {
filters += '&orange-skillId='+$hj('#filter-orange-skill').val();
}
if ($hj('#filter-red-skill').val()!=null) {
filters += '&red-skillId='+$hj('#filter-red-skill').val();
}
switch (ajaxaction) {
// On change le nombre d'éléments affichés
case 'display' :
case 'filter' :
break;
// On change la page affichée
case 'paged' :
paged = clicked.data('paged');
break;
default :
callAjax = false;
break;
}
if (callAjax) {
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getSurvivants', 'colsort': colsort, 'colorder': colorder, 'nbperpage': nbPerPages, 'paged': paged, 'filters': filters};
console.log(data);
resolveCallAjax(data, 'page-survivants');
}
}
function resolveCallAjax(data, idPage) {
$hj('.overlay').addClass('loading');
$hj.post(
ajaxurl,
data,
function(response) {
try {
var obj = JSON.parse(response);
if (obj[idPage] != '' ) {
$hj('#'+idPage).replaceWith(obj[idPage]);
switch (idPage) {
case 'page-competences' : addPageCompetenceAjaxActions(); break;
case 'page-missions' : addPageMissionAjaxActions(); break;
case 'page-selection-survivants' :
case 'page-survivants' : addPageSurvivantAjaxActions(); break;
case 'page-extensions' : addPageExpansionAjaxActions(); break;
case 'page-generation-map' :
$hj('.displayMap .cell').each(function(){ $hj(this).height($hj(this).width()+'px'); });
turnFaSquareToCheckBox();
addLockUnlockEffect();
addRotateAction();
addCogAction();
break;
default: break;
}
}
} catch (e) {
console.log("error: "+e);
console.log(response);
}
}
).done(
function() {
$hj('.overlay').removeClass('loading');
}
);
}
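/*
 * Forme de réponse attendue par resolveCallAjax (exemple illustratif) :
 *   {"page-competences": "<div id=\"page-competences\">...</div>"}
 * Le fragment remplace l'élément #idPage via replaceWith(), puis les listeners de la page concernée sont ré-attachés.
 */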
<file_sep>/core/domain/Skill.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe Skill
* @author Hugues.
* @since 1.00.00
* @version 1.05.12
*/
class Skill extends WpPostRelais
{
/**
* Id technique de la donnée
* @var int $id
*/
protected $id;
/**
* Code de la donnée
* @var string $code
*/
protected $code;
/**
* Nom de la donnée
* @var string $name
*/
protected $name;
/**
* Description de la donnée
* @var string $description
*/
protected $description;
/**
   * Extension de la compétence (première apparition ou dernière modification)
* @var int $expansionId
*/
protected $expansionId;
/**
* @return int
*/
public function getId()
{ return $this->id; }
/**
* @return string
*/
public function getCode()
{ return $this->code; }
/**
* @return string
*/
public function getName()
{ return $this->name; }
/**
* @return string
*/
public function getDescription()
{ return $this->description; }
/**
* @return int
*/
public function getExpansionId()
{ return $this->expansionId; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id=$id; }
/**
* @param string $code
*/
public function setCode($code)
{ $this->code=$code; }
/**
* @param string $name
*/
public function setName($name)
{ $this->name=$name; }
/**
* @param string $description
*/
public function setDescription($description)
{ $this->description=$description; }
/**
   * @param int $expansionId
*/
public function setExpansionId($expansionId)
{ $this->expansionId = $expansionId; }
///////////////////////////////////////////////////////////////
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('Skill'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return Skill
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new Skill(), self::getClassVars(), $row); }
/**
* @return SkillBean
*/
public function getBean()
{ return new SkillBean($this); }
///////////////////////////////////////////////////////////////
/**
* @return string
*/
public function getWpPost()
{ return $this->getMainWpPost(self::FIELD_CODE, $this->code, self::WP_CAT_SKILL_ID); }
public function getExpansion()
{
if ($this->Expansion==null) {
$this->Expansion = $this->ExpansionServices->selectExpansion($this->getExpansionId());
}
return $this->Expansion;
}
}
<file_sep>/core/bean/EquipmentBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe EquipmentBean
* @author Hugues
* @since 1.04.26
* @version 1.04.26
*/
class EquipmentBean extends LocalBean
{
/**
* Class Constructor
   * @param EquipmentCard $EquipmentCard
*/
public function __construct($EquipmentCard='')
{
parent::__construct();
$this->EquipmentCard = ($EquipmentCard=='' ? new EquipmentCard() : $EquipmentCard);
}
/**
* @param int $expansionId
* @param int $equipmentLiveDeckId
* @return string
*/
public function displayCard($expansionId='', $equipmentLiveDeckId=-1)
{
$EquipmentCard = $this->EquipmentCard;
if ($expansionId=='') {
$expansionId = $EquipmentCard->getExpansionId();
}
$arrKeyWords = array();
$strClasse = '';
if ($EquipmentCard->isRanged()) {
$strClasse .= ' ranged weapon';
}
if ($EquipmentCard->isMelee()) {
$strClasse .= ' melee weapon';
}
if ($EquipmentCard->isPimp()) {
$strClasse .= ' pimp';
array_push($arrKeyWords, 'Pimp');
}
if ($EquipmentCard->isStarter()) {
$strClasse .= ' starter';
array_push($arrKeyWords, 'Starter');
}
if ($EquipmentCard->isDual()) {
$strClasse .= ' dual';
array_push($arrKeyWords, 'Dual');
}
$arrKeyWordsToCheck = array(
'Embuscade', '9mm', 'Pistolet', 'Précision', 'Vivres', '12mm', 'Recharge', 'Effraction', 'Katana',
'Munitions', 'Composite'
);
foreach ($arrKeyWordsToCheck as $keyWord) {
if ($EquipmentCard->hasKeyword($keyWord)) {
array_push($arrKeyWords, $keyWord);
}
}
$args = array(
// Identifiant de l'extension
$expansionId,
// Classe
$strClasse.(self::isAdmin() ? ' hasTooltip' : ''),
// Nom de l'équipement
$EquipmentCard->getName(),
// Liste des mots-clés pour Debug
implode(', ', $arrKeyWords),
// URL de l'image
$EquipmentCard->getImgUrl($expansionId),
// On montre le bouton Discard ou non - 6
($equipmentLiveDeckId!=-1?'':'hidden'),
// On a besoin du KeyAccess - 7
($equipmentLiveDeckId!=-1?$_SESSION[self::SESSION_DECKKEY]:''),
// On a besoin de l'id de l'EquipmentExpansion - 8
($equipmentLiveDeckId!=-1?$equipmentLiveDeckId:''),
);
$str = file_get_contents(PLUGIN_PATH.'web/pages/public/fragments/card-equipment-article.php');
return vsprintf($str, $args);
}
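  /*
   * Note : vsprintf() fait correspondre $args, dans l'ordre, aux marqueurs du fragment
   * card-equipment-article.php ; les index en commentaire ci-dessus (6, 7, 8) servent de repère pour cet ordre.
   */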
}
<file_sep>/core/services/WpPostServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPostServices
* @author Hugues
* @since 1.04.00
* @version 1.08.01
*/
class WpPostServices extends GlobalServices implements ConstantsInterface
{
/**
* @param array $params
* @param string $viaWpQuery
* @param string $wpPostType
* @return array
*/
public function getArticles($params=array(), $viaWpQuery=false, $wpPostType='WpPostMission')
{
$args = array(
self::WP_ORDERBY => self::FIELD_NAME,
self::WP_ORDER => self::ORDER_ASC,
self::WP_POSTSPERPAGE => -1,
self::WP_POSTTYPE => self::WP_POST,
self::WP_TAXQUERY => array(
array(
self::WP_TAXONOMY => self::WP_POSTTAG,
self::WP_FIELD => self::WP_SLUG,
self::WP_TERMS => array('mission', 'survivant'),
)
)
);
if (!empty($params)) {
foreach ($params as $key => $value) {
$args[$key] = $value;
}
}
if ($viaWpQuery) {
$wpQuery = new WP_Query($args);
$posts_array = $wpQuery->posts;
} else {
$posts_array = get_posts($args);
}
$WpPosts = array();
if (!empty($posts_array)) {
foreach ($posts_array as $post) {
$tags = wp_get_post_tags($post->ID);
$localWpPostType = $this->getPostTypeFromTags($tags);
$WpPosts[] = WpPost::convertElement($post, $localWpPostType);
}
}
return $WpPosts;
}
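  /*
   * Esquisse d'utilisation (l'id de catégorie 12 est fictif) :
   *   $WpPosts = $this->getArticles(array(self::WP_CAT=>12, self::WP_TAXQUERY=>array()));
   * Les paramètres passés écrasent les valeurs par défaut (notamment la tax_query sur les slugs "mission" et "survivant").
   */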
private function getPostTypeFromTags($tags)
{
$wpPostType = '';
if (!empty($tags)) {
foreach ($tags as $WpTerm) {
if ($WpTerm->slug == 'mission') {
$wpPostType = 'WpPostMission';
} elseif ($WpTerm->slug == 'news') {
$wpPostType = 'WpPostNews';
} elseif ($WpTerm->slug == 'survivant') {
$wpPostType = 'WpPostSurvivor';
}
}
}
return $wpPostType;
}
/**
* @param int $pageId
* @param int $limit
* @return array
*/
public function getChildPagesByParentId($pageId, $limit = -1, $params=array())
{
global $post;
$pages = array();
$args = array(
self::WP_ORDERBY => self::FIELD_NAME,
self::WP_ORDER => self::ORDER_ASC,
self::WP_POSTTYPE => 'page',
'post_parent' => $pageId,
self::WP_POSTSPERPAGE => $limit
);
if ( !empty($params) ) {
foreach ( $params as $key=>$value ) {
$args[$key] = $value;
}
}
$the_query = new WP_Query($args);
while ($the_query->have_posts()) {
$the_query->the_post();
$pages[] = WpPost::convertElement($post, 'WpPost');
}
wp_reset_postdata();
return $pages;
}
public function getWpPostsByCustomField($name, $value)
{
$args = array('numberposts'=>-1, 'post_type'=>'post', 'meta_key'=>$name, 'meta_value'=>$value);
$posts = get_posts($args);
$WpPosts = array();
while (!empty($posts)) {
array_push($WpPosts, WpPost::convertElement(array_shift($posts), 'WpPost'));
}
return $WpPosts;
}
public function getWpPostByCategoryId($categoryId=-1)
{
$args = array(
self::WP_TAXQUERY => array(),
self::WP_CAT => $categoryId,
);
return $this->getArticles($args);
}
}
<file_sep>/core/actions/SkillActions.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* SkillActions
* @author Hugues
* @since 1.04.00
* @version 1.08.01
*/
class SkillActions extends LocalActions
{
/**
* Constructeur
*/
public function __construct($post=array())
{
parent::__construct();
$this->post = $post;
$this->ExpansionServices = new ExpansionServices();
$this->SkillServices = new SkillServices();
$this->WpPostServices = new WpPostServices();
}
/**
* Point d'entrée des méthodes statiques.
* @param array $post
* @return string
**/
public static function dealWithStatic($post)
{
$returned = '';
$Act = new SkillActions($post);
switch ($post[self::CST_AJAXACTION]) {
case self::AJAX_GETSKILLS :
$returned = $Act->dealWithGetSkills();
break;
case self::AJAX_SKILLVERIF :
$returned = $Act->dealWithSkillVerif(true);
break;
default :
        $returned = 'Erreur dans SkillActions > dealWithStatic, '.$post[self::CST_AJAXACTION].' inconnu.';
break;
}
return $returned;
}
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
// Bloc de gestion de vérifications des Compétences en Home Admin
/**
* @param boolean $isVerif
* @return string
*/
public function dealWithSkillVerif($isVerif=false)
{
// On récupère les articles de compétences
$args = array(
self::WP_CAT => self::WP_CAT_SKILL_ID,
self::WP_TAXQUERY => array(),
self::WP_POSTSTATUS => self::WP_PUBLISH.', future',
);
$this->WpPostSkills = $this->WpPostServices->getArticles($args);
$nbWpPostSkills = count($this->WpPostSkills);
// Et les compétences en base
$this->Skills = $this->SkillServices->getSkillsWithFilters();
$nbSkills = count($this->Skills);
if ($isVerif) {
$this->checkSkills();
$strBilan = $this->jsonString($this->strBilan, self::AJAX_SKILLVERIF, true);
} elseif ($nbWpPostSkills!=$nbSkills) {
$strBilan = "Le nombre d'articles ($nbWpPostSkills) ne correspond pas au nombre de compétences en base ($nbSkills).<br>";
$strBilan .= "Une vérification est vivement conseillée.";
} else {
$strBilan = "Le nombre d'articles ($nbWpPostSkills) correspond au nombre de compétences en base.";
}
return $strBilan;
}
private function checkSkills()
{
$hasErrors = false;
$strErrors = '';
$this->strBilan = "Début de l'analyse des données relatives aux Compétences.<br>";
$this->strBilan .= "Il y a ".count($this->WpPostSkills)." articles de Compétences.<br>";
$this->strBilan .= "Il y a ".count($this->Skills)." entrées en base.<br>";
/////////////////////////////////////////////////////////////////////
// On va réorganiser les Skills pour les retrouver facilement
$arrSkills = array();
while (!empty($this->Skills)) {
$Skill = array_shift($this->Skills);
if (isset($arrSkills[$Skill->getCode()])) {
$strErrors .= "Le code <em>".$Skill->getCode()."</em> semble être utilisé deux fois dans la base de données.<br>";
$hasErrors = true;
}
$arrSkills[$Skill->getCode()] = $Skill;
}
/////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////
while (!empty($this->WpPostSkills)) {
// On regarde les articles créés et on vérifie les données en base, si elles existent et si elles sont cohérentes entre elles.
// On récupère le WpPost et ses données
$this->WpPost = array_shift($this->WpPostSkills);
$name = $this->WpPost->getPostTitle();
$code = $this->WpPost->getPostMeta(self::FIELD_CODE);
if (!isset($arrSkills[$code])) {
                // A priori l'article n'a pas de code associé en base. Il faut donc en créer un qui corresponde.
$Skill = new Skill();
$Skill->setCode($code);
$Skill->setName($name);
$description = $this->WpPost->getPostContent();
$Skill->setDescription($description);
$expansionId = $this->getExpansionId();
$Skill->setExpansionId($expansionId);
// On insère la donnée et on log dans le bilan
$this->SkillServices->insertSkill($Skill);
$this->strBilan .= "L'article <em>".$name."</em> a été créé en base.<br>";
continue;
}
$Skill = $arrSkills[$code];
unset($arrSkills[$code]);
$this->checkSkill($Skill);
}
/////////////////////////////////////////////////////////////////////
// On vérifie que la totalité des Compétences en base ont été utilisées. Si ce n'est pas le cas, il faut créer des articles correspondants.
if (!empty($arrSkills)) {
$this->strBilan .= "On a des données en base qui n'ont pas d'article correspondant.<br>";
while (!empty($arrSkills)) {
$Skill = array_shift($arrSkills);
$this->strBilan .= '<br>Article à créer pour une Compétence : '.$Skill->getName().' ['.$Skill->toJson().'].';
}
}
/////////////////////////////////////////////////////////////////////
$this->strBilan .= "Fin de l'analyse des données relatives aux Compétences.<br>";
if ($hasErrors) {
$this->strBilan .= "Anomalies constatées :<br>".$strErrors;
} else {
$this->strBilan .= "Aucune anomalie constatée.";
}
}
private function getExpansionId()
{
$postId = $this->WpPost->getPostMeta(self::FIELD_EXPANSIONID);
$Wp_post = get_post($postId);
$WpPost = WpPost::convertElement($Wp_post);
$codeExpansion = $WpPost->getPostMeta(self::FIELD_CODE);
$Expansions = $this->ExpansionServices->getExpansionsWithFilters(array(self::FIELD_CODE=>$codeExpansion));
$Expansion = array_shift($Expansions);
return $Expansion->getId();
}
private function checkSkill($Skill)
{
$doUpdate = false;
// On initialise les données de l'article
$name = $this->WpPost->getPostTitle();
$description = $this->WpPost->getPostContent();
$expansionId = $this->getExpansionId();
// On vérifie si la donnée en base correspond à l'article.
$strError = '';
if ($Skill->getName()!=$name) {
$Skill->setName($name);
$doUpdate = true;
$strError .= "Le Nom a été mis à jour.<br>";
}
if ($Skill->getDescription()!=$description) {
$Skill->setDescription($description);
$doUpdate = true;
$strError .= "La description a été mise à jour.<br>";
}
if ($Skill->getExpansionId()!=$expansionId) {
$Skill->setExpansionId($expansionId);
$doUpdate = true;
$strError .= "L'extension a été mise à jour.<br>";
}
if ($doUpdate) {
// Si nécessaire, on update en base.
$this->SkillServices->updateSkill($Skill);
$this->strBilan .= "Les données de la Compétence <em>".$name."</em> ont été mises à jour.<br>".$strError;
}
}
// Fin du bloc relatif à la vérification des compétences sur la Home Admin.
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
/**
* Récupération du contenu de la page via une requête Ajax.
* @param array $post
* @return string
*/
public function dealWithGetSkills()
{
$Bean = new WpPageSkillsBean();
$Bean->setFilters($this->post);
return $this->jsonString($Bean->getListContentPage(), self::PAGE_SKILL, true);
}
}
<file_sep>/core/domain/Spawn.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe Spawn
* @author Hugues.
* @version 1.0.00
* @since 1.0.00
*/
class Spawn extends LocalDomain
{
/**
* Id technique de la donnée
* @var int $id
*/
protected $id;
/**
* Id de l'extension
* @var int $expansionId
*/
protected $expansionId;
/**
* Numéro de la carte
* @var int $spawnNumber
*/
protected $spawnNumber;
/**
* Id technique du type de Spawn
* @var int $spawnTypeId
*/
protected $spawnTypeId;
/**
* Id technique de catégorie du Zombie
* @var int $zombieCategoryId
*/
protected $zombieCategoryId;
/**
* Type de Zombie Bleu
* @var int $blueZombieTypeId
*/
protected $blueZombieTypeId;
/**
* Nombre de Zombies apparaissant en Blue
* @var int $blueQuantity
*/
protected $blueQuantity;
/**
* Type de Zombie Yellow
* @var int $yellowZombieTypeId
*/
protected $yellowZombieTypeId;
/**
   * Nombre de Zombies apparaissant en Yellow
* @var int $yellowQuantity
*/
protected $yellowQuantity;
/**
   * Type de Zombie Orange
* @var int $orangeZombieTypeId
*/
protected $orangeZombieTypeId;
/**
* Nombre de Zombies apparaissant en Orange
* @var int $orangeQuantity
*/
protected $orangeQuantity;
/**
* Type de Zombie Red
* @var int $redZombieTypeId
*/
protected $redZombieTypeId;
/**
* Nombre de Zombies apparaissant en Red
* @var int $redQuantity
*/
protected $redQuantity;
/**
* @return $id
*/
public function getId()
{return $this->id; }
/**
* @return $expansionId
*/
public function getExpansionId()
{ return $this->expansionId; }
/**
* @return $spawnNumber
*/
public function getSpawnNumber()
{ return $this->spawnNumber; }
/**
* @return $spawnTypeId
*/
public function getSpawnTypeId()
{ return $this->spawnTypeId; }
/**
* @return $zombieCategoryId
*/
public function getZombieCategoryId()
{ return $this->zombieCategoryId; }
/**
* @return $blueZombieTypeId
*/
public function getBlueZombieTypeId()
{ return $this->blueZombieTypeId; }
/**
* @return $blueQuantity
*/
public function getBlueQuantity()
{ return $this->blueQuantity; }
/**
* @return $yellowZombieTypeId
*/
public function getYellowZombieTypeId()
{ return $this->yellowZombieTypeId; }
/**
* @return $yellowQuantity
*/
public function getYellowQuantity()
{ return $this->yellowQuantity; }
/**
* @return $orangeZombieTypeId
*/
public function getOrangeZombieTypeId()
{ return $this->orangeZombieTypeId; }
/**
* @return $orangeQuantity
*/
public function getOrangeQuantity()
{ return $this->orangeQuantity; }
/**
* @return $redZombieTypeId
*/
public function getRedZombieTypeId()
{ return $this->redZombieTypeId; }
/**
* @return $redQuantity
*/
public function getRedQuantity()
{ return $this->redQuantity; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id=$id; }
/**
* @param int $expansionId
*/
public function setExpansionId($expansionId)
{ $this->expansionId=$expansionId; }
/**
* @param int $spawnNumber
*/
public function setSpawnNumber($spawnNumber)
{ $this->spawnNumber=$spawnNumber; }
/**
* @param int $spawnTypeId
*/
public function setSpawnTypeId($spawnTypeId)
{ $this->spawnTypeId=$spawnTypeId; }
/**
* @param int $zombieCategoryId
*/
public function setZombieCategoryId($zombieCategoryId)
{ $this->zombieCategoryId=$zombieCategoryId; }
/**
* @param int $blueZombieTypeId
*/
public function setBlueZombieTypeId($blueZombieTypeId)
{ $this->blueZombieTypeId=$blueZombieTypeId; }
/**
* @param int $blueQuantity
*/
public function setBlueQuantity($blueQuantity)
{ $this->blueQuantity=$blueQuantity; }
/**
* @param int $yellowZombieTypeId
*/
public function setYellowZombieTypeId($yellowZombieTypeId)
{ $this->yellowZombieTypeId=$yellowZombieTypeId; }
/**
* @param int $yellowQuantity
*/
public function setYellowQuantity($yellowQuantity)
{ $this->yellowQuantity=$yellowQuantity; }
/**
* @param int $orangeZombieTypeId
*/
public function setOrangeZombieTypeId($orangeZombieTypeId)
{ $this->orangeZombieTypeId=$orangeZombieTypeId; }
/**
* @param int $orangeQuantity
*/
public function setOrangeQuantity($orangeQuantity)
{ $this->orangeQuantity=$orangeQuantity; }
/**
* @param int $redZombieTypeId
*/
public function setRedZombieTypeId($redZombieTypeId)
{ $this->redZombieTypeId=$redZombieTypeId; }
/**
* @param int $redQuantity
*/
public function setRedQuantity($redQuantity)
{ $this->redQuantity=$redQuantity; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('Spawn'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return Spawn
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new Spawn(), self::getClassVars(), $row); }
/**
* @return string
*/
public function getImgUrl()
{ return '/wp-content/plugins/zombicide/web/rsc/images/spawns/'.(str_pad($this->spawnNumber, 4, '0', STR_PAD_LEFT)).'-thumb.jpg'; }
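    /*
     * Illustrative example (hypothetical spawn number): with $spawnNumber = 12,
     * getImgUrl() returns
     *   /wp-content/plugins/zombicide/web/rsc/images/spawns/0012-thumb.jpg
     * str_pad() left-padding the number to four digits with zeros.
     */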
public function getBean()
{ return new SpawnBean($this); }
}
<file_sep>/core/domain/MapTile.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe MapTile
* @author Hugues.
* @since 1.08.30
*/
class MapTile extends LocalDomain
{
/**
* Tile
* @var Tile $Tile
*/
protected $Tile;
/**
* Orientation de la Tuile
* @var string $orientation
*/
protected $orientation;
/**
* Tuile bloquée ou non.
* @var boolean $locked
*/
protected $locked;
public function __construct($Tile, $orientation, $locked=false)
{
$this->Tile = $Tile;
$this->orientation = $orientation;
$this->locked = $locked;
}
public function getTile()
{ return $this->Tile; }
public function getOrientation()
{ return $this->orientation; }
public function isLocked()
{ return ($this->locked==1); }
public function setTile($Tile)
{ $this->Tile = $Tile; }
    public function setOrientation($orientation)
{ $this->orientation = $orientation; }
public function setLocked($locked)
{ $this->locked = $locked; }
public function isCompatible($OtherMapTile, $side)
{
$isCompatible = true;
if ($side=='left') {
$sideLeft = $this->getLeft();
$sideRight = $OtherMapTile->getRight();
//echo "[left : ".$this->Tile->getCode()."-$sideLeft;".$OtherMapTile->getTile()->getCode()."-$sideRight]\r\n";
$isCompatible = $this->compSides($sideLeft, $sideRight);
} elseif ($side=='right') {
            $sideRight = $this->getRight();
            $sideLeft = $OtherMapTile->getLeft();
            //echo "[right : ".$this->Tile->getCode()."-$sideRight;".$OtherMapTile->getTile()->getCode()."-$sideLeft]\r\n";
            $isCompatible = $this->compSides($sideRight, $sideLeft);
} elseif ($side=='bottom') {
$sideBottom = $this->getBottom();
$sideTop = $OtherMapTile->getTop();
//echo "[top : ".$this->Tile->getCode()."-$sideTop;".$OtherMapTile->getTile()->getCode()."-$sideBottom]\r\n";
$isCompatible = $this->compSides($sideBottom, $sideTop);
} elseif ($side=='top') {
$sideTop = $this->getTop();
$sideBottom = $OtherMapTile->getBottom();
//echo "[top : ".$this->Tile->getCode()."-$sideTop;".$OtherMapTile->getTile()->getCode()."-$sideBottom]\r\n";
$isCompatible = $this->compSides($sideTop, $sideBottom);
}
return $isCompatible;
}
public function isCompatibleV2($MapTiles, $row, $col)
{
$isCompatible = true;
if (isset($MapTiles[$row][$col-1])) {
$isCompatible = $this->isCompatible($MapTiles[$row][$col-1], 'left');
}
if ($isCompatible && isset($MapTiles[$row-1][$col])) {
$isCompatible = $this->isCompatible($MapTiles[$row-1][$col], 'top');
}
if ($isCompatible && isset($MapTiles[$row][$col+1])) {
$isCompatible = $this->isCompatible($MapTiles[$row][$col+1], 'right');
}
if ($isCompatible && isset($MapTiles[$row+1][$col])) {
$isCompatible = $this->isCompatible($MapTiles[$row+1][$col], 'bottom');
}
return $isCompatible;
}
private function compSides($sideLeft, $sideRight)
{
// B: Building
// S : Street
// V : terrain Vague
// C : Couloir
// --> P : Prison Outbreak
// --> M : Toxic City Mall
// --> H : Rue Morgue
// --> W : Museum Worricow
// --> F : Highschool Funeral
$arrCorridors = array('P', 'M', 'H', 'W', 'F');
if ($sideLeft[0]==$sideRight[2] && $sideLeft[1]==$sideRight[1] && $sideLeft[2]==$sideRight[0]) {
return true;
}
if (in_array($sideLeft[0], $arrCorridors) && in_array($sideRight[2], $arrCorridors) &&
in_array($sideLeft[1], $arrCorridors) && in_array($sideRight[1], $arrCorridors) &&
in_array($sideLeft[2], $arrCorridors) && in_array($sideRight[0], $arrCorridors)) {
return true;
}
return false;
}
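    /*
     * Worked example (hypothetical side strings): compSides('BSB', 'BSB') is true
     * because one side read top-to-bottom must mirror the other read bottom-to-top
     * (index 0 vs 2, 1 vs 1, 2 vs 0). compSides('PPP', 'MMM') is also true, every
     * position on both sides being one of the corridor codes listed in $arrCorridors.
     */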
public function getTop()
{
switch ($this->orientation) {
case 'top' :
$top = $this->Tile->getSideTop();
break;
case 'right' :
$top = $this->Tile->getSideLeft();
break;
case 'bottom' :
$top = $this->Tile->getSideBottom();
break;
default :
$top = $this->Tile->getSideRight();
break;
}
return $top;
}
public function getBottom()
{
switch ($this->orientation) {
case 'top' :
$bottom = $this->Tile->getSideBottom();
break;
case 'right' :
$bottom = $this->Tile->getSideRight();
break;
case 'bottom' :
$bottom = $this->Tile->getSideTop();
break;
default :
$bottom = $this->Tile->getSideLeft();
break;
}
return $bottom;
}
public function getRight()
{
switch ($this->orientation) {
case 'top' :
$right = $this->Tile->getSideRight();
break;
case 'right' :
$right = $this->Tile->getSideTop();
break;
case 'bottom' :
$right = $this->Tile->getSideLeft();
break;
default :
$right = $this->Tile->getSideBottom();
break;
}
return $right;
}
public function getLeft()
{
switch ($this->orientation) {
case 'top' :
$left = $this->Tile->getSideLeft();
break;
case 'right' :
$left = $this->Tile->getSideBottom();
break;
case 'bottom' :
$left = $this->Tile->getSideRight();
break;
default :
$left = $this->Tile->getSideTop();
break;
}
return $left;
}
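    /*
     * Illustrative check (hypothetical tile): the four getters above undo the
     * rotation, e.g. for a tile placed with orientation 'right' the edge now facing
     * left is the one printed at the bottom of the tile.
     *
     *   $MapTile = new MapTile($Tile, 'right');
     *   $MapTile->getLeft(); // same as $Tile->getSideBottom()
     *   $MapTile->getTop();  // same as $Tile->getSideLeft()
     */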
}
<file_sep>/core/daoimpl/SurvivorDaoImpl.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe SurvivorDaoImpl
* @author Hugues.
* @since 1.0.00
* @version 1.05.06
*/
class SurvivorDaoImpl extends LocalDaoImpl
{
protected $whereFiltersExpansionIn;
/**
* Class constructor
*/
public function __construct()
{ parent::__construct('Survivor'); }
/**
* @param array $rows
* @return array
*/
protected function convertToArray($rows)
{
$Items = array();
if (!empty($rows)) {
foreach ($rows as $row) {
$Items[] = Survivor::convertElement($row);
}
}
return $Items;
}
/**
* @param array $arrParams
* @param array $filters
* @return array
*/
public function selectEntriesWithFiltersIn($arrParams, $filters)
{
// On s'appuie sur la requête de base.
$requete = $this->selectRequest.$this->fromRequest;
// Contrainte sur la Compétence Bleue
if (isset($filters[self::COLOR_BLUE.'-'.self::FIELD_SKILLID])) {
$requete .= 'INNER JOIN wp_11_zombicide_survivor_skill ss1 ON s.id=ss1.survivorId ';
$requete .= 'AND ss1.skillId='.$filters[self::COLOR_BLUE.'-'.self::FIELD_SKILLID].' AND ss1.tagLevelId IN (10,11) ';
}
// Contrainte sur la Compétence Jaune
if (isset($filters[self::COLOR_YELLOW.'-'.self::FIELD_SKILLID])) {
$requete .= 'INNER JOIN wp_11_zombicide_survivor_skill ss2 ON s.id=ss2.survivorId ';
$requete .= 'AND ss2.skillId='.$filters[self::COLOR_YELLOW.'-'.self::FIELD_SKILLID].' AND ss2.tagLevelId IN (20) ';
}
// Contrainte sur la Compétence Orange
if (isset($filters[self::COLOR_ORANGE.'-'.self::FIELD_SKILLID])) {
$requete .= 'INNER JOIN wp_11_zombicide_survivor_skill ss3 ON s.id=ss3.survivorId ';
$requete .= 'AND ss3.skillId='.$filters[self::COLOR_ORANGE.'-'.self::FIELD_SKILLID].' AND ss3.tagLevelId IN (30,31) ';
}
// Contrainte sur la Compétence Rouge
if (isset($filters[self::COLOR_RED.'-'.self::FIELD_SKILLID])) {
$requete .= 'INNER JOIN wp_11_zombicide_survivor_skill ss4 ON s.id=ss4.survivorId ';
$requete .= 'AND ss4.skillId='.$filters[self::COLOR_RED.'-'.self::FIELD_SKILLID].' AND ss4.tagLevelId IN (40,41,42) ';
}
// On peut aussi trier
$requete .= $this->orderBy;
// Et retourner le tableau de résultats.
return $this->convertToArray($this->selectEntriesAndLogQuery(__FILE__, __LINE__, $requete, $arrParams));
}
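    /*
     * Usage sketch (illustrative, hypothetical skill ids): the $filters keys are the
     * colour constant and FIELD_SKILLID joined by '-', e.g. survivors having skill 7
     * on their blue slot and skill 12 on their orange slot. $arrParams is the
     * order/limit array built by the calling service.
     *
     *   $filters = array(
     *       self::COLOR_BLUE.'-'.self::FIELD_SKILLID   => 7,
     *       self::COLOR_ORANGE.'-'.self::FIELD_SKILLID => 12,
     *   );
     *   $Survivors = $Dao->selectEntriesWithFiltersIn($arrParams, $filters);
     */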
/**
* @param string $file
* @param int $line
* @param array $arrParams
* @return array|Survivor
*/
public function select($file, $line, $arrParams)
{ return parent::localSelect($file, $line, $arrParams, new Survivor()); }
/**
* @param array $filters
* @return array
*/
public function selectEntriesInExpansions($filters) {
$requete = $this->selectRequest.$this->fromRequest;
$requete .= "WHERE name LIKE '%s' AND zombivor LIKE '%s' AND ultimate LIKE '%s' AND (expansionId LIKE '%s' ";
$requete .= "OR expansionId IN (".$filters[SQL_PARAMS_WHERE][3].")) ";
$requete .= "AND background LIKE '%s' AND liveAble LIKE '%s' ";
$requete .= $this->orderBy.$this->limit;
return $this->convertToArray($this->selectEntriesAndLogQuery(__FILE__, __LINE__, $requete, $filters));
}
}
<file_sep>/core/domain/LocalDomain.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe LocalDomain
* @author Hugues.
* @since 1.00.00
* @version 1.07.20
*/
class LocalDomain extends GlobalDomain implements ConstantsInterface
{
public function __construct($attributes=array())
{
global $globalExpansions;
$globalExpansions = array();
parent::__construct($attributes);
$this->DurationServices = new DurationServices();
$this->EquipmentKeywordServices = new EquipmentKeywordServices();
$this->EWProfileServices = new EquipmentWeaponProfileServices();
$this->ExpansionServices = new ExpansionServices();
$this->KeywordServices = new KeywordServices();
$this->LevelServices = new LevelServices();
$this->MissionServices = new MissionServices();
$this->MissionExpansionServices = new MissionExpansionServices();
$this->MissionObjectiveServices = new MissionObjectiveServices();
$this->MissionRuleServices = new MissionRuleServices();
$this->MissionTileServices = new MissionTileServices();
$this->ObjectiveServices = new ObjectiveServices();
$this->OrigineServices = new OrigineServices();
$this->PlayerServices = new PlayerServices();
$this->RuleServices = new RuleServices();
$this->SkillServices = new SkillServices();
$this->SurvivorSkillServices = new SurvivorSkillServices();
$this->TileServices = new TileServices();
$this->WeaponProfileServices = new WeaponProfileServices();
$this->WpPostServices = new WpPostServices();
}
/**
* @return Expansion
*/
public function getExpansion()
{
global $globalExpansions;
if ($this->Expansion==null) {
if (isset($globalExpansions[$this->expansionId])) {
$this->Expansion = $globalExpansions[$this->expansionId];
} else {
$this->Expansion = $this->ExpansionServices->selectExpansion($this->expansionId);
$globalExpansions[$this->expansionId] = $this->Expansion;
}
}
return $this->Expansion;
}
/**
* @return string
*/
public function toJson()
{
$classVars = $this->getClassVars();
$str = '';
foreach ($classVars as $key => $value) {
if ($str!='') {
$str .= ', ';
}
$str .= '"'.$key.'":'.json_encode($this->getField($key));
}
return '{'.$str.'}';
}
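    /*
     * Illustrative output (hypothetical values): for an object whose class vars are
     * id and name, toJson() returns
     *   {"id":3, "name":"Walk of the Dead"}
     * i.e. one "key":json_encode(value) pair per class var, separated by ', '.
     */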
/**
* @param array $post
* @return bool
*/
public function updateWithPost($post)
{
$classVars = $this->getClassVars();
unset($classVars['id']);
$doUpdate = false;
foreach ($classVars as $key => $value) {
if (is_array($post[$key])) {
$value = stripslashes(implode(';', $post[$key]));
} else {
$value = stripslashes($post[$key]);
}
if ($this->{$key} != $value) {
$doUpdate = true;
$this->{$key} = $value;
}
}
return $doUpdate;
}
/**
* @return int
*/
public static function getWpUserId()
{ return get_current_user_id(); }
/**
* @version 1.04.27
* @param array $addArg
* @param array $remArg
* @return string
*/
public function getQueryArg($addArg, $remArg=array())
{
$addArg['page'] = 'hj-zombicide/admin_manage.php';
$remArg[] = 'form';
$remArg[] = 'id';
return add_query_arg($addArg, remove_query_arg($remArg, 'http://zombicide.jhugues.fr/wp-admin/admin.php'));
}
}
<file_sep>/core/services/WpTagServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpTagServices
* @author Hugues.
* @since 1.04.16
* @version 1.07.20
*/
class WpTagServices extends GlobalServices {
public function getTagByName($name, $dbTag='wp_5')
{
$requete = "SELECT t.term_id AS termId FROM ".$dbTag."_terms t INNER JOIN ".$dbTag."_term_taxonomy tt ON t.term_id = tt.term_id WHERE name = '$name' AND taxonomy='post_tag';";
$rows = MySQL::wpdbSelect($requete);
if (empty($rows)) {
return new WpTag();
} else {
$row = array_shift($rows);
$tagId = $row->termId;
return WpTag::convertElement(get_tag($tagId));
}
}
public function getTagBySlug($slug='')
{
$requete = "SELECT term_id FROM wp_11_terms WHERE slug = '$slug';";
$rows = MySQL::wpdbSelect($requete);
if (empty($rows)) {
return new WpTag();
} else {
$row = array_shift($rows);
$tagId = $row->term_id;
return WpTag::convertElement(get_tag($tagId));
}
}
public function getTags()
{
$WpTags = array();
$tags = get_tags();
while (!empty($tags)) {
// Qu'on convertit en WpTag
array_push($WpTags, WpTag::convertElement(array_shift($tags)));
}
return $WpTags;
}
}
<file_sep>/core/bean/WpPostNewsBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPostNewsBean
* @author Hugues
* @since 1.05.09
* @version 1.05.09
*/
class WpPostNewsBean extends WpPostBean
{
protected $urlTemplate = 'web/pages/public/fragments/article-news-extract.php';
/**
* Constructeur
*/
public function __construct($WpPost='')
{
parent::__construct();
$this->WpPost = $WpPost;
}
/**
* @param string $isHome
* @return string
*/
public function displayWpPost()
{
$args = array(
$this->WpPost->getPostContent(),
$this->WpPost->getPostTitle(),
'','','','','','','',
);
return $this->getRender($this->urlTemplate, $args);
}
}
<file_sep>/core/bean/WpPageTagBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPageTagBean
* @author Hugues
* @since 1.04.16
* @version 1.08.02
*/
class WpPageTagBean extends WpPageBean
{
protected $urlTemplate = 'web/pages/public/wppage-spawncards.php';
/**
* Class Constructor
   * @param string $scriptUrl
*/
public function __construct($scriptUrl='')
{
$arr = explode('/', $scriptUrl);
$tag = $arr[2];
// On défini les Services
$this->DurationServices = new DurationServices();
$this->LevelServices = new LevelServices();
$this->WpPostServices = new WpPostServices();
$this->WpTagServices = new WpTagServices();
// On initialise le Tag
$this->WpTag = $this->WpTagServices->getTagBySlug($tag);
}
/**
* @return string
*/
public function getContentPage()
{
/////////////////////////////////////////////////////////////////////////////
// On récupère le slug
$slug = $this->WpTag->getSlug();
/////////////////////////////////////////////////////////////////////////////
// Slugs relatifs à la Durée des Missions
if (strpos($slug, '-minutes')!==false) {
$arr = explode('-', $slug);
$minDuration = $arr[0];
$Durations = $this->DurationServices->getDurationsWithFilters(array(self::FIELD_MINDURATION=>$minDuration));
$Duration = array_shift($Durations);
$WpPage = new WpPageMissionsBean();
$WpPage->setFilters(array(self::CST_FILTERS=>self::FIELD_DURATIONID.'='.$Duration->getId()));
$returned = $WpPage->getListContentPage();
} else {
switch ($slug) {
/////////////////////////////////////////////////////////////////////////////
// Slugs relatifs à la Difficulté des Missions
case 'tutoriel' :
case 'facile' :
case 'moyen' :
case 'difficile' :
case 'hardcore' :
case 'competitif' :
$Levels = $this->LevelServices->getLevelsWithFilters(array(self::FIELD_NAME=>$slug));
$Level = array_shift($Levels);
$WpPage = new WpPageMissionsBean();
$WpPage->setFilters(array(self::CST_FILTERS=>self::FIELD_LEVELID.'='.$Level->getId()));
$returned = $WpPage->getListContentPage();
break;
default :
$returned = "WIP : Gestion des Tags [[$slug]].";
break;
}
}
return $returned;
}
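    /*
     * Usage sketch (illustrative): rendering the body of a duration tag page. The
     * '/tag/45-minutes/' URL shape is an assumption about the permalink structure;
     * only the third path segment is read by the constructor above.
     *
     *   $Bean = new WpPageTagBean('/tag/45-minutes/');
     *   echo $Bean->getContentPage();
     *   // '45' is matched against the minimum duration, then the mission list is
     *   // rendered through WpPageMissionsBean; 'facile', 'moyen', etc. go through
     *   // LevelServices the same way.
     */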
}
<file_sep>/core/services/MissionExpansionServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe MissionExpansionServices
* @author Hugues.
* @since 1.0.00
* @version 1.04.27
*/
class MissionExpansionServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
* @var MissionExpansionDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new MissionExpansionDaoImpl();
$this->ExpansionServices = new ExpansionServices();
}
/**
* @param array $arrFilters
*/
private function buildFilters($arrFilters)
{
$this->arrParams[self::SQL_WHERE] = array();
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_MISSIONID));
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_EXPANSIONID));
}
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getMissionExpansionsWithFilters($arrFilters=array(), $orderby=self::FIELD_ID, $order=self::ORDER_ASC)
{
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
$this->buildFilters($arrFilters);
return $this->Dao->selectEntriesWithFilters(__FILE__, __LINE__, $this->arrParams);
}
public function deleteMissionExpansion($MissionExpansion)
{ $this->delete(__FILE__, __LINE__, $MissionExpansion); }
public function insertMissionExpansion($MissionExpansion)
{ $this->insert(__FILE__, __LINE__, $MissionExpansion); }
}
<file_sep>/core/services/LocalServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe LocalServices
* @author Hugues.
* @since 1.00.00
* @version 1.05.20
*/
class LocalServices extends GlobalServices implements ConstantsInterface
{
protected $arrParams = array();
/**
* Texte par défaut du Select
* @var string $labelDefault
*/
protected $labelDefault = '';
/**
* Valeur par défaut de la classe du Select
* @var string $classe
*/
protected $classe = 'form-control';
/**
* Le Select est-il multiple ?
* @var boolean $multiple
*/
protected $multiple = false;
/**
* Class Constructor
*/
public function __construct()
{
}
/**
* @param array $arrFilters
* @param string $field
* @return string
*/
protected function addFilter($arrFilters, $field)
{
return (isset($arrFilters[$field]) && !empty($arrFilters[$field]) ? $arrFilters[$field] : '%');
}
/**
* @param array $arrFilters
* @param string $field
* @return string
*/
protected function addNonArrayFilter($arrFilters, $field, $defaultSearch='%')
{
return (isset($arrFilters[$field]) && !empty($arrFilters[$field]) && !is_array($arrFilters[$field]) ? $arrFilters[$field] : $defaultSearch);
}
/**
* @param array $arrFilters
* @param string $field
* @return string
*/
protected function addNonArrayWideFilter($arrFilters, $field, $defaultSearch='%')
{
return (isset($arrFilters[$field]) && !empty($arrFilters[$field]) && !is_array($arrFilters[$field]) ? '%'.$arrFilters[$field].'%' : $defaultSearch);
}
    public function prepObject($Obj, $isUpdate=false)
    {
        $arr = array();
        $vars = $Obj->getClassVars();
        if (!empty($vars)) {
            foreach ($vars as $key => $value) {
                if ($key=='id') {
                    continue;
                }
                $arr[] = $Obj->getField($key);
            }
            if ($isUpdate) {
                $arr[] = $Obj->getField('id');
            }
        }
        return $arr;
    }
/**
* @param array $arrSetLabels
* @param string $name
* @param string $value
* @return string
*/
protected function getSetSelect($arrSetLabels, $name, $value)
{
$strSelect = '';
$selName = $name;
if ($this->labelDefault!='') {
$strSelect .= '<label class="screen-reader-text" for="'.$name.'">'.$this->labelDefault.'</label>';
}
// On créé la base du select
$strSelect .= '<select id="'.$name.'" name="'.$selName.'" class="'.$this->classe.'"'.($this->multiple?' multiple':'').'>';
// S'il n'est pas multiple et qu'il a une valeur par défaut, on la met.
if (!$this->multiple && $this->labelDefault!='') {
$strSelect .= '<option value="">'.$this->labelDefault.'</option>';
}
// On parcourt l'ensemble des couples $key/$value de la liste
if (!empty($arrSetLabels)) {
foreach ($arrSetLabels as $key => $labelValue) {
// Visiblement, la $key peut parfois être nulle et c'est mal.
if ($key=='') {
continue;
}
// On construit l'option.
$strSelect .= '<option value="'.$key.'"';
$strSelect .= ($this->isKeySelected($key, $value) ? ' selected="selected"' : '');
$strSelect .= '>'.$labelValue.'</option>';
}
}
return $strSelect.'</select>';
}
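    /*
     * Usage sketch from a subclass (illustrative; the labels and the 'levelId' name
     * are hypothetical): building a <select> from an id => label map with value 2
     * pre-selected, $labelDefault, $classe and $multiple keeping the defaults above.
     *
     *   $arrLabels = array(1 => 'Facile', 2 => 'Moyen', 3 => 'Difficile');
     *   $html = $this->getSetSelect($arrLabels, 'levelId', 2);
     *   // <select id="levelId" name="levelId" class="form-control">
     *   //   <option value="1">Facile</option><option value="2" selected="selected">...
     */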
/**
* @param string $key
* @param mixed $values
* @return boolean
*/
protected function isKeySelected($key, $values)
{
// Si on ne cherche pas dans un tableau, on teste juste l'égalité.
if (!is_array($values)) {
return trim($key)==trim($values);
}
$isSelected = false;
// Sinon, on parcourt la liste pour essayer de trouver la valeur cherchée.
while (!empty($values)) {
$value = array_shift($values);
if ($key==$value) {
$isSelected = true;
}
}
return $isSelected;
}
/**
* Vérifie qu'un élément du tableau n'est ni vide ni un tableau.
* @param array $arrFilters
* @param string $tag
* @return boolean
*/
protected function isNonEmptyAndNoArray($arrFilters, $tag)
{ return !empty($arrFilters[$tag]) && !is_array($arrFilters[$tag]); }
/**
* @return int
*/
public static function getWpUserId()
{ return get_current_user_id(); }
}
<file_sep>/core/domain/SurvivorSkill.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe SurvivorSkill
* @author Hugues.
* @since 1.0.00
* @version 1.05.02
*/
class SurvivorSkill extends LocalDomain
{
/**
* Id technique de la donnée
* @var int $id
*/
protected $id;
/**
* Id technique du Survivant
* @var int $survivorId
*/
protected $survivorId;
/**
* Id technique du Skill
* @var int $skillId
*/
protected $skillId;
/**
* Id technique du type
* @var int $survivorTypeId
*/
protected $survivorTypeId;
/**
* Rang de la compétence sur le profil
* @var int $tagLevelId
*/
protected $tagLevelId;
/**
* @param array $attributes
*/
public function __construct($attributes=array())
{
parent::__construct($attributes);
$this->SkillServices = new SkillServices();
$this->SurvivorServices = new SurvivorServices();
}
public function getBean()
{ return new SurvivorSkillBean($this); }
/**
* @return int
*/
public function getId()
{ return $this->id; }
/**
* @return int
*/
public function getSurvivorId()
{ return $this->survivorId; }
/**
* @return int
*/
public function getSkillId()
{ return $this->skillId; }
/**
* @return int
*/
public function getSurvivorTypeId()
{ return $this->survivorTypeId; }
/**
* @return int
*/
public function getTagLevelId()
{ return $this->tagLevelId; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id=$id; }
/**
* @param int $survivorId
*/
public function setSurvivorId($survivorId)
{ $this->survivorId=$survivorId; }
/**
* @param int $skillId
*/
public function setSkillId($skillId)
{ $this->skillId=$skillId; }
/**
* @param int $survivorTypeId
*/
public function setSurvivorTypeId($survivorTypeId)
{ $this->survivorTypeId=$survivorTypeId; }
/**
* @param int $tagLevelId
*/
public function setTagLevelId($tagLevelId)
{ $this->tagLevelId=$tagLevelId; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('SurvivorSkill'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return SurvivorSkill
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new SurvivorSkill(), self::getClassVars(), $row); }
/**
* @return Skill
*/
public function getSkill()
{
if ($this->Skill == null) {
$this->Skill = $this->SkillServices->selectSkill($this->skillId);
}
return $this->Skill;
}
/**
* @return string
*/
public function getSkillName()
{ return $this->getSkill()->getName(); }
public function getSurvivor()
{
if ($this->Survivor == null) {
$this->Survivor = $this->SurvivorServices->selectSurvivor($this->survivorId);
}
return $this->Survivor;
}
}
<file_sep>/core/services/LevelServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe LevelServices
* @author Hugues.
* @since 1.04.16
* @version 1.04.27
*/
class LevelServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
* @var LevelDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new LevelDaoImpl();
}
/**
* @param array $arrFilters
*/
private function buildFilters($arrFilters)
{
$this->arrParams[self::SQL_WHERE] = array();
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_NAME));
}
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getLevelsWithFilters($arrFilters=array(), $orderby=self::FIELD_ID, $order=self::ORDER_ASC)
{
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
$this->buildFilters($arrFilters);
return $this->Dao->selectEntriesWithFilters(__FILE__, __LINE__, $this->arrParams);
}
/**
* @param int $id
* @return Level
*/
public function selectLevel($id)
{ return $this->select(__FILE__, __LINE__, $id); }
}
<file_sep>/core/domain/Survivor.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe Survivor
* @author Hugues.
* @since 1.0.00
* @version 1.07.21
*/
class Survivor extends WpPostRelais
{
/**
* Id technique de la donnée
* @var int $id
*/
protected $id;
/**
* Nom de la donnée
* @var string $name
*/
protected $name;
/**
* A un profil Standard
* @var int $standard
*/
protected $standard;
/**
* A un profil Zombivor
* @var int $zombivor
*/
protected $zombivor;
/**
* A un profil Ultimate
* @var int $ultimate
*/
protected $ultimate;
/**
* A un profil Ultimate Zombivor
* @var int $ultimatez
*/
protected $ultimatez;
/**
* Id de l'extension
* @var int $expansionId
*/
protected $expansionId;
/**
* Background du Survivant
* @var string $background
*/
protected $background;
/**
* Eventuelle image alternative
* @var string $altImgName
*/
protected $altImgName;
/**
* Le Survivant peut-il être joué en ligne ?
* @var int $liveAble
*/
protected $liveAble;
public function __construct($attributes=array())
{
parent::__construct($attributes);
$this->imgBaseUrl = 'http://www.jhugues.fr/wp-content/plugins/hj-zombicide/web/rsc/img/portraits/p';
$this->SurvivorSkills = array();
}
/**
* @return int
*/
public function getId()
{return $this->id; }
/**
* @return string
*/
public function getName()
{ return $this->name; }
/**
* @return boolean
*/
public function isStandard()
{ return ($this->standard==1); }
/**
* @return boolean
*/
public function isZombivor()
{ return ($this->zombivor==1); }
/**
* @return boolean
*/
public function isUltimate()
{ return ($this->ultimate==1); }
/**
* @return boolean
*/
public function isUltimatez()
{ return ($this->ultimatez==1); }
/**
* @return int
*/
public function getExpansionId()
{ return $this->expansionId; }
/**
* @return string
*/
public function getBackground()
{ return $this->background; }
/**
* @return string
*/
public function getAltImgName()
{ return $this->altImgName; }
/**
* @return boolean
*/
public function isLiveAble()
{ return ($this->liveAble==1); }
/**
* @param int $id
*/
public function setId($id)
{ $this->id=$id; }
/**
* @param string $name
*/
public function setName($name)
{ $this->name=$name; }
/**
* @param int $standard
*/
public function setStandard($standard)
{ $this->standard=$standard; }
/**
* @param int $zombivor
*/
public function setZombivor($zombivor)
{ $this->zombivor=$zombivor; }
/**
* @param int $ultimate
*/
public function setUltimate($ultimate)
{ $this->ultimate=$ultimate; }
/**
* @param int $ultimatez
*/
public function setUltimatez($ultimatez)
{ $this->ultimatez=$ultimatez; }
/**
* @param int $expansionId
*/
public function setExpansionId($expansionId)
{ $this->expansionId=$expansionId; }
/**
* @param string $background
*/
public function setBackground($background)
{ $this->background=$background; }
/**
* @param string $altImgName
*/
public function setAltImgName($altImgName)
{ $this->altImgName=$altImgName; }
/**
* @param int $liveAble
*/
public function setLiveAble($liveAble)
{ $this->liveAble=$liveAble; }
///////////////////////////////////////////////////////////////
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('Survivor'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return Survivor
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new Survivor(), self::getClassVars(), $row); }
/**
* @return Bean
*/
public function getBean()
{ return new SurvivorBean($this); }
///////////////////////////////////////////////////////////////
/**
* @return string
*/
public function getWpPost()
{ return $this->getMainWpPost(self::FIELD_SURVIVORID, $this->id, self::WP_CAT_SURVIVOR_ID); }
////////////////////////////////////////////////////////////////////////////
// Méthodes relatives aux Portraits
/**
* @param string $str
* @return string
*/
public function getNiceName($str='')
{
if ($str=='') {
$str = $this->name;
}
return str_replace(array(' ', '#'), '', strtolower($str));
}
/**
* @param string $type
* @return string
*/
public function getPortraitUrl($type='')
{
if (!$this->isStandard() && $type=='') {
$type = 'u';
}
$usedName = ($this->getAltImgName()!='' ? $this->getAltImgName() : $this->name);
$wholeUrl = $this->imgBaseUrl.$this->getNiceName($usedName).($type!='' ? '-'.$type : '').'.jpg';
if (self::isAdmin() && @getimagesize($wholeUrl)===false) {
$wholeUrl = $this->imgBaseUrl.($type!='' ? '-'.$type : '').'.jpg';
}
return $wholeUrl;
}
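    /*
     * Illustrative example (hypothetical survivor): a standard survivor named
     * 'Amy #2' with no alternative image and $type 'z' resolves to
     *   .../portraits/pamy2-z.jpg
     * getNiceName() having stripped the space and the '#'. If the file is missing
     * and the current user is an admin, the generic .../portraits/p-z.jpg fallback
     * is returned instead.
     */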
////////////////////////////////////////////////////////////////////////////
protected function initSurvivorSkills()
{
$SurvivorSkills = $this->SurvivorSkillServices->getSurvivorSkillsWithFilters(array(self::FIELD_SURVIVORID=>$this->getId()));
while (!empty($SurvivorSkills)) {
$SurvivorSkill = array_shift($SurvivorSkills);
$survivorTypeId = $SurvivorSkill->getSurvivorTypeId();
$tagLevelId = $SurvivorSkill->getTagLevelId();
if (!isset($this->SurvivorSkills[$survivorTypeId])) {
$this->SurvivorSkills[$survivorTypeId] = array();
}
$this->SurvivorSkills[$survivorTypeId][$tagLevelId] = $SurvivorSkill->getSkill();
}
}
public function getSkill($type, $rank)
{
if ($this->SurvivorSkills == null) {
$this->SurvivorSkills = array();
$this->initSurvivorSkills();
}
        return (isset($this->SurvivorSkills[$type][$rank]) ? $this->SurvivorSkills[$type][$rank] : new Skill());
}
/**
* @param int $survivorTypeId
* @return boolean
*/
public function areDataSkillsOkay($survivorTypeId=1)
{
// On récupère les Compétences associées au Survivant pour le profil passé en paramètre.
$SurvivorSkills = $this->getSurvivorSkills($survivorTypeId);
$nbSkills = count($SurvivorSkills);
// On doit avoir 7 (profils standards et zombivants) ou
// 8 (ultimate survivant et zombivant, ou standard avec Descente en Rappel ou Pilote d'Hélicoptère) compétences de retournées.
// Si on a ce nombre de compétences, on retourne true. Sinon false.
return ($nbSkills==7 || $nbSkills==8);
}
/**
* @return array SurvivorSkill
*/
public function getSurvivorSkills($survivorTypeId='')
{
if ($this->SurvivorSkills == null) {
$arrFilters = array(self::FIELD_SURVIVORID=>$this->id);
if ($survivorTypeId!='') {
$arrFilters[self::FIELD_SURVIVORTYPEID] = $survivorTypeId;
}
$this->SurvivorSkills = $this->SurvivorSkillServices->getSurvivorSkillsWithFilters($arrFilters);
}
return $this->SurvivorSkills;
}
public function getAdminUlSkills($survivorTypeId=1)
{
$args = array(
self::FIELD_SURVIVORID => $this->getId(),
self::FIELD_SURVIVORTYPEID => $survivorTypeId,
);
$SurvivorSkills = $this->SurvivorSkillServices->getSurvivorSkillsWithFilters($args);
$strReturned = '';
while (!empty($SurvivorSkills)) {
$SurvivorSkill = array_shift($SurvivorSkills);
if ($SurvivorSkill->getSurvivorTypeId()!=$survivorTypeId) {
continue;
}
$strReturned .= '<li><span>'.$SurvivorSkill->getBean()->getBadge().'</span></li>';
}
return $this->getBean()->getBalise(self::TAG_UL, $strReturned, array(self::ATTR_CLASS=>'col-12'));
}
/**
* @return string
*/
public function getStrClassFilters()
{ return 'col-12 col-md-6 col-xl-4'; }
/**
   * Retourne vrai si le type de Survivant associé au SurvivorSkill ne correspond pas à celui attendu (et doit donc être ignoré).
* @param string $type Le type recherché
* @param SurvivorSkill $SurvivorSkill
* @return boolean
*/
public function controlTypeAndSkill($type, $SurvivorSkill)
{
return ($type=='' && $SurvivorSkill->getSurvivorTypeId()!=1 ||
$type=='z' && $SurvivorSkill->getSurvivorTypeId()!=2 ||
$type=='u' && $SurvivorSkill->getSurvivorTypeId()!=3 ||
$type=='uz' && $SurvivorSkill->getSurvivorTypeId()!=4);
}
/**
* @param string $type
* @param boolean $withLink
* @return string
*/
public function getUlSkills($type='', $withLink=false, $isHome=false)
{
if ($type=='' && !$this->isStandard() && $this->isUltimate()) {
$type='u';
}
$classExtra = 'col-12'.($isHome ? '' : ' col-sm-6 col-lg-3');
$SurvivorSkills = $this->getSurvivorSkills();
$str = '';
$strTmp = '';
if (!empty($SurvivorSkills)) {
foreach ($SurvivorSkills as $SurvivorSkill) {
if ($this->controlTypeAndSkill($type, $SurvivorSkill)) {
continue;
}
switch ($SurvivorSkill->getTagLevelId()) {
case 20 :
case 30 :
case 40 :
$str .= $this->getBean()->getBalise(self::TAG_UL, $strTmp, array(self::ATTR_CLASS=>$classExtra));
$strTmp = '';
break;
default :
break;
}
$strTmp .= $this->getSkillLi($SurvivorSkill, $withLink);
}
$str .= $this->getBean()->getBalise(self::TAG_UL, $strTmp, array(self::ATTR_CLASS=>$classExtra));
}
return $str;
}
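    /*
     * Note on the rendering above: the running <ul> is flushed each time the first
     * skill of the yellow (20), orange (30) or red (40) tag level is met, so with
     * the usual blue/yellow/orange/red progression the skills come out as four
     * separate lists, one per colour.
     */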
private function getSkillLi($SurvivorSkill, $withLink)
{ return $this->getBean()->getBalise(self::TAG_LI, $SurvivorSkill->getBean()->getBadge($withLink)); }
}
<file_sep>/core/bean/TokenBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe TokenBean
* @author Hugues
* @since 1.11.01
* @version 1.11.01
*/
class TokenBean extends LocalBean
{
protected $urlDirLiveMissions = '/web/rsc/missions/live/';
private $addClass = ' token';
private $color;
private $coordX;
private $coordY;
private $height;
private $id;
private $level;
private $name;
private $orientation;
private $quantite;
private $src;
private $status;
private $type;
private $width;
private $urlMenuZombiesTemplate = 'web/pages/public/fragments/menu-zombies-creation.php';
private $urlOnlineDetailSurvivor = 'web/pages/public/fragments/online-detail-survivor.php';
private $arrTagColors = array(1=>'blue', 2=>'yellow', 3=>'orange', 4=>'red');
/**
   * @param SimpleXMLElement|array $chip
*/
public function __construct($chip=null)
{
parent::__construct();
$this->SurvivorServices = new SurvivorServices();
$this->SkillServices = new SkillServices();
if (!is_array($chip)) {
$this->color = $chip->attributes()['color'];
$this->coordX = $chip->attributes()['coordX'];
$this->coordY = $chip->attributes()['coordY'];
$this->id = $chip->attributes()['id'];
$this->level = $chip->attributes()['level'];
$this->orientation = $chip->attributes()['orientation'];
$this->quantite = $chip->attributes()['quantite'];
$this->src = $chip->attributes()['src'];
$this->status = $chip->attributes()['status'];
$this->type = $chip->attributes()['type'];
$this->experiencePoints = $chip->attributes()['experiencePoints'];
$this->actionPoints = $chip->attributes()['actionPoints'];
$this->hitPoints = $chip->attributes()['hitPoints'];
} else {
$this->color = $chip[self::XML_ATTRIBUTES]['color'];
$this->coordX = $chip[self::XML_ATTRIBUTES]['coordX'];
$this->coordY = $chip[self::XML_ATTRIBUTES]['coordY'];
$this->id = $chip[self::XML_ATTRIBUTES]['id'];
$this->level = $chip[self::XML_ATTRIBUTES]['level'];
$this->orientation = $chip[self::XML_ATTRIBUTES]['orientation'];
$this->quantite = $chip[self::XML_ATTRIBUTES]['quantite'];
$this->src = $chip[self::XML_ATTRIBUTES]['src'];
$this->status = $chip[self::XML_ATTRIBUTES]['status'];
$this->type = $chip[self::XML_ATTRIBUTES]['type'];
$this->experiencePoints = $chip[self::XML_ATTRIBUTES]['experiencePoints'];
$this->actionPoints = $chip[self::XML_ATTRIBUTES]['actionPoints'];
$this->hitPoints = $chip[self::XML_ATTRIBUTES]['hitPoints'];
}
$this->chip = $chip;
$this->patternZombie = '/z(Walker|Runner|Fatty|Abomination)(Standard)/';
$this->init();
}
public function getJsonModifications($id, $bln_create)
{
// On retourne Le Token et son menu
$args = array(
// On veut retourner le Tag mis à jour.
array($id, $this->getTokenBalise()),
// On veut retourner le Menu associé au Tag mis à jour.
array('m'.$id, $this->getTokenMenu()),
);
// Eventuellement, on ajoute pour certains trucs spéciaux.
if ($this->type=='Survivor') {
$args[] = array('portrait-'.($bln_create ? 'new' : $id), $this->getTokenPortrait());
$args[] = array('detail-survivor-'.($bln_create ? 'new' : $id), $this->getTokenDetail());
}
// Puis on retourne le tout
return $args;
}
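    /*
     * Illustrative shape of the returned array (ids and markup are hypothetical):
     * each entry pairs a DOM id with the re-rendered HTML fragment the front-end
     * swaps in. For an existing survivor token with id 's3' ($bln_create false):
     *
     *   array(
     *       array('s3',                 '<div class="chip token survivor ...">...</div>'),
     *       array('ms3',                '<menu class="menu" id="ms3">...</menu>'),
     *       array('portrait-s3',        '<img id="portrait-s3" class="known" ...>'),
     *       array('detail-survivor-s3', '... online-detail-survivor template output ...'),
     *   )
     */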
private function init()
{
$this->baliseContent = '';
switch ($this->type) {
case 'Noise' :
$this->width = 55;
$this->height = 50;
$this->addClass .= ' noise';
$this->name = 'noise';
$this->baliseContent = $this->getBalise(self::TAG_DIV, $this->quantite, array(self::ATTR_CLASS=>'badge'));
break;
case 'Door' :
$this->width = 56;
$this->height = 56;
$this->name = 'door_'.strtolower($this->color).'_'.strtolower($this->status);
$this->addClass .= ' '.$this->orientation;
break;
case 'Objective' :
$this->width = 50;
$this->height = 50;
$this->name = 'objective_'.($this->status=='Unveiled' ? 'red' : strtolower($this->color));
break;
case 'Spawn' :
$this->width = 100;
$this->height = 50;
$this->name = 'spawn_'.strtolower($this->color);
$this->addClass .= ' '.$this->orientation.' '.strtolower($this->status);
break;
case 'Exit' :
$this->width = 100;
$this->height = 50;
$this->name = 'exit';
$this->addClass .= ' '.$this->orientation.($this->status=='Unactive' ? ' unactive' : '');
break;
case 'Survivor' :
$this->width = 50;
$this->height = 50;
$this->addClass .= ' survivor '.$this->level;
$args = array(
self::ATTR_SRC=>'/wp-content/plugins/hj-zombicide/web/rsc/img/portraits/'.$this->src.'.jpg'
);
$this->baliseContent = $this->getBalise(self::TAG_IMG, '', $args);
break;
case 'Zombie' :
                if (preg_match($this->patternZombie, $this->src, $matches)) {
                    $strName = $matches[1].' '.$matches[2];
                    $zombieClass = $matches[2];
                } else {
                    // Pattern non reconnu : on évite d'accéder à $matches[2] qui n'existe pas.
                    $strName = 'Pattern foireux ('.$this->src.')';
                    $zombieClass = '';
                }
                $this->width = 50;
                $this->height = 50;
                $this->type = 'Zombie';
                $this->addClass .= ' zombie '.$zombieClass;
$args = array(
self::ATTR_SRC => '/wp-content/plugins/hj-zombicide/web/rsc/img/zombies/'.$this->src.'.png',
self::ATTR_TITLE => $strName.' x'.$this->quantite,
);
$this->baliseContent = $this->getBalise(self::TAG_IMG, '', $args);
$this->baliseContent .= $this->getBalise(self::TAG_DIV, $this->quantite, array(self::ATTR_CLASS=>'badge'));
break;
default :
$this->name = '';
break;
}
}
public function getTokenBalise()
{
if ($this->status=='Picked') {
return '';
}
$args = array(
self::ATTR_CLASS => 'chip'.$this->addClass,
'data-color' => $this->color,
'data-coordX' => $this->coordX,
'data-coordY' => $this->coordY,
'data-height' => $this->height,
self::ATTR_ID => $this->id,
'data-orientation' => $this->orientation,
'data-status' => $this->status,
'data-type' => $this->type,
'data-width' => $this->width,
);
if (!empty($this->name)) {
$args['style'] = "background:url('/wp-content/plugins/hj-zombicide/web/rsc/img/tokens/".$this->name.".png');";
}
return $this->getBalise(self::TAG_DIV, $this->baliseContent, $args);
}
private function getLiMenuSeparator()
{ return $this->getBalise(self::TAG_LI, '', array(self::ATTR_CLASS=>'menu-separator')); }
private function getLiMenuItem($label, $act, $iCode, $disabled='', $type='')
{
$span = $this->getBalise(self::TAG_SPAN, $label, array(self::ATTR_CLASS=>'menu-text'));
$i = $this->getBalise(self::TAG_I, '', array(self::ATTR_CLASS=>'fa fa-'.$iCode));
$button = $this->getBalise(self::TAG_BUTTON, $i.$span, array(self::ATTR_TYPE=>'button', self::ATTR_CLASS=>'menu-btn'));
$argsLi = array(
self::ATTR_CLASS => 'menu-item'.$disabled,
self::ATTR_ID => $this->id,
'data-menu-action' => $act
);
if ($type!='') {
$argsLi['data-quantite'] = 1;
$argsLi['data-type'] = $type;
}
return $this->getBalise(self::TAG_LI, $button, $argsLi);
}
private function getDoorMenu()
{
// On peut vouloir ouvrir ou fermer une porte.
$strMenu = $this->getLiMenuItem('Ouvrir', 'open', 'folder-open-o', ($this->status=='Closed' ? '' : ' '.self::CST_DISABLED));
return $strMenu . $this->getLiMenuItem('Fermer', 'close', 'folder-o', ($this->status=='Opened' ? '' : ' '.self::CST_DISABLED));
}
private function getObjectiveMenu()
{
// On peut vouloir révéler ou prendre un Objectif
$strMenu = $this->getLiMenuItem('Révéler', 'reveal', 'share-square-o', ($this->status=='Unveiled' ? '' : ' '.self::CST_DISABLED));
return $strMenu . $this->getLiMenuItem('Prendre', 'pick', 'check-square-o', ($this->status=='Unactive' ? '' : ' '.self::CST_DISABLED));
}
private function getExitMenu()
{
$strMenu = $this->getLiMenuItem('Activer', 'activate', 'thumbs-o-up', ($this->status=='Unactive' ? '' : ' '.self::CST_DISABLED));
return $strMenu . $this->getLiMenuItem('Désactiver', 'unactivate', 'thumbs-o-down', ($this->status=='Active' ? '' : ' '.self::CST_DISABLED));
}
private function getSpawnMenu()
{
// On peut vouloir activer ou désactiver un Spawn.
$strMenu = $this->getLiMenuItem('Activer', 'activate', 'thumbs-o-up', ($this->status=='Unactive' ? '' : ' '.self::CST_DISABLED));
$strMenu .= $this->getLiMenuItem('Désactiver', 'unactivate', 'thumbs-o-down', ($this->status=='Active' ? '' : ' '.self::CST_DISABLED));
$strMenu .= $this->getLiMenuSeparator();
// On peut vouloir le retirer du plateau. On peut vouloir le déplacer.
$strMenu .= $this->getLiMenuItem('Déplacer', 'move', 'arrows-alt', ' '.self::CST_DISABLED);
$strMenu .= $this->getLiMenuItem('Supprimer', 'pick', 'trash');
$strMenu .= $this->getLiMenuSeparator();
// On peut vouloir ajouter des Zombies.
$strMenu .= $this->getLiMenuItem('Piocher', 'draw', 'stack-overflow', ($this->status=='Active' ? '' : ' '.self::CST_DISABLED), 'Spawn');
$strMenu .= $this->getLiMenuItem('Mélanger', 'shuffle', 'recycle', ($this->status=='Active' ? '' : ' '.self::CST_DISABLED), 'Spawn');
$args = array(($this->status=='Active' ? '' : ' '.self::CST_DISABLED));
return $strMenu . $this->getRender($this->urlMenuZombiesTemplate, $args);
}
private function getLiSubMenu($faClass, $label, $content)
{
return '<li class="menu-item submenu"><button type="button" class="menu-btn"> <i class="fa fa-'.$faClass.'"></i> <span class="menu-text">'.$label.'</span> </button><menu class="menu">'.$content.'</menu></li>';
}
private function getSurvivorMenu()
{
$strButton = '<button type="button" class="menu-btn"> <span class="menu-text">%1$s</span> </button>';
$argsLi = array(
self::ATTR_CLASS => 'menu-item',
self::ATTR_ID => $this->id,
'data-menu-action' => 'add',
);
// On peut ajouter des Zombies
$subMenu = '';
for ($i=1; $i<=5; $i++) {
$argsLi['data-quantite'] = $i;
$argsLi['data-type'] = 'xp';
$subMenu .= $this->getBalise(self::TAG_LI, sprintf($strButton, $i), $argsLi);
}
$strMenu = $this->getLiSubMenu('plus-circle', 'Ajouter XP', $subMenu);
$strMenu .= $this->getLiMenuItem('Retirer 1 XP', 'del', 'minus-circle', ($this->experiencePoints!=0 ? '' : ' '.self::CST_DISABLED), 'xp');
$strMenu .= $this->getLiMenuSeparator();
$strMenu .= $this->getLiMenuItem('Ajouter 1 PA', 'add', 'plus-circle', '', 'pa');
$strMenu .= $this->getLiMenuItem('Retirer 1 PA', 'del', 'minus-circle', ($this->actionPoints!=0 ? '' : ' '.self::CST_DISABLED), 'pa');
$strMenu .= $this->getLiMenuSeparator();
$strMenu .= $this->getLiMenuItem('Ajouter 1 PV', 'add', 'plus-circle', '', 'pv');
$strMenu .= $this->getLiMenuItem('Retirer 1 PV', 'del', 'minus-circle', ($this->hitPoints!=0 ? '' : ' '.self::CST_DISABLED), 'pv');
$strMenu .= $this->getLiMenuSeparator();
return $strMenu . $this->getLiMenuItem('Supprimer', 'pick', 'trash');
}
private function getBruitMenu()
{ return $this->getZombieMenu(); }
private function getZombieMenu()
{
$strButton = '<button type="button" class="menu-btn"> <span class="menu-text">%1$s</span> </button>';
$argsLi = array(
self::ATTR_CLASS => 'menu-item',
self::ATTR_ID => $this->id,
'data-menu-action' => 'add',
);
// On peut ajouter des Zombies
$subMenu = '';
// De 1 à 5
for ($i=1; $i<=5; $i++) {
$argsLi['data-quantite'] = $i;
$subMenu .= $this->getBalise(self::TAG_LI, sprintf($strButton, $i), $argsLi);
}
$strMenu = $this->getLiSubMenu('plus-circle', 'Ajouter', $subMenu);
// On peut enlever des Zombies
// Tous d'un coup
$argsLi['data-menu-action'] = 'pick';
        unset($argsLi['data-quantite']);
$subMenu = $this->getBalise(self::TAG_LI, sprintf($strButton, 'Tous'), $argsLi);
// Ou de 1 à 5 ou 1 de moins que le nombre disponible.
$argsLi['data-menu-action'] = 'del';
if ($this->quantite>1) {
$subMenu .= $this->getLiMenuSeparator();
for ($i=1; $i<min(6, $this->quantite); $i++) {
$argsLi['data-quantite'] = $i;
$subMenu .= $this->getBalise(self::TAG_LI, sprintf($strButton, $i), $argsLi);
}
}
$strMenu .= $this->getLiSubMenu('minus-circle', 'Retirer', $subMenu);
// On peut déplacer des Zombies
$strMenu .= $this->getLiMenuSeparator();
return $strMenu . $this->getLiMenuItem('Déplacer', 'move', 'arrows-alt', ' '.self::CST_DISABLED);
}
public function getTokenMenu()
{
switch ($this->type) {
case 'Noise' :
$returned = $this->getBruitMenu();
break;
case 'Door' :
$returned = $this->getDoorMenu();
break;
case 'Objective' :
$returned = $this->getObjectiveMenu();
break;
case 'Spawn' :
$returned = $this->getSpawnMenu();
break;
case 'Exit' :
$returned = $this->getExitMenu();
break;
case 'Zombie' :
$returned = $this->getZombieMenu();
break;
case 'Survivor' :
$returned = $this->getSurvivorMenu();
break;
default :
$returned = 'Bad Type in getTokenMenu : ['.$this->type.'].';
break;
}
return $this->getBalise('menu', $returned, array(self::ATTR_CLASS=>'menu', self::ATTR_ID=>'m'.$this->id));
}
public function getTokenPortrait()
{
// Retourne le portrait que l'on veut afficher en haut à droite de la sidebar.
$args = array(
self::ATTR_ID => 'portrait-'.$this->id,
self::ATTR_CLASS => 'known',
self::ATTR_SRC => '/wp-content/plugins/hj-zombicide/web/rsc/img/portraits/p'.$this->src.'.jpg',
self::ATTR_TITLE => '',
);
return $this->getBalise(self::TAG_IMG, '', $args);
}
public function getTokenDetail()
{
$fileName = PLUGIN_PATH.$this->urlDirLiveMissions.$_SESSION['zombieKey'].".mission.xml";
$this->objXmlDocument = simplexml_load_file($fileName);
// On récupère l'id et le Survivor associé
$survivorId = substr($this->id, 1);
$Survivor = $this->SurvivorServices->selectSurvivor($survivorId);
// On récupère les Skills stockés dans le fichier
$skills = $this->objXmlDocument->xPath('//survivor[@id="'.$this->id.'"]/skills/skill');
$strSkills = '';
while (!empty($skills)) {
$skill = array_shift($skills);
// On récupère l'id du Skill pour aller chercher le nom en base.
$skillId = $skill->attributes()[self::FIELD_SKILLID];
$Skill = $this->SkillServices->selectSkill($skillId);
// On récupère son Id
$nodeId = $skill->attributes()[self::FIELD_ID];
list(, $tagLevel) = explode('-', $nodeId);
$skillColor = $this->arrTagColors[$tagLevel/10];
// On récupère le Status
$skillStatus = $skill->attributes()['status'];
// On construit les tags HTML
$spanBadge = $this->getBalise(self::TAG_SPAN, $Skill->getName(), array(self::ATTR_CLASS=>'badge badge-'.$skillColor.'-skill'));
// Et on stack la liste de Skills
$argsLi = array(
self::ATTR_ID=>$nodeId,
self::ATTR_CLASS=>($skillStatus=='Unactive' ? 'disabled' : '')
);
$strSkills .= $this->getBalise(self::TAG_LI, $spanBadge, $argsLi);
}
// On enrichit le Template et on retourne l'ensemble.
$args = array(
// Le rang du Survivant dans la partie
$this->id,
// L'url du portrait
$Survivor->getPortraitUrl(),
// Le nom du Survivant
$Survivor->getName(),
// Niveau du Survivant
strtolower($this->level),
// Nombre d'XP - 5
strtolower($this->experiencePoints),
// Nombre de PA - 6
strtolower($this->actionPoints),
// Nombre de PV - 7
strtolower($this->hitPoints),
// Les Compétences du Survivant - 8
$strSkills
);
return $this->getRender($this->urlOnlineDetailSurvivor, $args);
}
}
<file_sep>/core/actions/LiveMissionActions.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* LiveMissionActions
* @author Hugues
* @since 1.10.06
* @version 1.10.06
*/
class LiveMissionActions extends LocalActions
{
protected $urlDirSpawns = '/wp-content/plugins/hj-zombicide/web/rsc/img/spawns/';
protected $urlDirLiveMissions = 'web/rsc/missions/live/';
protected $strTokenStyle = 'background:url("/wp-content/plugins/hj-zombicide/web/rsc/img/tokens/%1$s.png");';
protected $chipToken = '<PASSWORD>';
/**
* Constructeur
*/
public function __construct($post=array())
{
parent::__construct();
$this->post = $post;
$this->SurvivorServices = new SurvivorServices();
$this->EquipmentExpansionServices = new EquipmentExpansionServices();
}
/**
* Point d'entrée des méthodes statiques.
* @param array $post
* @return string
**/
public static function dealWithStatic($post)
{
$returned = '';
$Act = new LiveMissionActions($post);
if ($post[self::CST_AJAXACTION]=='updateLiveMission') {
$returned = $Act->dealWithUpdateLiveMission();
} else {
$returned = '';
}
return $returned;
}
/**
* @return string
*/
public function dealWithUpdateLiveMission()
{
$returned = '';
////////////////////////////////////////////////////////////////////////
// On récupère et vérifie les données
if (!isset($this->post['uniqid'])) {
return $this->formatErrorMessage('Identifiant fichier non défini.');
}
$this->fileId = $this->post['uniqid'];
$fileName = PLUGIN_PATH.$this->urlDirLiveMissions.$this->fileId.".mission.xml";
if (!is_file($fileName)) {
return $this->formatErrorMessage('Le fichier de sauvegarde n\'existe pas.');
}
////////////////////////////////////////////////////////////////////////
// On a le feu vert, on ouvre le fichier XML
$this->objXmlDocument = simplexml_load_file($fileName);
////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////
// On est venu pour ça. Analyser l'action passée en paramètre, et traiter.
$returned = $this->parseAndResolveAction();
////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////
// Et si on en profitait pour purger les Tchats un peu vieux... de l'automate uniquement
$purgeLimit = time()-1*24*60*60;
$Tchats = $this->objXmlDocument->xPath('//tchat[@timestamp<"'.$purgeLimit.'" and @author="Automat"]');
// On parcourt la liste des logs à supprimer.
foreach ($Tchats as $Tchat) {
// Et on les supprime un à un.
unset($Tchat[0]);
}
////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////
// On sauvegarde les modifications du fichier.
$this->objXmlDocument->asXML($fileName);
    // Et on retourne le visuel modifié, s'il y en a un (chaîne vide sinon).
    return $returned;
}
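  /**
   * Dispatches the AJAX action described by $this->post and applies it to the XML save file.
   * Minimal sketch of a payload (an illustration only, assuming the keys read below are the
   * ones required; some actions need extra keys such as 'survivorId', 'coordx'/'coordy',
   * 'interval' or 'tsTreshold'):
   *
   *   $post = array(
   *       LiveMissionActions::CST_AJAXACTION => 'updateLiveMission',
   *       'uniqid'   => $_SESSION['zombieKey'],
   *       'act'      => 'move',
   *       'id'       => 'z3',   // hypothetical id of an existing token node
   *       'type'     => '',
   *       'quantite' => '',
   *       'left'     => 120,
   *       'top'      => 80,
   *   );
   *   echo LiveMissionActions::dealWithStatic($post);
   *
   * @return string JSON describing the elements to refresh, or '' when there is nothing to render.
   */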
private function parseAndResolveAction()
{
    $returned = '';
    $bln_create = true;
$this->id = $this->post['id'];
if ($this->id!='') {
$this->node = $this->objXmlDocument->xPath('//*[@id="'.$this->id.'"]')[0];
$bln_create = false;
}
$qte = $this->post['quantite'];
$type = $this->post['type'];
$posX = $this->post['left'];
$posY = $this->post['top'];
$needABean = false;
switch ($this->post['act']) {
case 'activate' :
$needABean = $this->activateAction();
break;
case 'add' :
$needABean = $this->addAction($qte, $type);
break;
case 'close' :
$needABean = $this->closeAction();
break;
case 'del' :
$needABean = $this->deleteAction($qte, $type);
break;
case 'draw' :
$returned = $this->drawAction($type);
$needABean = false;
break;
case 'init' :
$returned = $this->initAction($type);
$needABean = false;
break;
case 'move' :
$needABean = $this->moveAction($posX, $posY);
break;
case 'open' :
$needABean = $this->openAction();
break;
case 'pick' :
$this->pickAction();
break;
case 'reveal' :
$needABean = $this->revealAction();
break;
case 'shuffle' :
$this->shuffleAction($type);
break;
case 'tchat' :
$returned = $this->tchatAction();
$needABean = false;
break;
case 'unactivate' :
$needABean = $this->unactivateAction();
break;
default :
break;
}
if ($needABean) {
$TokenBean = new TokenBean($this->node);
$returned = $TokenBean->getJsonModifications($this->id, $bln_create);
return $this->jsonString($returned, 'lstElements', true);
} else {
return $returned;
}
}
private function initAction($type)
{
switch ($type) {
case 'Item' :
break;
case 'Spawn' :
// On doit supprimer tous les Spawns.
$Spawns = $this->objXmlDocument->xPath('//spawns/spawn');
// On vire le noeud Spawns
foreach ($Spawns as $Spawn) {
// Et on les supprime un à un.
unset($Spawn[0]);
}
// On récupère l'intervalle à utiliser dorénavant.
$newInterval = $this->post['interval'];
$this->insertTchatMessage('Pioche Invasion redéfinie : '.$newInterval);
// Et on recrée le nouveau, avec le bon intervalle.
$Spawns = $this->objXmlDocument->xpath('//spawns')[0];
$Spawns->attributes()['interval'] = $newInterval;
// On ajoute les nouvelles cartes
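        // Sketch of the interval grammar assumed by the parsing below:
        //   "1-6"      => cards 1 to 6, once
        //   "1-6x2"    => cards 1 to 6, twice
        //   "7"        => card 7 only
        //   "1-6,7-18" => comma-separated blocks, concatenated in order
        // e.g. "1-6x2,7-18" produces ranks 1 to 24 before shuffling.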
$intervals = explode(',', $newInterval);
$rank = 1;
foreach ($intervals as $interval) {
          list($interval, $multi) = array_pad(explode('x', $interval), 2, '');
          list($start, $end) = array_pad(explode('-', $interval), 2, '');
if ($multi=='') {
$multi = 1;
}
if ($end=='') {
$end = $start;
}
for ($i=1; $i<=$multi; $i++) {
for ($j=$start; $j<=$end; $j++) {
$spawn = $this->objXmlDocument->spawns->addChild('spawn');
$spawn->addAttribute('id', 'spawn-'.$rank);
$spawn->addAttribute('src', 'x'.str_pad($j, 3, 0, STR_PAD_LEFT));
$spawn->addAttribute('rank', $rank);
$spawn->addAttribute('status', 'deck');
$rank++;
}
}
}
// On pense bien à mélanger.
$this->shuffleAction($type);
// Et on retourne l'intervalle mis à jour
$returned = array(
array('currentInterval', '<input type="text" class="form-control" id="currentInterval" readonly value="'.$newInterval.'"/>'),
);
return $this->jsonString($returned, 'lstElements', true);
break;
default :
break;
}
}
private function shuffleAction($type)
{
// Spawn, Equipment
// TODO : Equipment à faire.
switch ($type) {
case 'Item' :
// On récupère toutes les cartes Items, puis on les mélange
$Items = $this->objXmlDocument->xpath('//items/item');
shuffle($Items);
// On les renumérote en les remettant dans la pioche
$rank = 1;
foreach ($Items as $Item) {
$Item->attributes()['rank'] = $rank;
if ($Item->attributes()['status']=='discard') {
$Item->attributes()['status'] = 'deck';
}
$rank++;
}
// On insère un message et on ne retourne rien.
$this->insertTchatMessage('Pioche Equipment mélangée');
break;
case 'Spawn' :
// On récupère toutes les cartes Invasions, puis on les mélange
$Spawns = $this->objXmlDocument->xpath('//spawns/spawn');
shuffle($Spawns);
// On les renumérote en les remettant dans la pioche
$rank = 1;
foreach ($Spawns as $Spawn) {
$Spawn->attributes()['rank'] = $rank;
$Spawn->attributes()['status'] = 'deck';
$rank++;
}
// On insère un message et on ne retourne rien.
$this->insertTchatMessage('Pioche Invasion mélangée');
break;
default :
break;
}
}
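  /**
   * Draws the next Invasion card: reshuffles the deck if it is empty, marks the drawn card as
   * discarded, logs the draw, and returns the JSON used to display the card image in the modal.
   * @return string
   */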
private function drawAction($type)
{
// Spawn, Equipment
// TODO : Equipment à faire.
// On récupère toutes les cartes Invasions encore dans la pioche
$Spawns = $this->objXmlDocument->xPath('//spawns/spawn[@status="deck"]');
if (empty($Spawns)) {
$this->shuffleAction($type);
$Spawns = $this->objXmlDocument->xPath('//spawns/spawn[@status="deck"]');
}
// On trie par ordre croissant et on récupère le premier élément.
usort($Spawns, 'sort_trees_rank');
$Spawn = $Spawns[0];
// On le défausse, on le trace, puis on retourne le visuel
$Spawn->attributes()['status'] = 'discard';
$this->insertTchatMessage('1 Carte Invasion piochée');
//
$Bean = new LocalBean();
$returned = array(
array("modalBody", $Bean->getBalise(self::TAG_IMG, '', array(self::ATTR_SRC=>$this->urlDirSpawns.$Spawn->attributes()['src'].'-thumb.jpg'))),
);
return $this->jsonString($returned, 'lstElements', true);
}
private function moveAction($posX, $posY)
{
// Zombie, Noise, Survivor
$this->node->attributes()['coordX'] = $posX;
$this->node->attributes()['coordY'] = $posY;
$this->insertTchatMessage($this->node->attributes()['type'].' déplacé');
return true;
}
private function pickAction()
{
switch (substr($this->id, 0, 1)) {
case 'c' :
// Objective, Noise
$Elements = $this->objXmlDocument->map->chips->chip;
break;
case 'z' :
// Zombie
$Elements = $this->objXmlDocument->map->zombies->zombie;
$type = $this->node->attributes()['src'];
$oldQte = $this->objXmlDocument->xPath('//pool[@type="'.$type.'"]')[0]->attributes()['current'];
$this->objXmlDocument->xPath('//pool[@type="'.$type.'"]')[0]->attributes()['current'] = $oldQte - $this->node->attributes()['quantite'];
break;
case 's' :
// Survivor
$Elements = $this->objXmlDocument->map->survivors->survivor;
break;
default :
$this->insertTchatMessage('Suppression échouée ['.$this->id.'].');
$Elements = array();
break;
}
$cpt = 0;
foreach ($Elements as $element) {
if ($element['id'][0]==$this->id) {
$this->insertTchatMessage($element->attributes()['type'].' supprimé');
unset($Elements[$cpt]);
}
$cpt++;
}
}
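  /**
   * Creates a new token in the XML save file: a Survivor (placed on the Starting zone, with its
   * Skills), a Noise chip, or a Zombie whose type matches z(Walker|Runner|Fatty|Abomination)Standard.
   * Returns false when the requested type cannot be created.
   * @param string $type
   * @return bool
   */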
private function insertAction($type='')
{
// Zombie, Survivor, Bruit.
$createId = true;
switch ($type) {
case 'Survivor' :
/////////////////////////////////////////////////////////
// On ajoute un nouveau Survivor au fichier XML
$survivor = $this->objXmlDocument->xPath('//survivors')[0]->addChild('survivor');
$survivorId = $this->post['survivorId'];
$this->id = $survivorId;
$survivor->addAttribute('id', $survivorId);
// On s'appuie sur l'id pour récupérer les infos en base
$Survivor = $this->SurvivorServices->selectSurvivor(substr($survivorId, 1));
// Et on peut sauvegarder l'id du portrait
$usedName = ($Survivor->getAltImgName()!='' ? $Survivor->getAltImgName() : $Survivor->getName());
$src = 'p'.$Survivor->getNiceName($usedName);
$survivor->addAttribute('src', $src);
// On récupère le Token Zone de départ pour y mettre le Survivant.
$Token = $this->objXmlDocument->xPath('//chip[@type="Starting"]')[0];
$survivor->addAttribute('coordX', $Token->attributes()['coordX']);
$survivor->addAttribute('coordY', $Token->attributes()['coordY']);
// On initialise ensuite les données de base.
// TODO : Ces infos pourraient ne pas être fixes, selon ... plein de facteurs éventuels.
$survivor->addAttribute('type', 'Survivor');
$survivor->addAttribute('status', 'Survivor');
$survivor->addAttribute('hitPoints', 2);
$survivor->addAttribute('actionPoints', 3);
$survivor->addAttribute('experiencePoints', 0);
$survivor->addAttribute('level', 'Blue');
/////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////
// On va maintenant s'occuper d'ajouter les Skills du Survivor
$skills = $survivor->addChild('skills');
$SurvivorSkills = $Survivor->getSurvivorSkills(self::CST_SURVIVORTYPEID_S);
while (!empty($SurvivorSkills)) {
$SurvivorSkill = array_shift($SurvivorSkills);
$skill = $skills->addChild('skill');
$skill->addAttribute('id', $survivorId.'-'.$SurvivorSkill->getTagLevelId());
$skill->addAttribute('skillId', $SurvivorSkill->getSkillId());
$skill->addAttribute('status', ($SurvivorSkill->getTagLevelId()<20 ? 'Active' : 'Unactive'));
$skill->addAttribute('type', 'Skill');
}
/////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////
// Enfin, on gère l'équipement de départ pour ceux ayant une compétence spécifique
// TODO : Enfin... En suspens pour le moment.
$this->node = $survivor;
/////////////////////////////////////////////////////////
$msg = '1 Survivant ajouté.';
break;
case 'Noise' :
/////////////////////////////////////////////////////////
// On récupère l'id du prochain Token à insérer.
$chips = $this->objXmlDocument->xPath('//chips')[0];
$maxId = $chips->attributes()['maxid']+1;
$chips->attributes()['maxid'] = $maxId;
/////////////////////////////////////////////////////////
// On ajoute un nouveau Token au fichier XML
$this->id = 'c'.$maxId;
$chip = $this->objXmlDocument->xPath('//chips')[0]->addChild('chip');
$chip->addAttribute('id', $this->id);
$chip->addAttribute('type', 'Noise');
$chip->addAttribute('coordX', $this->post['coordx']);
$chip->addAttribute('coordY', $this->post['coordy']);
$chip->addAttribute('quantite', 1);
$this->node = $chip;
/////////////////////////////////////////////////////////
$msg = '1 Bruit ajouté.';
break;
default :
// Dans le cas des Zombies, c'est un peu plus complexe...
$patternZombie = '/z(Walker|Runner|Fatty|Abomination)(Standard)/';
if (preg_match($patternZombie, $type, $matches)) {
/////////////////////////////////////////////////////////
// On récupère l'id du prochain Zombie à insérer.
$zombies = $this->objXmlDocument->xPath('//zombies')[0];
$maxId = $zombies->attributes()['maxid']+1;
$zombies->attributes()['maxid'] = $maxId;
/////////////////////////////////////////////////////////
// On ajoute un nouveau Zombie au fichier XML
$this->id = 'z'.$maxId;
$zombie = $this->objXmlDocument->xPath('//zombies')[0]->addChild('zombie');
$zombie->addAttribute('id', $this->id);
$zombie->addAttribute('type', 'Zombie');
$zombie->addAttribute('src', $type);
$zombie->addAttribute('coordX', $this->post['coordx']);
$zombie->addAttribute('coordY', $this->post['coordy']);
$zombie->addAttribute('quantite', 1);
$this->node = $zombie;
/////////////////////////////////////////////////////////
$msg = '1 '.$matches[1].' '.$matches[2].' ajouté.';
$oldQte = $this->objXmlDocument->xPath('//pool[@type="'.$type.'"]')[0]->attributes()['current'];
$this->objXmlDocument->xPath('//pool[@type="'.$type.'"]')[0]->attributes()['current'] = $oldQte + 1;
} else {
$createId = false;
$msg = 'Tentative création Zombie foirée : '.$type.'.';
}
break;
}
$this->insertTchatMessage($msg);
return $createId;
}
private function addAction($qte='', $type='')
{
// Si l'id n'est pas défini, c'est probablement une insertion.
if ($this->id=='') {
return $this->insertAction($type);
}
// Pour les PA, PV & XP, on a passé un $type. Mais le type du node est Survivor.
$arrTypes = array('pa'=>'actionPoints', 'pv'=>'hitPoints', 'xp'=>'experiencePoints');
// Zombie, Noise, Survivor (XP, PV & PA).
$matchId = true;
switch ($this->node->attributes()['type']) {
case 'Noise' :
$oldQte = $this->node->attributes()['quantite'];
$this->node->attributes()['quantite'] = $oldQte + $qte;
$msg = $qte.' Bruit(s) ajouté(s).';
break;
case 'Zombie' :
$oldQte = $this->node->attributes()['quantite'];
$this->node->attributes()['quantite'] = $oldQte + $qte;
$msg = $qte.' Zombie(s) ajouté(s).';
$type = $this->node->attributes()['src'];
$oldQte = $this->objXmlDocument->xPath('//pool[@type="'.$type.'"]')[0]->attributes()['current'];
$this->objXmlDocument->xPath('//pool[@type="'.$type.'"]')[0]->attributes()['current'] = $oldQte + $qte;
break;
case 'Survivor' :
$oldQte = $this->node->attributes()[$arrTypes[$type]];
$this->node->attributes()[$arrTypes[$type]] = $oldQte + $qte;
$msg = $qte.' '.$type.' ajouté(s).';
break;
default :
$msg = 'Tentative insertion foirée.';
$matchId = false;
break;
}
$this->insertTchatMessage($msg);
return $matchId;
}
private function deleteAction($qte, $type)
{
// Pour les PA, PV & XP, on a passé un $type. Mais le type du node est Survivor.
$arrTypes = array('pa'=>'actionPoints', 'pv'=>'hitPoints', 'xp'=>'experiencePoints');
// Zombie, Noise, Survivor (XP, PV & PA).
$matchId = true;
switch ($this->node->attributes()['type']) {
case 'Noise' :
$oldQte = $this->node->attributes()['quantite'];
$this->node->attributes()['quantite'] = $oldQte - $qte;
$msg = $qte.' Bruit(s) retiré(s).';
break;
case 'Zombie' :
$oldQte = $this->node->attributes()['quantite'];
$this->node->attributes()['quantite'] = $oldQte - $qte;
$msg = $qte.' Zombie(s) retiré(s).';
break;
case 'Survivor' :
$oldQte = $this->node->attributes()[$arrTypes[$type]];
$this->node->attributes()[$arrTypes[$type]] = $oldQte - $qte;
$msg = $qte.' '.$type.' retiré(s).';
break;
default :
$msg = 'Tentative suppression foirée.';
$matchId = false;
break;
}
$this->insertTchatMessage($msg);
return $matchId;
}
private function revealAction()
{
// Objectif
$this->node->attributes()['status'] = 'Unactive';
$this->insertTchatMessage('Objectif révélé');
return true;
}
private function openCloseMutualAction($newStatus, $label)
{
// Door
$this->node->attributes()['status'] = $newStatus;
$this->insertTchatMessage('Porte '.$label);
return true;
}
private function openAction()
{ return $this->openCloseMutualAction('Opened', 'ouverte'); }
private function closeAction()
{ return $this->openCloseMutualAction('Closed', 'fermée'); }
private function activateUnactivateMutualAction($newStatus, $label)
{
// Spawn, Exit ou Skill
$matchId = true;
$this->node->attributes()['status'] = $newStatus;
switch ($this->node->attributes()['type']) {
case 'Exit' :
$msg = 'Zone de Sortie '.$label;
break;
case 'Skill' :
$msg = 'Compétence '.$label;
break;
case 'Spawn' :
$msg = 'Zone de Spawn '.$label;
break;
default :
$msg = ucfirst($label).' envisagée, mais id ['.$this->id.'] ne trouve pas de cible.';
$matchId = false;
break;
}
$this->insertTchatMessage($msg);
return $matchId;
}
private function unactivateAction()
{ return $this->activateUnactivateMutualAction('Unactive', 'désactivée'); }
private function activateAction()
{ return $this->activateUnactivateMutualAction('Active', 'activée'); }
private function tchatAction()
{
// L'idée est d'insérer le message envoyé en paramètre. Ou ne rien faire s'il n'y a pas de message à insérer.
// Puis de retourner les derniers messages insérés.
// Pour ça, on a besoin de connaître le timestamp du dernier tchat affiché. Il faut donc qu'il soit stocké dans le tchat...
$timestamp = $this->post['tsTreshold'];
$Bean = new WpPageMissionOnlineBean();
$lstTchats = $Bean->getLstTchats($timestamp);
$returned = array(
array('tchat-new', $lstTchats),
);
return $this->jsonString($returned, 'lstElements', true);
}
private function formatErrorMessage($msgError)
{
// TODO
return "[[$msgError]]";
}
private function insertTchatMessage($msg='', $author='Automat')
{
$Tchat = $this->objXmlDocument->tchats->addChild('tchat', $msg);
$Tchat->addAttribute('timestamp', time());
$Tchat->addAttribute('author', $author);
}
}
function sort_trees_rank($t1, $t2)
{
  // Comparateur usort : trie les cartes par rang numérique croissant.
  return ((int) $t1['rank']) <=> ((int) $t2['rank']);
}
<file_sep>/core/config/requests.php
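; NOTE: each section below is keyed by an entity name and, by convention (see e.g.
; MissionDaoImpl::__construct('Mission')), is assumed to be resolved by the matching *DaoImpl
; class. The %s markers are sprintf-style placeholders filled by the DAO layer in the column
; order listed in each statement.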
[Chat]
select="SELECT id, liveId, sendToId, senderId, timestamp, texte "
from="FROM wp_11_zombicide_chat "
where="WHERE (liveId LIKE '%s' OR sendToId LIKE '%s' OR senderId LIKE '%s') AND timestamp > '%s' "
insert="INSERT INTO wp_11_zombicide_chat (liveId, sendToId, senderId, timestamp, texte) VALUES ('%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_chat SET liveId='%s', sendToId='%s', senderId='%s', timestamp='%s', texte='%s' "
[Duration]
select="SELECT id, minDuration, maxDuration "
from="FROM wp_11_zombicide_duration "
where="WHERE minDuration LIKE '%s' AND maxDuration LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_duration (minDuration, maxDuration) VALUES ('%s', '%s');"
update="UPDATE wp_11_zombicide_duration SET minDuration='%s', maxDuration='%s' "
[Equipment]
select="SELECT id, name, textAbility "
from="FROM wp_11_zombicide_equipmentcards "
insert="INSERT INTO wp_11_zombicide_equipmentcards (name, textAbility) VALUES ('%s', '%s');"
update="UPDATE wp_11_zombicide_equipmentcards SET name='%s', textAbility='%s' "
[EquipmentExpansion]
select="SELECT id, equipmentCardId, expansionId, quantity "
from="FROM wp_11_zombicide_equipment_expansion "
where="WHERE equipmentCardId LIKE '%s' AND expansionId LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_equipment_expansion (equipmentCardId, expansionId, quantity) VALUES ('%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_equipment_expansion SET equipmentCardId='%s', expansionId='%s', quantity='%s' "
[EquipmentKeyword]
select="SELECT id, equipmentCardId, keywordId "
from="FROM wp_11_zombicide_equipment_keyword "
where="WHERE equipmentCardId LIKE '%s' AND keywordId LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_equipment_keyword (equipmentCardId, keywordId) VALUES ('%s', '%s');"
update="UPDATE wp_11_zombicide_equipment_keyword SET equipmentCardId='%s', keywordId='%s' "
[EquipmentLiveDeck]
select="SELECT id, liveId, equipmentCardId, rank, status, liveSurvivorId "
from="FROM wp_11_zombicide_equipmentlivedeck "
where="WHERE liveId LIKE '%s' AND equipmentCardId LIKE '%s' AND status LIKE '%s' AND liveSurvivorId LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_equipmentlivedeck (liveId, equipmentCardId, rank, status, liveSurvivorId) VALUES ('%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_equipmentlivedeck SET liveId='%s', equipmentCardId='%s', rank='%s', status='%s', liveSurvivorId='%s' "
[EquipmentWeaponProfile]
select="SELECT id, equipmentCardId, weaponProfileId, noisy "
from="FROM wp_11_zombicide_equipment_weaponprofile "
where="WHERE equipmentCardId LIKE '%s' AND weaponProfileId LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_equipment_weaponprofile (equipmentCardId, weaponProfileId, noisy) VALUES ('%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_equipment_weaponprofile SET equipmentCardId='%s', weaponProfileId='%s', noisy='%s' "
[Expansion]
select="SELECT id, code, name, displayRank, nbSurvivants, nbMissions, nbDalles, official "
from="FROM wp_11_zombicide_expansion "
where="WHERE code LIKE '%s' AND name LIKE '%s' AND nbMissions >= '%s' AND nbSurvivants >= '%s' "
insert="INSERT INTO wp_11_zombicide_expansion (code, name, displayRank, nbSurvivants, nbMissions, nbDalles, official) VALUES ('%s', '%s', '%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_expansion SET code='%s', name='%s', displayRank='%s', nbSurvivants='%s', nbMissions='%s', nbDalles='%s', official='%s' "
[ExpansionZombies]
select="SELECT id, expansionId, zombieTypeId, zombieCategoryId, quantite "
from="FROM wp_11_zombicide_expansion_zombies "
where="WHERE expansionId LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_expansion_zombies (expansionId, zombieTypeId, zombieCategoryId, quantite) VALUES ('%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_expansion_zombies SET expansionId='%s', zombieTypeId='%s', zombieCategoryId='%s', quantite='%s' "
[Keyword]
select="SELECT id, name, description "
from="FROM wp_11_zombicide_keyword "
where="WHERE name LIKE '%s' AND description LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_keyword (name, description) VALUES ('%s', '%s');"
update="UPDATE wp_11_zombicide_keyword SET name='%s', description='%s' "
[Level]
select="SELECT id, name "
from="FROM wp_11_zombicide_level "
where="WHERE name LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_level (name) VALUES ('%s');"
update="UPDATE wp_11_zombicide_level SET name='%s' "
[Live]
select="SELECT id, deckKey, dateUpdate "
from="FROM wp_11_zombicide_live "
where="WHERE deckKey LIKE '%s' AND dateUpdate <= '%s' "
insert="INSERT INTO wp_11_zombicide_live (deckKey, dateUpdate) VALUES ('%s', '%s');"
update="UPDATE wp_11_zombicide_live SET deckKey='%s', dateUpdate='%s' "
[LiveDeck]
select="SELECT id, deckKey, dateUpdate "
from="FROM wp_11_zombicide_livedeck "
where="WHERE deckKey LIKE '%s' AND dateUpdate <= '%s' "
insert="INSERT INTO wp_11_zombicide_livedeck (deckKey, dateUpdate) VALUES ('%s', '%s');"
update="UPDATE wp_11_zombicide_livedeck SET deckKey='%s', dateUpdate='%s' "
[LiveMission]
select="SELECT id, liveId, missionId, activeLiveSurvivorId, nbSurvivors, turn "
from="FROM wp_11_zombicide_live_mission "
where="WHERE liveId LIKE '%s' AND missionId LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_live_mission (liveId, missionId, activeLiveSurvivorId, nbSurvivors, turn) VALUES ('%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_live_mission SET liveId='%s', missionId='%s', activeLiveSurvivorId='%s', nbSurvivors='%s', turn='%s' "
[LiveMissionToken]
select="SELECT id, liveId, missionTokenId, status "
from="FROM wp_11_zombicide_live_missiontoken "
where="WHERE liveId LIKE '%s' AND tokenId LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_live_missiontoken (liveId, missionTokenId, status) VALUES ('%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_live_missiontoken SET liveId='%s', missionTokenId='%s', status='%s' "
[LiveSurvivor]
select="SELECT id, liveId, survivorId, missionZoneId, survivorTypeId, experiencePoints, hitPoints, playedThisTurn, turnRank, searchedThisTurn "
from="FROM wp_11_zombicide_live_survivor "
where="WHERE liveId LIKE '%s' AND survivorId LIKE '%s' AND missionZoneId LIKE '%s' AND playedThisTurn LIKE '%s' AND turnRank LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_live_survivor (liveId, survivorId, missionZoneId, survivorTypeId, experiencePoints, hitPoints, playedThisTurn, turnRank, searchedThisTurn) VALUES ('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_live_survivor SET liveId='%s', survivorId='%s', missionZoneId='%s', survivorTypeId='%s', experiencePoints='%s', hitPoints='%s', playedThisTurn='%s', turnRank='%s', searchedThisTurn='%s' "
[LiveSurvivorAction]
select="SELECT id, liveSurvivorId, actionId "
from="FROM wp_11_zombicide_livesurvivor_action "
where="WHERE liveSurvivorId LIKE '%s' AND actionId LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_livesurvivor_action (liveSurvivorId, actionId) VALUES ('%s', '%s');"
update="UPDATE wp_11_zombicide_livesurvivor_action SET liveSurvivorId='%s', actionId='%s' "
[LiveSurvivorSkill]
select="SELECT id, liveSurvivorId, skillId, tagLevelId, locked "
from="FROM wp_11_zombicide_live_survivorskill "
where="WHERE liveSurvivorId LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_live_survivorskill (liveSurvivorId, skillId, tagLevelId, locked) VALUES ('%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_live_survivorskill SET liveSurvivorId='%s', skillId='%s', tagLevelId='%s', locked='%s' "
[LiveZombie]
select="SELECT id, liveId, missionZoneId, zombieTypeId, zombieCategoryId, quantity "
from="FROM wp_11_zombicide_live_zombie "
where="WHERE liveId LIKE '%s' AND missionZoneId LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_live_zombie (liveId, missionZoneId, zombieTypeId, zombieCategoryId, quantity) VALUES ('%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_live_zombie SET liveId='%s', missionZoneId='%s', zombieTypeId='%s', zombieCategoryId='%s', quantity='%s' "
[Market]
select="SELECT id, name, description, quantity, price, imgProduct, universId, lang "
from="FROM wp_11_zombicide_market "
insert="INSERT INTO wp_11_zombicide_market (name, description, quantity, price, imgProduct, universId, lang) VALUES ('%s', '%s', '%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_market SET name='%s', description='%s', quantity='%s', price='%s', imgProduct='%s', universId='%s', lang='%s' "
[MessageModel]
select="SELECT code, model_fr "
from="FROM wp_11_zombicide_message_model "
where="WHERE code LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_message_model (code, model_fr) VALUES ('%s', '%s');"
update="UPDATE wp_11_zombicide_message_model SET code='%s', model_fr='%s' "
[Mission]
select="SELECT m.id AS id, title, m.code AS code, levelId, playerId, durationId, origineId, width, height, published, liveAble "
from="FROM wp_11_zombicide_mission AS m "
where="WHERE title LIKE '%s' AND code LIKE '%s' AND levelId LIKE '%s' AND durationId LIKE '%s' AND playerId LIKE '%s' AND origineId LIKE '%s' AND published LIKE '%s' AND liveAble LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_mission (title, code, levelId, playerId, durationId, origineId, width, height, published, liveAble) VALUES ('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_mission SET title='%s', code='%s', levelId='%s', playerId='%s', durationId='%s', origineId='%s', width='%s', height='%s', published='%s', liveAble='%s' "
[MissionExpansion]
select="SELECT id, missionId, expansionId "
from="FROM wp_11_zombicide_mission_expansion "
where="WHERE missionId LIKE '%s' AND expansionId LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_mission_expansion (missionId, expansionId) VALUES ('%s', '%s');"
update="UPDATE wp_11_zombicide_mission_expansion SET missionId='%s', expansionId='%s' "
[MissionObjective]
select="SELECT id, missionId, objectiveId, title "
from="FROM wp_11_zombicide_mission_objective "
where="WHERE missionId LIKE '%s' AND objectiveId LIKE '%s' AND title LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_mission_objective (missionId, objectiveId, title) VALUES ('%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_mission_objective SET missionId='%s', objectiveId='%s', title='%s' "
[MissionRule]
select="SELECT id, missionId, ruleId, title "
from="FROM wp_11_zombicide_mission_rule "
where="WHERE missionId LIKE '%s' AND ruleId LIKE '%s' AND title LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_mission_rule (missionId, ruleId, title) VALUES ('%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_mission_rule SET missionId='%s', ruleId='%s', title='%s' "
[MissionTile]
select="SELECT id, missionId, tileId, orientation, coordX, coordY "
from="FROM wp_11_zombicide_mission_tile "
where="WHERE missionId LIKE '%s' AND coordX LIKE '%s' AND coordY LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_mission_tile (missionId, tileId, orientation, coordX, coordY) VALUES ('%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_mission_tile SET missionId='%s', tileId='%s', orientation='%s', coordX='%s', coordY='%s' "
[MissionToken]
select="SELECT id, missionId, tokenId, coordX, coordY, color, status, orientation "
from="FROM wp_11_zombicide_mission_token "
where="WHERE missionId LIKE '%s' AND tokenId LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_mission_token (missionId, tokenId, coordX, coordY, color, status, orientation) VALUES ('%s', '%s', '%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_mission_token SET missionId='%s', tokenId='%s', coordX='%s', coordY='%s', color='%s', status='%s', orientation='%s' "
[MissionZone]
select="SELECT id, missionId, zoneNum, coordsX, coordsY, type, reachZone, startingZone "
from="FROM wp_11_zombicide_mission_zone "
where="WHERE missionId LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_mission_zone (missionId, zoneNum, coordsX, coordsY, type, reachZone, startingZone) VALUES ('%s', '%s', '%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_mission_zone SET missionId='%s', zoneNum='%s', coordsX='%s', coordsY='%s', type='%s', reachZone='%s', startingZone='%s' "
[Objective]
select="SELECT id, code, description "
from="FROM wp_11_zombicide_objective "
where="WHERE code LIKE '%s' AND description LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_objective (code, description) VALUES ('%s', '%s');"
update="UPDATE wp_11_zombicide_objective SET code='%s', description='%s' "
[Origine]
select="SELECT id, name "
from="FROM wp_11_zombicide_origine "
where="WHERE name LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_origine (name) VALUES ('%s');"
update="UPDATE wp_11_zombicide_origine SET name='%s' "
[Player]
select="SELECT id, name "
from="FROM wp_11_zombicide_player "
where="WHERE name LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_player (name) VALUES ('%s');"
update="UPDATE wp_11_zombicide_player SET name='%s' "
[Rule]
select="SELECT id, setting, code, description "
from="FROM wp_11_zombicide_rule "
where="WHERE setting LIKE '%s' AND code LIKE '%s' AND description LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_rule (setting, code, description) VALUES ('%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_rule SET setting='%s', code='%s', description='%s' "
[Skill]
select="SELECT DISTINCT s.id AS id, code, name, description, expansionId "
from="FROM wp_11_zombicide_skill s "
where="WHERE code LIKE '%s' AND (name LIKE '%s' OR description LIKE '%s') AND expansionId LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_skill (code, name, description, expansionId) VALUES ('%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_skill SET code='%s', name='%s', description='%s', expansionId='%s' "
[Spawn]
select="SELECT id, expansionId, spawnNumber, spawnTypeId, zombieCategoryId, blueZombieTypeId, blueQuantity, yellowZombieTypeId, yellowQuantity, orangeZombieTypeId, orangeQuantity, redZombieTypeId, redQuantity "
from="FROM wp_11_zombicide_spawncards "
where="WHERE expansionId LIKE '%s' AND spawnNumber LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_spawncards (expansionId, spawnNumber, spawnTypeId, zombieCategoryId, blueZombieTypeId, blueQuantity, yellowZombieTypeId, yellowQuantity, orangeZombieTypeId, orangeQuantity, redZombieTypeId, redQuantity) VALUES ('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_spawncards SET expansionId='%s', spawnNumber='%s', spawnTypeId='%s', zombieCategoryId='%s', blueZombieTypeId='%s', blueQuantity='%s', yellowZombieTypeId='%s', yellowQuantity='%s', orangeZombieTypeId='%s', orangeQuantity='%s', redZombieTypeId='%s', redQuantity='%s' "
[SpawnLive]
select="SELECT id, liveId, spawnCardId, rank, status "
from="FROM wp_11_zombicide_spawnlivedeck "
where="WHERE liveId LIKE '%s' AND spawnCardId LIKE '%s' AND status LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_spawnlivedeck (liveId, spawnCardId, rank, status) VALUES ('%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_spawnlivedeck SET liveId='%s', spawnCardId='%s', rank='%s', status='%s' "
[Survivor]
select="SELECT DISTINCT s.id AS id, name, standard, zombivor, ultimate, ultimatez, expansionId, background, altImgName, liveAble "
from="FROM wp_11_zombicide_survivor s "
where="WHERE name LIKE '%s' AND zombivor LIKE '%s' AND ultimate LIKE '%s' AND expansionId LIKE '%s' and background LIKE '%s' and liveAble LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_survivor (name, standard, zombivor, ultimate, ultimatez, expansionId, background, altImgName, liveAble) VALUES ('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_survivor SET name='%s', standard='%s', zombivor='%s', ultimate='%s', ultimatez='%s', expansionId='%s', background='%s', altImgName='%s', liveAble='%s' "
[SurvivorSkill]
select="SELECT id, survivorId, skillId, survivorTypeId, tagLevelId "
from="FROM wp_11_zombicide_survivor_skill "
where="WHERE survivorId LIKE '%s' AND skillId LIKE '%s' AND survivorTypeId LIKE '%s' AND tagLevelId LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_survivor_skill (survivorId, skillId, survivorTypeId, tagLevelId) VALUES ('%s', '%s', '%s', '%s');"
update="UPDATE wp_11_z_survivor_skill SET survivorId='%s', skillId='%s', survivorTypeId='%s', tagLevelId='%s' "
[Tile]
select="SELECT id, expansionId, code, coordPoly, zoneType, zoneAcces, activeTile, oCode, side_top, side_right, side_bottom, side_left "
from="FROM wp_11_zombicide_tile "
where="WHERE code LIKE '%s' AND expansionId LIKE '%s' AND activeTile LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_tile (expansionId, code, coordPoly, zoneType, zoneAcces, activeTile) VALUES ('%s', '%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_tile SET expansionId='%s', code='%s', coordPoly='%s', zoneType='%s', zoneAcces='%s', activeTile='%s', oCode='%s', top='%s', right='%s', bottom='%s', left='%s' "
[Token]
select="SELECT id, code, width, height "
from="FROM wp_11_zombicide_token "
where="WHERE code LIKE '%s' "
insert="INSERT INTO wp_11_zombicide_token (code, width, height) VALUES ('%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_token SET code='%s', width='%s', height='%s' "
[WeaponProfile]
select="SELECT id, minRange, maxRange, nbDice, successRate, damageLevel "
from="FROM wp_11_zombicide_weaponprofile "
insert="INSERT INTO wp_11_zombicide_weaponprofile (minRange, maxRange, nbDice, successRate, damageLevel) VALUES ('%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_weaponprofile SET minRange='%s', maxRange='%s', nbDice='%s', successRate='%s', damageLevel='%s' "
[ZombieType]
select="SELECT id, name, pointAction, endurance, pointExperience, zoneDeplacement "
from="FROM wp_11_zombicide_zombieType "
insert="INSERT INTO wp_11_zombicide_zombieType (name, pointAction, endurance, pointExperience, zoneDeplacement) VALUES ('%s', '%s', '%s', '%s', '%s');"
update="UPDATE wp_11_zombicide_zombieType SET name='%s', pointAction='%s', endurance='%s', pointExperience='%s', zoneDeplacement='%s' "
<file_sep>/core/bean/WpPostExpansionBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPostExpansionBean
* @author Hugues
* @since 1.07.21
* @version 1.08.01
*/
class WpPostExpansionBean extends WpPostBean
{
protected $urlTemplate = 'web/pages/public/wppage-expansion.php';
/**
* Class Constructor
*/
public function __construct($WpPost)
{
parent::__construct();
$this->ExpansionServices = new ExpansionServices();
$this->WpPost = $WpPost;
$code = $this->WpPost->getPostMeta(self::FIELD_CODE);
$Expansions = $this->ExpansionServices->getExpansionsWithFilters(array(self::FIELD_CODE=>$code));
$this->Expansion = (!empty($Expansions) ? array_shift($Expansions) : new Expansion());
}
/**
* On retourne la page dédiée à la compétence.
* @return string
*/
public function getContentPage()
{
if ($this->Expansion->isOfficial()) {
$label = 'Officielle';
$color = 'success';
} else {
$label = 'Custom';
$color = 'danger';
}
//////////////////////////////////////////////////////////////////
    // On enrichit le template puis on le restitue.
$args = array(
// Nom de l'Extension - 1
$this->Expansion->getName(),
// Description de la Compétence - 2
$this->WpPost->getPostContent(),
// Lien de navigation - 3
'',//$this->getNavLinks(),
// Infos sur les Survivants / Dalles / Missions... - 4
$this->Expansion->getBean()->getExpansionDetails(),
// Mais aussi les cartes Equipements et Invasion... - 5
$this->Expansion->getBean()->getCardsDetails(),
// Badge officiel ou Custom - 6
$this->getBalise(self::TAG_SPAN, $label, array(self::ATTR_CLASS=>'badge badge-'.$color)),
);
return $this->getRender($this->urlTemplate, $args);
}
}
<file_sep>/core/bean/AdminPageExpansionsBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* AdminPageExpansionsBean
* @author Hugues
* @since 1.04.30
* @version 1.05.12
*/
class AdminPageExpansionsBean extends AdminPageBean
{
protected $tplHomeCheckCard = 'web/pages/admin/fragments/home-check-card.php';
protected $urlExpansionListing = 'web/pages/admin/expansion-listing.php';
protected $urlAdminEdit = 'web/pages/admin/expansion-edit.php';
/**
* Class Constructor
*/
public function __construct($urlParams='')
{
$this->urlParams = $urlParams;
parent::__construct(self::CST_EXPANSION);
$this->title = 'Extensions';
$this->ExpansionServices = new ExpansionServices();
$this->MissionServices = new MissionServices();
$this->SurvivorServices = new SurvivorServices();
}
/**
* @param array $urlParams
* @return $Bean
*/
public function getSpecificContentPage()
{
if (isset($this->urlParams[self::FIELD_ID])) {
$this->Expansion = $this->ExpansionServices->selectExpansion($this->urlParams[self::FIELD_ID]);
}
    if (!empty($_POST)) {
$this->dealWithPost();
}
switch ($this->urlParams[self::CST_POSTACTION]) {
case 'confirmEdit' :
case self::CST_EDIT :
return $this->getEditContentPage();
break;
default :
return $this->getListContentPage();
break;
}
}
private function dealWithPost()
{
if ($this->urlParams[self::CST_POSTACTION]=='confirmEdit') {
// On ne met à jour via cette interface que les données suivantes :
// Le nombre de Survivants.
$this->Expansion->setNbSurvivants($_POST[self::FIELD_NBSURVIVANTS]);
// Le nombre de Missions.
$this->Expansion->setNbMissions($_POST[self::FIELD_NBMISSIONS]);
// Une fois fait, on peut sauvegarder les modifications.
$this->ExpansionServices->updateExpansion($this->Expansion);
}
}
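  /**
   * Renders the edit form for the current Expansion, with the expected numbers of Survivors
   * and Missions computed from the database for comparison.
   * @return string
   */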
public function getEditContentPage()
{
$args = array(
self::FIELD_EXPANSIONID => $this->Expansion->getId(),
);
// Nombre de Survivants attendus
$ExpectedSurvivors = $this->SurvivorServices->getSurvivorsWithFilters($args);
$nbExpectedSurvivors = count($ExpectedSurvivors);
$strExpectedSurvivors = '';
while (!empty($ExpectedSurvivors)) {
$Survivor = array_shift($ExpectedSurvivors);
$strExpectedSurvivors .= $Survivor->getBean()->getCartouche();
}
// Nb de Missions attendues
$ExpectedMissions = $this->MissionServices->getMissionsByExpansionId($this->Expansion->getId());
$nbExpectedMissions = count($ExpectedMissions);
//////////////////////////////////////////////////////////////////////////
// On enrichit le template
$args = array(
// L'identifiant de l'extension - 1
$this->Expansion->getId(),
// Le code de l'extension - 2
$this->Expansion->getCode(),
// Le nom de l'extension - 3
$this->Expansion->getName(),
// Le rang d'affichage de l'extension - 4
$this->Expansion->getDisplayRank(),
// Le nombre de Survivants de l'extension - 5
$this->Expansion->getNbSurvivants(),
// Le nombre théorique de Survivants de l'extension - 6
$nbExpectedSurvivors,
// Le nombre de Missions de l'extension - 7
$this->Expansion->getNbMissions(),
// Le nombre théorique de Missions - 8
$nbExpectedMissions,
// L'extension est-elle officielle ? - 9
($this->Expansion->isOfficial() ? self::CST_CHECKED : ''),
// La liste des Survivants en question - 10
$strExpectedSurvivors,
);
// Puis on le restitue.
return $this->getRender($this->urlAdminEdit, $args);
}
public function getListContentPage()
{
$strRows = '';
$nbPerPage = 15;
$curPage = $this->initVar(self::WP_CURPAGE, 1);
$orderby = $this->initVar(self::WP_ORDERBY, self::FIELD_NAME);
$order = $this->initVar(self::WP_ORDER, self::ORDER_ASC);
$Expansions = $this->ExpansionServices->getExpansionsWithFilters(array(), $orderby, $order);
$nbElements = count($Expansions);
$nbPages = ceil($nbElements/$nbPerPage);
$curPage = max(1, min($curPage, $nbPages));
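    // Worked example: 23 expansions at 15 per page => 2 pages; a requested page of 3 is
    // clamped to 2, and the array_slice() below then returns items 16 to 23.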
$DisplayedExpansions = array_slice($Expansions, ($curPage-1)*$nbPerPage, $nbPerPage);
if (!empty($DisplayedExpansions)) {
foreach ($DisplayedExpansions as $Expansion) {
$ExpansionBean = new ExpansionBean($Expansion);
$strRows .= $ExpansionBean->getRowForAdminPage();
}
}
$queryArg = array(
self::CST_ONGLET => self::CST_EXPANSION,
self::WP_ORDERBY => $orderby,
self::WP_ORDER => $order
);
// Pagination
    $post_status = '';
    $strPagination = $this->getPagination($queryArg, $post_status, $curPage, $nbPages, $nbElements);
$args = array(
// Liste des extensions affichées - 1
$strRows,
// Filtres - 2
'',
// Url pour créer une nouvelle Extension - 3
'/wp-admin/post-new.php',
// Subs - 4
'',
// Pagination - 5
$strPagination,
);
return $this->getRender($this->urlExpansionListing, $args);
}
/**
* @return string
*/
public function getCheckCard()
{
/////////////////////////////////////////////////
// Gestion des Extensions.
// On récupère la liste des Extensions qui ont un Article. Puis les données dans la base. On compare et on effectue un diagnostic.
$Act = new ExpansionActions();
$strBilan = $Act->dealWithExpansionVerif();
$args = array(
// Le titre de la carte - 1
$this->title,
// L'id du container de retour pour afficher les vérifications - 2
self::CST_EXPANSION,
// Le contenu du container de vérification - 3
$strBilan,
);
return $this->getRender($this->tplHomeCheckCard, $args);
}
}
<file_sep>/core/domain/MissionOnline.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe MissionOnline
* @author Hugues.
* @since 1.11.01
* @version 1.11.01
*/
class MissionOnline extends WpPostRelais
{
protected $urlDirLiveMissions = 'web/rsc/missions/live/';
public function __construct($Mission=null)
{
parent::__construct();
$this->MissionServices = new MissionServices();
$this->WpPostServices = new WpPostServices();
$this->EquipmentExpansionServices = new EquipmentExpansionServices();
if ($Mission==null) {
$this->openMissionFile();
$codeMission = $this->objXmlDocument->attributes()['code'];
$Missions = $this->MissionServices->getMissionsWithFilters(array(self::FIELD_CODE=>$codeMission));
$Mission = array_shift($Missions);
}
$this->Mission = $Mission;
$this->WpPost = $Mission->getWpPost();
}
private function openMissionFile()
{
$this->fileId = $_SESSION['zombieKey'];
$this->fileName = PLUGIN_PATH.$this->urlDirLiveMissions.$this->fileId.".mission.xml";
$this->objXmlDocument = simplexml_load_file($this->fileName);
}
private function saveMissionFile()
{
$this->objXmlDocument->asXML($this->fileName);
}
public function setUp()
{
// On ouvre le fichier pour pouvoir le modifier.
$this->openMissionFile();
// On récupère les Custom Fields liées à la Mission
$WpPosts = $this->WpPostServices->getWpPostsByCustomField(self::FIELD_MISSIONID, $this->WpPost->getID());
while (!empty($WpPosts)) {
// On les parcourt.
$WpPost = array_shift($WpPosts);
$hasCategory = false;
// On en récupère les Catégories
$WpCategories = $WpPost->getCategories();
while (!empty($WpCategories)) {
$WpCategory = array_shift($WpCategories);
if ($WpCategory->getCatId()==self::WP_CAT_RULE_ID) {
// On cherche les Catégories de type MissionRule
$hasCategory = true;
}
}
if ($hasCategory) {
// Si on en une, on récupère la donnée Méta Code associée
$metaValue = $WpPost->getPostMeta(self::FIELD_CODE);
if ($metaValue!='') {
// Et on vérifie que rien n'est à faire dans le Set Up.
$this->dealWithSetUpRule($metaValue);
}
}
}
$this->setUpSpawns();
$this->setUpItems();
// On sauvegarde les éventuels changements.
$this->saveMissionFile();
// Et on mélange.
// Besoin de sauvegarder avant car l'action de remélange réouvre le fichier...
$args = array(
self::CST_AJAXACTION => 'updateLiveMission',
'uniqid' => $_SESSION['zombieKey'],
'act' => 'shuffle',
'type' => 'Spawn',
);
LiveMissionActions::dealWithStatic($args);
// Besoin de sauvegarder avant car l'action de remélange réouvre le fichier...
$args['type'] = 'Item';
LiveMissionActions::dealWithStatic($args);
}
public function deleteSpawns()
{
    // On récupère l'intervalle actuel avant de supprimer le noeud.
    $interval = (string) $this->objXmlDocument->xpath('//spawns')[0]->attributes()['interval'];
    // On supprime réellement le noeud <spawns> (unset sur l'auto-référence le retire du document).
    $Spawns = $this->objXmlDocument->xpath('//spawns')[0];
    unset($Spawns[0]);
    // Et on recrée le noeud, avec le bon intervalle.
    $Spawns = $this->objXmlDocument->addChild('spawns');
    $Spawns->addAttribute('interval', $interval);
}
public function setUpSpawns($interval='')
{
// Si l'intervalle n'est pas défini, on va le chercher dans le fichier.
if ($interval=='') {
$interval = $this->objXmlDocument->xpath('//spawns')[0]->attributes()['interval'];
} else {
// S'il est défini, on va le sauvegarder dans le fichier.
$this->objXmlDocument->xpath('//spawns')[0]->attributes()['interval'] = $interval;
}
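    // Same interval grammar as LiveMissionActions::initAction(): ranges with an optional
    // "xN" multiplier, comma-separated (e.g. "1-6x2,7-18"); see the sketch there.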
$intervals = explode(',', $interval);
$rank = 1;
foreach ($intervals as $interval) {
      list($interval, $multi) = array_pad(explode('x', $interval), 2, '');
      list($start, $end) = array_pad(explode('-', $interval), 2, '');
if ($multi=='') {
$multi = 1;
}
if ($end=='') {
$end = $start;
}
for ($i=1; $i<=$multi; $i++) {
for ($j=$start; $j<=$end; $j++) {
$spawn = $this->objXmlDocument->spawns->addChild('spawn');
$spawn->addAttribute('id', 'spawn-'.$rank);
$spawn->addAttribute('src', 'x'.str_pad($j, 3, 0, STR_PAD_LEFT));
$spawn->addAttribute('rank', $rank);
$spawn->addAttribute('status', 'deck');
$rank++;
}
}
}
}
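  /**
   * Builds the Equipment deck for the season referenced in the save file, one <item> node per
   * physical card, split between the start/pimp/combo/deck piles.
   * For illustration (an assumption about the naming scheme, derived from the str_pad calls
   * below): equipment card id 7 of season 1 would get src "00701".
   */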
public function setUpItems()
{
$season = $this->objXmlDocument->xpath('//items')[0]->attributes()['season'];
$ItemExpansions = $this->EquipmentExpansionServices->getEquipmentExpansionsWithFilters(array(self::FIELD_EXPANSIONID=>$season));
$rank = 1;
while (!empty($ItemExpansions)) {
$ItemExpansion = array_shift($ItemExpansions);
$qte = $ItemExpansion->getQuantity();
$Item = $ItemExpansion->getEquipment();
for ($i=0; $i<$qte; $i++) {
$item = $this->objXmlDocument->items->addChild('item');
$item->addAttribute('id', 'item-'.$rank);
$item->addAttribute('src', str_pad($Item->getId(), 3, 0, STR_PAD_LEFT).str_pad($season, 2, 0, STR_PAD_LEFT));
$item->addAttribute('rank', $rank);
if ($Item->isStarter()) {
$item->addAttribute('status', 'start');
} elseif ($Item->isPimp()) {
$item->addAttribute('status', 'pimp');
} elseif ($Item->hasKeyword('Composite')) {
$item->addAttribute('status', 'combo');
} else {
$item->addAttribute('status', 'deck');
}
$rank++;
}
}
}
private function dealWithSetUpRule($metaValue)
{
// Les Règles qui débutent par "AMONG_RED" permettent de mélanger des objectifs de couleur parmi les rouges.
if (substr($metaValue, 0, 9)=='AMONG_RED') {
$arrColors = explode('_', $metaValue);
for ($i=2; $i<count($arrColors); $i++) {
$lstElements = $this->objXmlDocument->xpath('//map/chips/chip[@type="Objective"][@color="red"]');
$nbElements = count($lstElements);
$rnd = random_int(0, $nbElements-1);
$this->objXmlDocument->xpath('//map/chips/chip[@type="Objective"][@color="red"]')[$rnd]->attributes()['color'] = strtolower($arrColors[$i]);
}
}
}
}
<file_sep>/core/actions/LocalActions.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* LocalActions
* @author Hugues
* @since 1.04.00
* @version 1.04.00
*/
class LocalActions extends GlobalActions implements ConstantsInterface
{
/**
* Class Constructor
*/
public function __construct()
{
}
/**
* Retourne une chaine json
* @param string $msg
* @param string $id
* @param boolean $directReturn
* @return string
*/
protected function jsonString($msg, $id, $directReturn)
{
$content = '"'.$id.'":'.json_encode($msg);
return ($directReturn ? '{'.$content.'}' : $content);
}
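  /*
   * Illustration (a sketch, not part of the original API contract): with
   *   $msg = array(array('tchat-new', '<li>ok</li>')), $id = 'lstElements', $directReturn = true
   * jsonString() returns
   *   {"lstElements":[["tchat-new","<li>ok<\/li>"]]}
   * which the front-end is assumed to decode and apply element by element.
   */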
/**
* @return bool
*/
public static function isAdmin()
{ return current_user_can('manage_options'); }
}
<file_sep>/core/domain/Level.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe Level
* @author Hugues.
* @version 1.0.00
* @since 1.0.00
*/
class Level extends LocalDomain
{
/**
* Id technique de la donnée
* @var int $id
*/
protected $id;
/**
* nom du niveau de Difficulté
* @var string $name
*/
protected $name;
/**
* @return int
*/
public function getId()
{ return $this->id; }
/**
* @return string
*/
public function getName()
{ return $this->name; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id = $id; }
/**
* @param string $name
*/
public function setName($name)
{ $this->name = $name; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('Level'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return Level
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new Level(), self::getClassVars(), $row); }
/**
* @return LevelBean
*/
public function getBean()
{ return new LevelBean($this); }
}
<file_sep>/core/domain/EquipmentWeaponProfile.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe EquipmentWeaponProfile
* @author Hugues.
* @since 1.0.00
* @version 1.04.28
*/
class EquipmentWeaponProfile extends LocalDomain
{
/**
* Id technique de la jointure
* @var int $id
*/
protected $id;
/**
* Id technique de la carte Equipement
* @var int $equipmentCardId
*/
protected $equipmentCardId;
/**
* Id technique du profil de l'arme
* @var int $weaponProfileId
*/
protected $weaponProfileId;
/**
* L'arme est-elle bruyante
* @var int $noisy
*/
protected $noisy;
/**
* @return int
*/
public function getId()
{ return $this->id; }
/**
   * @return int
*/
public function getEquipmentCardId()
{ return $this->equipmentCardId; }
/**
   * @return int
*/
public function getWeaponProfileId()
{ return $this->weaponProfileId; }
/**
* @return int
*/
public function isNoisy()
{ return $this->noisy; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id=$id; }
/**
* @param int $equipmentCardId
*/
public function setEquipmentCardId($equipmentCardId)
{ $this->equipmentCardId = $equipmentCardId; }
/**
* @param int $weaponProfileId
*/
public function setWeaponProfileId($weaponProfileId)
{ $this->weaponProfileId = $weaponProfileId; }
/**
* @param int $noisy
*/
public function setNoisy($noisy)
{ $this->noisy = $noisy; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('EquipmentWeaponProfile'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return EquipmentWeaponProfile
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new EquipmentWeaponProfile(), self::getClassVars(), $row); }
/**
* @return WeaponProfile
*/
public function getWeaponProfile()
{
if ($this->WeaponProfile==null) {
$this->WeaponProfile = $this->WeaponProfileServices->select(__FILE__, __LINE__, $this->weaponProfileId);
}
return $this->WeaponProfile;
}
}
<file_sep>/core/services/ObjectiveServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe ObjectiveServices
* @author Hugues.
* @since 1.04.08
* @version 1.04.27
*/
class ObjectiveServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
   * @var ObjectiveDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new ObjectiveDaoImpl();
}
/**
* @param array $arrFilters
*/
private function buildFilters($arrFilters)
{
$this->arrParams[self::SQL_WHERE] = array();
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_CODE));
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_DESCRIPTION));
}
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getObjectivesWithFilters($arrFilters=array(), $orderby=self::FIELD_CODE, $order=self::ORDER_ASC)
{
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
$this->buildFilters($arrFilters);
return $this->Dao->selectEntriesWithFilters(__FILE__, __LINE__, $this->arrParams);
}
/**
* @param string $value
* @param string $prefix
* @param string $classe
* @param bool $multiple
* @param string $defaultLabel
* @return string
*/
public function getObjectiveSelect($value='', $prefix='id', $classe='form-control', $multiple=false, $defaultLabel='---')
{
$Objectives = $this->getObjectivesWithFilters();
$arrSetLabels = array();
foreach ($Objectives as $Objective) {
$arrSetLabels[$Objective->getId()] = $Objective->getCode();
}
$this->labelDefault = $defaultLabel;
$this->classe = $classe;
$this->multiple = $multiple;
    return $this->getSetSelect(__FILE__, __LINE__, $arrSetLabels, $prefix.'objectiveId', $value);
}
}
<file_sep>/core/bean/UtilitiesBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe UtilitiesBean
* @author Hugues
* @since 1.02.00
* @version 1.05.09
*/
class UtilitiesBean implements ConstantsInterface
{
/**
* @param string $balise
* @param string $label
* @param array $attributes
* @return string
*/
public function getBalise($balise, $label='', $attributes=array())
{ return '<'.$balise.$this->getExtraAttributesString($attributes).'>'.$label.'</'.$balise.'>'; }
/**
* @param array $attributes
   * @return string
*/
private function getExtraAttributesString($attributes)
{
$extraAttributes = '';
if (!empty($attributes)) {
foreach ($attributes as $key => $value) {
$extraAttributes .= ' '.$key.'="'.$value.'"';
}
}
return $extraAttributes;
}
/**
* @param array $attributes
* @return string
*/
protected function getIcon($attributes=array())
{ return $this->getBalise(self::TAG_I, '', $attributes); }
/**
* @return string
*/
protected function getIconFarCheckSquare()
{ return $this->getIcon(array(self::ATTR_CLASS=>'far fa-check-square')); }
/**
* @return string
*/
protected function getIconFarSquare()
{ return $this->getIcon(array(self::ATTR_CLASS=>'far fa-square')); }
/**
* @return string
*/
protected function getIconFarSquarePointer()
{ return $this->getIcon(array(self::ATTR_CLASS=>'far fa-square pointer')); }
/**
* @return string
*/
protected function getIconFarWindowClose()
{ return $this->getIcon(array(self::ATTR_CLASS=>'far fa-window-close')); }
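  /*
   * Note on templates (illustration with assumed content, not an actual template file):
   * getRender() below feeds $args straight into vsprintf(), so a template containing
   *   <h1>%1$s</h1><p>%2$s</p>
   * rendered with array('Title', 'Body text') produces <h1>Title</h1><p>Body text</p>.
   * The numbered comments ("... - 1", "... - 2") before the $args arrays elsewhere in the
   * plugin refer to these positional placeholders.
   */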
/**
* @param string $urlTemplate
* @param array $args
* @return string
*/
public function getRender($urlTemplate, $args)
{
$pattern = "/web\/pages\/(admin|public)\/[fragments\/]?/";
return (preg_match($pattern, $urlTemplate) ? vsprintf(file_get_contents(PLUGIN_PATH.$urlTemplate), $args) : '');
}
}
<file_sep>/core/daoimpl/MissionDaoImpl.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe MissionDaoImpl
* @author Hugues.
* @since 1.0.00
* @version 1.04.30
*/
class MissionDaoImpl extends LocalDaoImpl
{
/**
* Class constructor
*/
public function __construct()
{ parent::__construct('Mission'); }
/**
* @param array $rows
* @return array
*/
protected function convertToArray($rows)
{
$Items = array();
if (!empty($rows)) {
foreach ($rows as $row) {
$Items[] = Mission::convertElement($row);
}
}
return $Items;
}
/**
* @param string $file
* @param int $line
* @param array $arrParams
* @return array|Mission
*/
public function select($file, $line, $arrParams)
{ return parent::localSelect($file, $line, $arrParams, new Mission()); }
/**
* @param string $file
* @param int $line
* @param array $arrParams
* @param array $filters
* @return array
*/
public function selectEntriesWithFiltersIn($file, $line, $arrParams, $filters)
{
// On s'appuie sur la requête de base.
$requete = $this->selectRequest.$this->fromRequest;
// On doit faire une jointure externe pour lier la table mission_expansion si on cherche sur ce critère
if (isset($filters[self::FIELD_EXPANSIONID])) {
$requete .= 'INNER JOIN wp_11_zombicide_mission_expansion me ON m.id=me.missionId ';
}
// On passe ensuite aux critères de sélection.
$requete .= 'WHERE 1=1 ';
// Contrainte sur la difficulté
if (isset($filters[self::FIELD_LEVELID])) {
$requete .= 'AND levelId IN ('.$filters[self::FIELD_LEVELID].') ';
}
// Contrainte sur la durée
if (isset($filters[self::FIELD_DURATIONID])) {
$requete .= 'AND durationId IN ('.$filters[self::FIELD_DURATIONID].') ';
}
// Contrainte sur le nombre de joueurs
if (isset($filters[self::FIELD_PLAYERID])) {
$requete .= 'AND playerId IN ('.implode(',', $filters[self::FIELD_PLAYERID]).') ';
}
// Contrainte sur l'origine
if (isset($filters[self::FIELD_ORIGINEID])) {
$requete .= 'AND origineId IN ('.implode(',', $filters[self::FIELD_ORIGINEID]).') ';
}
// Contrainte sur l'extension
if (isset($filters[self::FIELD_EXPANSIONID])) {
if (strpos($filters[self::FIELD_EXPANSIONID], ',')===false) {
$requete .= 'AND expansionId = '.$filters[self::FIELD_EXPANSIONID].' ';
} else {
$requete .= 'AND expansionId IN ('.$filters[self::FIELD_EXPANSIONID].') ';
}
}
// On peut aussi trier
$requete .= $this->orderBy;
// Et retourner le tableau de résultats.
return $this->convertToArray($this->selectEntriesAndLogQuery($file, $line, $requete, $arrParams));
}
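  /**
   * @param int $expansionId
   * @return array
   */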
public function selectMissionsByExpansionId($expansionId)
{
$request = "SELECT DISTINCT zm.id FROM `wp_11_zombicide_tile` zt ";
$request .= "INNER JOIN wp_11_zombicide_mission_tile zmt ON zmt.tileId = zt.id ";
$request .= "INNER JOIN wp_11_zombicide_mission zm ON zmt.missionId = zm.id ";
$request .= "WHERE zt.expansionId='$expansionId' ";
$request .= "ORDER BY zm.title ASC;";
return MySQL::wpdbSelect($request);
}
}
<file_sep>/core/services/DurationServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe DurationServices
* @author Hugues.
* @since 1.04.16
* @version 1.04.27
*/
class DurationServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
* @var DurationDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new DurationDaoImpl();
}
/**
* @param array $arrFilters
*/
private function buildFilters($arrFilters)
{
$this->arrParams[self::SQL_WHERE] = array();
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_MINDURATION));
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_MAXDURATION));
}
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getDurationsWithFilters($arrFilters=array(), $orderby=self::FIELD_MINDURATION, $order=self::ORDER_ASC)
{
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
$this->buildFilters($arrFilters);
return $this->Dao->selectEntriesWithFilters(__FILE__, __LINE__, $this->arrParams);
}
/**
* @param int $id
* @return Duration
*/
public function selectDuration($id)
{ return $this->select(__FILE__, __LINE__, $id); }
}
<file_sep>/core/services/EquipmentWeaponProfileServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe EquipmentWeaponProfileServices
* @author Hugues.
* @since 1.0.00
* @version 1.04.27
*/
class EquipmentWeaponProfileServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
* @var EquipmentWeaponProfileDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new EquipmentWeaponProfileDaoImpl();
}
/**
* @param array $arrFilters
*/
private function buildFilters($arrFilters)
{
$this->arrParams[self::SQL_WHERE] = array();
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_EQUIPMENTCARDID));
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_WEAPONPROFILEID));
}
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getEquipmentWeaponProfilesWithFilters($arrFilters=array(), $orderby='id', $order='asc')
{
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
$this->buildFilters($arrFilters);
return $this->Dao->selectEntriesWithFilters(__FILE__, __LINE__, $this->arrParams);
}
}
<file_sep>/core/services/SpawnTypeServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe SpawnTypeServices
* @author Hugues.
* @since 1.04.27
* @version 1.04.27
*/
class SpawnTypeServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
* @var SpawnTypeDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new SpawnTypeDaoImpl();
}
}
<file_sep>/core/domain/Mission.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe Mission
* @author Hugues.
* @since 1.04.00
* @version 1.05.14
*/
class Mission extends WpPostRelais
{
/**
* Id technique de la donnée
* @var int $id
*/
protected $id;
/**
* Titre de la Mission
* @var string $title
*/
protected $title;
/**
* Code de la donnée
* @var string $code
*/
protected $code;
/**
* Id de la difficulté de la Mission
* @var int $levelId
*/
protected $levelId;
/**
* Id du nb de joueurs de la Mission
* @var int $playerId
*/
protected $playerId;
/**
* Id de la durée de la Mission
* @var int $durationId
*/
protected $durationId;
/**
* Id de l'origine de la Mission
* @var int $origineId
*/
protected $origineId;
/**
* Nombre de dalles en largeur
* @var int $width
*/
protected $width;
/**
* Nombre de dalles en hauteur
* @var int $height
*/
protected $height;
/**
   * La mission a-t-elle été publiée ?
* @var int $published
*/
protected $published;
/**
* La mission peut-elle être jouée en ligne ?
* @var int $liveAble
*/
protected $liveAble;
/**
* @return int
*/
public function getId()
{return $this->id; }
/**
* @return string
*/
public function getTitle()
{ return $this->title; }
/**
* @return string
*/
public function getCode()
{ return $this->code; }
/**
* @return int
*/
public function getLevelId()
{ return $this->levelId; }
/**
* @return int
*/
public function getPlayerId()
{ return $this->playerId; }
/**
* @return int
*/
public function getDurationId()
{ return $this->durationId; }
/**
* @return int
*/
public function getOrigineId()
{ return $this->origineId; }
/**
* @return int
*/
public function getWidth()
{ return $this->width; }
/**
* @return int
*/
public function getHeight()
{ return $this->height; }
/**
* @return boolean
*/
public function isPublished()
{ return ($this->published==1); }
/**
* @return boolean
*/
public function isLiveAble()
{ return ($this->liveAble==1); }
/**
* @param int $id
*/
public function setId($id)
{ $this->id=$id; }
/**
* @param string $title
*/
public function setTitle($title)
{ $this->title=$title; }
/**
* @param string $code
*/
public function setCode($code)
{ $this->code=$code; }
/**
* @param int $levelId
*/
public function setLevelId($levelId)
{ $this->levelId=$levelId; }
/**
* @param int $playerId
*/
public function setPlayerId($playerId)
{ $this->playerId=$playerId; }
/**
* @param int $durationId
*/
public function setDurationId($durationId)
{ $this->durationId=$durationId; }
/**
* @param int $origineId
*/
public function setOrigineId($origineId)
{ $this->origineId=$origineId; }
/**
* @param int $width
*/
public function setWidth($width)
{ $this->width=$width; }
/**
* @param int $height
*/
public function setHeight($height)
{ $this->height=$height; }
/**
* @param boolean $published
*/
public function setPublished($published)
{ $this->published=$published; }
/**
* @param boolean $liveAble
*/
public function setLiveAble($liveAble)
{ $this->liveAble=$liveAble; }
///////////////////////////////////////////////////////////////
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('Mission'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return Mission
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new Mission(), self::getClassVars(), $row); }
/**
* @return MissionBean
*/
public function getBean()
{ return new MissionBean($this); }
///////////////////////////////////////////////////////////////
/**
* @return string
*/
public function getWpPost()
{ return $this->getMainWpPost(self::FIELD_CODE, $this->code, self::WP_CAT_MISSION_ID); }
/**
* @param string $orderBy
* @param string $order
* @return array MissionTile
*/
public function getMissionTiles($orderBy='id', $order='asc')
{
if ($this->MissionTiles == null) {
$arrFilters = array(self::FIELD_MISSIONID=>$this->id);
$this->MissionTiles = $this->MissionTileServices->getMissionTilesWithFilters($arrFilters, $orderBy, $order);
}
return $this->MissionTiles;
}
/**
* @return array MissionZone
*/
public function getMissionZones()
{
if ($this->MissionZones == null) {
$arrFilters = array(self::CST_MISSIONID=>$this->id);
$this->MissionZones = $this->MissionZoneServices->getMissionZonesWithFilters(__FILE__, __LINE__, $arrFilters);
}
return $this->MissionZones;
}
/**
* @return array MissionRule
*/
public function getMissionRules($orderBy='id')
{
if ($this->MissionRules == null) {
$arrFilters = array(self::FIELD_MISSIONID=>$this->id);
$this->MissionRules = $this->MissionRuleServices->getMissionRulesWithFilters($arrFilters, $orderBy);
}
return $this->MissionRules;
}
/**
* @return array MissionObjective
*/
public function getMissionObjectives($oBy='id')
{
if ($this->MissionObjectives == null) {
$arrF = array(self::FIELD_MISSIONID=>$this->id);
$this->MissionObjectives = $this->MissionObjectiveServices->getMissionObjectivesWithFilters($arrF, $oBy);
}
return $this->MissionObjectives;
}
/**
* @return array MissionExpansion
*/
public function getMissionExpansions()
{
if ($this->MissionExpansions == null) {
$arrFilters = array(self::FIELD_MISSIONID=>$this->id);
$this->MissionExpansions = $this->MissionExpansionServices->getMissionExpansionsWithFilters($arrFilters);
}
return $this->MissionExpansions;
}
/**
* @return Duration
*/
public function getDuration()
{
if ($this->Duration == null) {
$this->Duration = $this->DurationServices->selectDuration($this->durationId);
}
return $this->Duration;
}
/**
* @return Level
*/
public function getLevel()
{
if ($this->Level == null) {
$this->Level = $this->LevelServices->selectLevel($this->levelId);
}
return $this->Level;
}
/**
* @return Origine
*/
public function getOrigine()
{
if ($this->Origine==null) {
$this->Origine = $this->OrigineServices->selectOrigine($this->origineId);
}
return $this->Origine;
}
/**
* @return Player
*/
public function getPlayer()
{
if ($this->Player == null) {
$this->Player = $this->PlayerServices->selectPlayer($this->playerId);
}
return $this->Player;
}
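  /**
   * @return string
   */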
public function getImgUrl()
{ return '/wp-content/plugins/hj-zombicide/web/rsc/img/missions/'.$this->getCode().'-Mission.png'; }
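  /**
   * @return string
   */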
public function getThumbUrl()
{ return '/wp-content/plugins/hj-zombicide/web/rsc/img/missions/'.$this->getCode().'-Thumb.png'; }
/**
* @return string
*/
public function getStrRules()
{
$MissionRules = $this->getMissionRules(self::FIELD_TITLE);
$strList = '';
if (!empty($MissionRules)) {
foreach ($MissionRules as $MissionRule) {
$strList .= ($strList!='' ? '<br>' : '');
$strList .= '<span class="objRule">'.$MissionRule->getTitle().' <span class="tooltip">';
$strList .= '<header>'.$MissionRule->getRuleCode().'</header>';
$strList .= '<content>'.$MissionRule->getRuleDescription().'</content></span></span> ';
}
}
return $strList;
}
/**
* @return string
*/
public function getStrObjectives()
{
$MissionObjectives = $this->getMissionObjectives(self::FIELD_TITLE);
$strList = '';
if (!empty($MissionObjectives)) {
foreach ($MissionObjectives as $MissionObjective) {
$strList .= ($strList!='' ? '<br>' : '');
$strList .= '<span class="objRule">'.$MissionObjective->getTitle().' <span class="tooltip">';
$strList .= '<header>'.$MissionObjective->getObjectiveCode().'</header>';
$strList .= '<content>'.$MissionObjective->getObjectiveDescription().'</content></span></span> ';
}
}
return $strList;
}
/**
* @return string
*/
public function getStrTiles()
{
$MissionTiles = $this->getMissionTiles();
$strName = '';
while (!empty($MissionTiles)) {
$MissionTile = array_shift($MissionTiles);
if ($strName!='') {
$strName .= ', ';
}
$strName .= $MissionTile->getTile()->getCode();
}
return $strName;
}
/**
* @return string
*/
public function getStrExpansions()
{
$MissionExpansions = $this->getMissionExpansions();
$strName = '';
if (!empty($MissionExpansions)) {
foreach ($MissionExpansions as $MissionExpansion) {
if ($strName!='') {
$strName .= ', ';
}
$strName .= $MissionExpansion->getExpansion()->getName();
}
}
return $strName;
}
/**
* @return string
*/
public function getStrDifPlaDur()
{ return $this->getStrDifficulty().' / '.$this->getStrNbJoueurs().' / '.$this->getStrDuree(); }
/**
* @return string
*/
public function getStrDuree()
{ return $this->getDuration()->getStrDuree(); }
/**
* @return string
*/
public function getStrDifficulty()
{ return $this->getLevel()->getName(); }
/**
* @return string
*/
public function getStrNbJoueurs()
{ return $this->getPlayer()->getNbJoueurs(); }
/**
* @return string
*/
public function getStrOrigine()
{ return $this->getOrigine()->getName(); }
/**
* @param bool $isHome
* @return string
*/
public function getStrClassFilters($isHome)
{
    $strClassFilters = 'player-'.$this->playerId.' ';
$strClassFilters .= 'duration-'.$this->durationId.' ';
$strClassFilters .= 'level-'.$this->levelId.' ';
return $strClassFilters.' col-12 col-sm-6 col-md-4';
}
/**
* @param int $x
* @param int $y
* @return MissionTile
*/
public function getMissionTile($x, $y)
{
$MissionTiles = $this->getMissionTiles();
if (!empty($MissionTiles)) {
foreach ($MissionTiles as $MissionTile) {
if ($MissionTile->getCoordX()==$x && $MissionTile->getCoordY()==$y) {
return $MissionTile;
}
}
}
return new MissionTile();
}
/**
* @param int $x
* @param int $y
* @return int
*/
public function getTileId($x, $y)
{ return $this->getMissionTile($x, $y)->getTileId(); }
/**
* @param int $x
* @param int $y
* @return string
*/
public function getTileCode($x, $y)
{ return $this->getMissionTile($x, $y)->getTileCode(); }
/**
* @param int $x
* @param int $y
* @return string
*/
public function getTileCodeAndOrientation($x, $y)
{ return $this->getTileCode($x, $y).'-'.$this->getTileOrientation($x, $y); }
/**
* @param int $x
* @param int $y
* @return string
*/
public function getTileOrientation($x, $y)
{ return $this->getMissionTile($x, $y)->getOrientation(); }
/**
* @param array $MissionExpansions
*/
public function setMissionExpansions($MissionExpansions)
{ $this->MissionExpansions = $MissionExpansions; }
/**
* @param array $post
* @return bool
*/
public function updateWithPost($post)
{
$doUpdate = false;
$arr = array(self::FIELD_TITLE, self::FIELD_CODE, self::CST_LEVELID, self::CST_DURATIONID, self::CST_PLAYERID, self::CST_ORIGINEID);
while (!empty($arr)) {
$key = array_shift($arr);
$value = stripslashes($post[$key]);
if ($this->{$key} != $value) {
$doUpdate = true;
$this->{$key} = $value;
}
}
return $doUpdate;
}
/**
* @param array $post
* @return bool
*/
public function initWithPost($post)
{
$doInsert = true;
$arr = array(self::FIELD_TITLE, self::FIELD_CODE, self::CST_LEVELID, self::CST_DURATIONID, self::CST_PLAYERID, self::CST_ORIGINEID);
while (!empty($arr)) {
$key = array_shift($arr);
if ($post[$key] == '') {
$doInsert = false;
} else {
$this->{$key} = stripslashes($post[$key]);
}
}
return $doInsert;
}
/**
* @param int $ruleId
* @return boolean
*/
public function hasRule($ruleId)
{
$hasRule = false;
$MissionRules = $this->getMissionRules();
while (!empty($MissionRules)) {
$MissionRule = array_shift($MissionRules);
if ($MissionRule->getRuleId()==$ruleId) {
$hasRule = true;
}
}
return $hasRule;
}
/**
* @return int
*/
public function getStartingMissionZoneId()
{
return 14;
}
/**
* @param Live $Live
* @param array $LiveSurvivors
*/
public function addStandardStartingEquipment($Live, $LiveSurvivors)
{
shuffle($LiveSurvivors);
// On va checker les éventuelles règles qui perturbent cette distribution.
if ($this->hasRule(2)) {
// On ne distribue que jusqu'à 3 Poêles aux Survivants. id de la Poêle : 27
$cpt=0;
while (!empty($LiveSurvivors) && $cpt<3) {
$LiveSurvivor = array_shift($LiveSurvivors);
$args = array(self::CST_LIVESURVIVORID=>$LiveSurvivor->getId());
$EquipmentLiveDecks = $this->EquipmentLiveDeckServices->getEquipmentLiveDecksWithFilters(__FILE__, __LINE__, $args);
$rk = count($EquipmentLiveDecks);
$args = array(
self::CST_LIVEID=>$Live->getId(),
self::CST_EQUIPMENTCARDID=>27,
'rank'=>$rk,
self::CST_STATUS=>'E',
self::CST_LIVESURVIVORID=>$LiveSurvivor->getId()
);
$EquipmentLiveDeck = new EquipmentLiveDeck($args);
$this->EquipmentLiveDeckServices->insert(__FILE__, __LINE__, $EquipmentLiveDeck);
$cpt++;
}
} else {
// On vérifie l'extension rattachée à la Mission et en fonction on donnera du matériel.
}
}
}
<file_sep>/core/actions/ToolActions.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
 * ToolActions
* @author Hugues
* @since 1.05.09
* @version 1.08.01
*/
class ToolActions extends LocalActions
{
/**
* Constructeur
*/
public function __construct($post=array())
{
parent::__construct();
$this->post = $post;
$this->TileServices = new TileServices();
$this->debug = '';
}
/**
* Point d'entrée des méthodes statiques.
* @param array $post
* @return string
**/
public static function dealWithStatic($post)
{
$returned = '';
$Act = new ToolActions($post);
if ($post[self::CST_AJAXACTION]==self::AJAX_GETTHROWDICE) {
$returned = $Act->dealWithThrowDice();
} elseif ($post[self::CST_AJAXACTION]==self::AJAX_GETRANDOMMAP) {
$returned = $Act->dealWithRandomMap();
} elseif ($post[self::CST_AJAXACTION]=='getBuildingMap') {
$returned = $Act->dealWithBuildingMap();
} elseif ($post[self::CST_AJAXACTION]=='getNonUsedTiles') {
$returned = $Act->dealWithNonUsedTiles();
} elseif ($post[self::CST_AJAXACTION]=='getEmptyCell') {
$returned = $Act->getEmptyCell();
} elseif ($post[self::CST_AJAXACTION]=='getImageMap') {
$returned = $Act->getImageMap();
} else {
      $returned = 'Erreur dans ToolActions > dealWithStatic, '.$post[self::CST_AJAXACTION].' inconnu.';
}
return $returned;
}
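  /**
   * Assemble l'image PNG de la map à partir des dalles verrouillées et retourne son URL.
   * @return string
   */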
public function getImageMap()
{
$this->initPost();
$urlTarget = '/wp-content/plugins/hj-zombicide/web/rsc/img/maps/'.date('Y-m-d_H:i:s').'-map.png';
$targetPath = getcwd().'/..'.$urlTarget;
$im = @imagecreatetruecolor(500*$this->width, 500*$this->height);
for ($row=0; $row<$this->height; $row++) {
for ($col=0; $col<$this->width; $col++) {
if (isset($this->lockedMapTiles[$row][$col])) {
$MapTile = $this->lockedMapTiles[$row][$col];
$srcFile = getcwd().'/../wp-content/plugins/hj-zombicide/web/rsc/img/tiles/'.$MapTile->getTile()->getCode().'-500px.png';
$imgTile = @imagecreatefrompng($srcFile);
switch ($MapTile->getOrientation()) {
case 'left' :
$angle = 90;
break;
case 'bottom' :
$angle = 180;
break;
case 'right' :
$angle = 270;
break;
default :
$angle = 0;
break;
}
$rotate = imagerotate($imgTile, $angle, 0);
imagecopymerge($im, $rotate, 500*$col, 500*$row, 0, 0, 500, 500, 100);
}
}
}
imagepng($im, $targetPath);
imagedestroy($im);
return $urlTarget;
}
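  /**
   * @return string
   */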
private function getStrEmptyCell()
{
$result = '<div class="cell hidden cellModel"><img alt="Non définie">';
$result .= '<nav class="hoverActions nav nav-fill nav-pills"><i class="nav-item far fa-check-square fakeCb" data-cell="cell_0_0"></i>';
$result .= '<i class="nav-item fas fa-unlock fakeLock" data-lock="cell_0_0"></i><i class="nav-item fas fa-cog"></i>';
return $result.'<i class="nav-item fas fa-undo"></i><i class="nav-item fas fa-redo"></i></nav></div>';
}
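  /**
   * @return string
   */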
public function getEmptyCell()
{ return $this->jsonString($this->getStrEmptyCell(), 'empty-cell', true); }
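  /**
   * Initialise les attributs de l'objet à partir des paramètres postés
   * (dimensions, extensions, cellules et dalles verrouillées).
   */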
private function initPost()
{
//////////////////////////////////////////////////////////////////////
// Initialisation des variables
$params = $this->post['params'];
$arrParams = explode('&', $params);
while (!empty($arrParams)) {
$param = array_shift($arrParams);
list($key, $value) = explode('=', $param);
$this->{$key} = $value;
}
$this->expansionIds = str_replace('set-', '', $this->expansionIds);
$this->cellIds = explode(',', $this->cells);
// On initialise la MapTile.
$this->lockedMapTiles = array();
$this->lockedOCodes = array();
$locks = explode(',', $this->locks);
while (!empty($locks)) {
$lock = array_shift($locks);
list($label, $row, $col, $code, $orientation) = explode('_', $lock);
$Tiles = $this->TileServices->getTilesWithFilters(array(self::FIELD_CODE=>$code));
$Tile = array_shift($Tiles);
$MapTile = new MapTile($Tile, $orientation, true);
$this->lockedMapTiles[$row][$col] = $MapTile;
$this->debug .= 'On a une Tile : '.$Tile->getCode().'<br>';
array_push($this->lockedOCodes, $Tile->getCode());
array_push($this->lockedOCodes, $Tile->getOCode());
}
}
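  /**
   * @return string
   */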
public function dealWithNonUsedTiles()
{
$this->initPost();
// L'actuelle :
list($label, $row, $col, $code, $orientation) = explode('_', $this->current);
$Tiles = $this->TileServices->getTilesWithFilters(array(self::FIELD_CODE=>$code));
$Tile = array_shift($Tiles);
$code = $Tile->getCode();
$oCode = $Tile->getOCode();
$arrTmp = array_keys($this->lockedOCodes, $code);
unset($this->lockedOCodes[$arrTmp[0]]);
$arrTmp = array_keys($this->lockedOCodes, $oCode);
unset($this->lockedOCodes[$arrTmp[0]]);
$Bean = new LocalBean();
//////////////////////////////////////////////////////////////////////
// On récupère les Dalles que l'on veut pouvoir utiliser..
$Tiles = $this->TileServices->getTilesWithFiltersIn(array(self::FIELD_EXPANSIONID=>$this->expansionIds));
$lstTiles = '';
while (!empty($Tiles)) {
$Tile = array_shift($Tiles);
if (in_array($Tile->getCode(), $this->lockedOCodes)) {
continue;
}
// On défini les paramètres de base des différentes balises.
$argsDiv = array(
self::ATTR_CLASS => 'cell',
'style' => 'display: inline-block;',
);
$args = array(
'data-row' => $row,
'data-col' => $col,
'data-src' => $Tile->getImgUrl(),
'data-code' => $Tile->getCode(),
'src' => $Tile->getImgUrl(),
);
// On affiche les 4 positions
for ($i=0; $i<4; $i++) {
$this->getNextOrientation();
$MapTile = new MapTile($Tile, $this->orientation);
$isCompatible = $MapTile->isCompatibleV2($this->lockedMapTiles, $row, $col);
if ($isCompatible) {
$args['data-orientation'] = $this->orientation;
$args[self::ATTR_CLASS] = $this->orientation;
$lstTiles .= $Bean->getBalise(self::TAG_DIV, $Bean->getBalise(self::TAG_IMG, '', $args), $argsDiv);
}
}
}
$result = '<section class="displayMap proposals" style="width:500px;">'.$lstTiles.'</section>';
return $this->jsonString($result, self::PAGE_GENERATION_MAP, true);
}
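  /**
   * @return string
   */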
public function dealWithBuildingMap()
{
$this->initPost();
unset($this->version);
$this->post['params'] = str_replace('&version=2', '', $this->post['params']);
$this->dealWithRandomMap();
//////////////////////////////////////////////////////////////////////
$result = '<div class="overlay"><div class="spinner"></div></div>';
$result .= $this->getStrEmptyCell();
$result .= '<section class="displayMap travaux map'.$this->width.'x'.$this->height.' mapWidth'.$this->width.' mb-2">';
for ($i=0; $i<$this->height; $i++) {
$result .= '<div class="row">';
for ($j=0; $j<$this->width; $j++) {
$result .= '<div class="cell">';
if (!in_array('cell_'.$i.'_'.$j, $this->cellIds)) {
if (isset($this->MapTiles[$i][$j])) {
$MapTile = $this->MapTiles[$i][$j];
$Tile = $MapTile->getTile();
$orientation = $MapTile->getOrientation();
$isLocked = $MapTile->isLocked();
$result .= '<img class="'.$orientation.'" src="'.$Tile->getImgUrl().'">';
$dataLock = 'cell_'.$i.'_'.$j.'_'.$Tile->getCode().'_'.$orientation;
          } else {
            $result .= '<img alt="Non définie"/>';
            $dataLock = '';
            $isLocked = false;
}
$htmlContent = '<i class="nav-item far fa-check-square fakeCb" data-cell="cell_'.$i.'_'.$j.'"></i>';
$htmlContent .= '<i class="nav-item fas fa-'.($isLocked ? '' : 'un').'lock fakeLock" data-lock="'.$dataLock.'"></i>';
} else {
$result .= '<img alt="Non sélectionnée"/>';
$htmlContent = '<i class="nav-item far fa-square fakeCb" data-cell="cell_'.$i.'_'.$j.'"></i>';
$htmlContent .= '<i class="nav-item fas fa-unlock fakeLock" data-lock=""></i>';
}
$htmlContent .= '<i class="nav-item fas fa-cog"></i>';
$htmlContent .= '<i class="nav-item fas fa-undo"></i>';
$htmlContent .= '<i class="nav-item fas fa-redo"></i>';
$result .= '<nav class="hoverActions nav nav-fill nav-pills">'.$htmlContent.'</nav>';
$result .= '</div>';
}
$result .= '</div>';
}
$result .= '</section>';
//////////////////////////////////////////////////////////////////////////////////////////////////////
$result = '<section id="page-generation-map" class="row">'.$result.'</section>';
if (self::isAdmin()) {
$result .= $this->debug;
}
return $this->jsonString($result, self::PAGE_GENERATION_MAP, true);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
// Bloc de gestion de génération aléatoire de map
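  /**
   * @return string
   */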
public function dealWithRandomMap()
{
//////////////////////////////////////////////////////////////////////
// Initialisation des variables
$params = $this->post['params'];
$arrParams = explode('&', $params);
while (!empty($arrParams)) {
$param = array_shift($arrParams);
list($key, $value) = explode('=', $param);
$this->{$key} = $value;
}
if (isset($this->version) && $this->version==2) {
return $this->dealWithBuildingMap();
}
$result = '<section class="displayMap map'.$this->width.'x'.$this->height.' mapWidth'.$this->width.' mb-2">';
$this->expansionIds = str_replace('set-', '', $this->expansionIds);
//////////////////////////////////////////////////////////////////////
// On récupère les Dalles que l'on veut pouvoir utiliser. Et on les mélange.
$Tiles = $this->TileServices->getTilesWithFiltersIn(array(self::FIELD_EXPANSIONID=>$this->expansionIds));
// Puis on construit la Map aléatoire.
$this->nbTestsGlobals = 0;
$this->maxTestGlobals = ($this->expansionIds=='' ? 1 : 500);
$content = '';
do {
$this->col = 0;
$this->row = 0;
$this->MapTiles = isset($this->lockedMapTiles) ? $this->lockedMapTiles : array();
$this->arrOCode = isset($this->lockedOCodes) ? $this->lockedOCodes : array();
shuffle($Tiles);
$this->pendingTiles = array();
$this->launchGeneration($Tiles);
} while (count($this->MapTiles)!=$this->width*$this->height && $this->nbTestsGlobals!=$this->maxTestGlobals && $this->row!=$this->height);
// Et on l'affiche.
$hasError = false;
$nbDalles = 0;
for ($i=0; $i<$this->height; $i++) {
for ($j=0; $j<$this->width; $j++) {
if (isset($this->MapTiles[$i][$j])) {
$MapTile = $this->MapTiles[$i][$j];
$Tile = $MapTile->getTile();
$orientation = $MapTile->getOrientation();
$result .= '<img class="'.$orientation.'" src="'.$Tile->getImgUrl().'">';
$this->debug .= '['.$Tile->getCode().'/'.$orientation.']';
$nbDalles++;
} else {
$hasError = true;
}
}
}
if ($hasError) {
$content = '<div class="alert alert-danger"><h4 class="alert-heading">Une erreur est survenue durant la génération</h4>';
$content .= 'Ca peut arriver sur de grandes maps ou lorsque le nombre de Dalles disponibles est très proche de la taille de la Map.<br>';
$content .= 'Si dans la ligne de debug ci-dessous, le nombre en face de "Tests Global" est <strong>'.$this->nbTestsGlobals.'</strong>, c\'est que vous avez atteint le nombre de tentatives. Réessayez !';
$content .= '<hr><p>'.date('H:i:s').' - Dimensions : LxH ['.$this->width.';'.$this->height.'] - Nb de Dalles : '.$nbDalles.', Rebut : '.count($this->pendingTiles).', Tests Global : '.$this->nbTestsGlobals.'</p></div>';
}
$result .= $content.'</section>';
$result = '<section id="page-generation-map" class="row">'.$result;
$result .= '</section>';
return $this->jsonString($result, self::PAGE_GENERATION_MAP, true);
}
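  /**
   * Tente récursivement de placer les dalles restantes sur la map.
   * @param array $Tiles
   */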
private function launchGeneration($Tiles)
{
$this->nbTestsGlobals++;
$this->debug .= '-=-=-=-=- GLOBAL TEST ['.$this->nbTestsGlobals.'] -=-=-=-=-=-<br>';
$this->debug .= 'Row '.$this->row.' / Col '.$this->col.'<br>';
$this->nbTests = 0;
// Si on n'a plus de Dalles disponibles et si on a fait trop de tentatives, on quitte.
// Ou qu'on a rempli la Map
if (empty($Tiles) || $this->nbTestsGlobals==$this->maxTestGlobals || $this->row==$this->height) {
return;
}
// On prend la première Dalle
$Tile = array_shift($Tiles);
// Si le recto de cette Dalle a déjà utilisé dans la Map, on ne peut pas la reprendre.
if (in_array($Tile->getCode(), $this->arrOCode)) {
$this->debug .= 'Already used '.$Tile->getCode().'<br>';
// On passe à la Dalle suivante.
return $this->launchGeneration($Tiles);
}
do {
// On lui affecte une orientation
($this->nbTests==0 ? $this->getRandomOrientation() : $this->getNextOrientation());
// On défini la MapTile
if (isset($this->MapTiles[$this->row][$this->col])) {
$this->debug .= 'MapTile '.$this->row.';'.$this->col.' définie<br>';
$MapTile = $this->MapTiles[$this->row][$this->col];
} else {
$this->debug .= 'MapTile '.$this->row.';'.$this->col.' non définie<br>';
$MapTile = new MapTile($Tile, $this->orientation);
}
$isCompatible = true;
// On teste si elle est viable.
$this->debug .= 'Testing '.$Tile->getCode().'/'.$this->orientation.' ['.$this->nbTests.']<br>';
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Début des contrôles pour ajouter la Tile à la Map.
$isCompatible = $MapTile->isCompatibleV2($this->MapTiles, $this->row, $this->col);
$this->nbTests++;
if ($this->nbTests==4) {
break;
}
if ($isCompatible) {
break;
}
} while (true);
    // Soit la Dalle est compatible dans la Map, soit on a essayé les 4 côtés et ça n'a rien donné.
if (!$isCompatible) {
// On n'a pas réussi à ajouter cette Dalle, elle ne peut pas être positionnée à cet endroit de la Map.
$this->debug .= 'Failing to add '.$Tile->getCode().'/'.$this->orientation.'<br>';
// On met la Dalle de côté.
array_push($this->pendingTiles, $Tile);
// On passe à la Dalle suivante.
return $this->launchGeneration($Tiles);
} else {
// On ajoute la Dalle à la liste
$this->debug .= 'Adding '.$Tile->getCode().'/'.$this->orientation.'<br>';
// On ajout le code du recto pour l'exclure du champ des possibilités.
array_push($this->arrOCode, $Tile->getOCode());
$this->MapTiles[$this->row][$this->col] = $MapTile;
// On met à jour les coordonnées pour la prochaine Dalle
$this->upColAndRow();
// On reprend les Dalles mises de côté et on les met à la fin.
$Tiles = array_merge($Tiles, $this->pendingTiles);
// On réinitialise la liste des mises de côté.
$this->pendingTiles = array();
// On recherche la Dalle suivante
return $this->launchGeneration($Tiles);
}
}
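  /**
   * Passe à la cellule suivante (colonne puis ligne).
   */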
private function upColAndRow()
{
$this->col++;
if ($this->col==$this->width) {
$this->col=0;
$this->row++;
}
}
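  /**
   * Passe à l'orientation suivante (top, right, bottom, left).
   */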
private function getNextOrientation()
{
switch ($this->orientation) {
case 'top' :
$this->orientation = 'right';
break;
case 'right' :
$this->orientation = 'bottom';
break;
case 'bottom' :
$this->orientation = 'left';
break;
case 'left' :
$this->orientation = 'top';
break;
default :
$this->getRandomOrientation();
break;
}
}
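  /**
   * Tire une orientation au hasard.
   */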
private function getRandomOrientation()
{
$arrOrientation = array('top', 'right', 'bottom', 'left');
shuffle($arrOrientation);
$this->orientation = array_shift($arrOrientation);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
// Bloc de gestion du lancer de dés
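  /**
   * @return string
   */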
public function dealWithThrowDice()
{
$Bean = new UtilitiesBean();
$tag = self::TAG_SPAN;
$this->modif = 0;
$this->seuil = 4;
//////////////////////////////////////////////////////////////////////
// Initialisation des variables
$params = $this->post['params'];
$arrParams = explode('&', $params);
while (!empty($arrParams)) {
$param = array_shift($arrParams);
list($key, $value) = explode('=', $param);
$this->{$key} = $value;
}
// TODO : Prendre en compte "surunsix" et "dual".
    // Si on a un nombre dans Barbare / Mode Automatique, on prend le plus gros score entre le nombre de dés de l'arme et le nombre d'acteurs dans la Zone.
$this->nbDice = max($this->nbDice, $this->barabauto);
$arrDice = array();
for ($i=0; $i<$this->nbDice; $i++) {
$dice = rand(1, 6);
if ($dice==1) {
$color = self::COLOR_RED;
$dice = min(6,max(1,$dice+$this->modif));
} else {
$dice = min(6,max(1,$dice+$this->modif));
if ($dice>=6) {
$color = self::COLOR_BLUE;
$this->nbDice += $this->surunsix;
} elseif ($dice>=$this->seuil) {
$color = self::COLOR_YELLOW;
} else {
$color = self::COLOR_ORANGE;
}
}
$attributes = array(
self::ATTR_CLASS => 'badge badge-'.$color.'-skill',
);
array_push($arrDice, $Bean->getBalise($tag, $dice, $attributes));
}
$result = '';
while (!empty($arrDice)) {
$num = array_shift($arrDice);
$result .= '['.$num.']';
}
$result = '<section id="page-piste-de-des">Tirage '.date('d-m-Y H:i:s').' : '.$result.'</section>';
return $this->jsonString($result, self::PAGE_PISTE_DE_DES, true);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
}
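/*
 * Esquisse d'utilisation, purement illustrative : comment dealWithStatic() pourrait être
 * branché sur un hook Ajax de WordPress. Le nom du hook 'wp_ajax_zombicide_tool' et le
 * couple echo/wp_die() sont des hypothèses pour l'exemple, pas du code existant du plugin.
 *
 * add_action('wp_ajax_zombicide_tool', function () {
 *   // On délègue le traitement au point d'entrée statique puis on renvoie le JSON produit.
 *   echo ToolActions::dealWithStatic($_POST);
 *   wp_die();
 * });
 */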
<file_sep>/core/bean/WpPageSurvivorsBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPageSurvivorsBean
* @author Hugues
* @since 1.04.00
* @version 1.07.19
*/
class WpPageSurvivorsBean extends WpPageBean
{
protected $urlTemplate = 'web/pages/public/wppage-survivors.php';
/**
* Class Constructor
* @param WpPage $WpPage
*/
public function __construct($WpPage='')
{
parent::__construct($WpPage);
$this->SkillServices = new SkillServices();
$this->SurvivorServices = new SurvivorServices();
$this->ExpansionServices = new ExpansionServices();
}
/**
* On vérifie si on est ici pour traiter la page des Survivants, ou un Survivant en particulier.
* Pour le cas d'un Survivant, on retourne une WpPostSurvivorBean.
* @return string
*/
public function getContentPage()
{
$this->setFilters();
return $this->getListContentPage();
}
/**
* @return string
*/
public function getListContentPage()
{
/////////////////////////////////////////////////////////////////////////////
// On récupère la liste des Survivants puis les éléments nécessaires à la pagination.
if (!$this->isSkillSearched()) {
$Survivors = $this->SurvivorServices->getSurvivorsWithFilters($this->arrFilters, $this->colSort, $this->colOrder);
} else {
$Survivors = $this->SurvivorServices->getSurvivorsWithFiltersIn($this->arrFilters, $this->colSort, $this->colOrder);
}
$this->nbElements = count($Survivors);
$this->nbPages = ceil($this->nbElements/$this->nbperpage);
// On slice la liste pour n'avoir que ceux à afficher
$displayedSurvivors = array_slice($Survivors, $this->nbperpage*($this->paged-1), $this->nbperpage);
// On construit le corps du tableau
$strBody = '';
if (!empty($displayedSurvivors)) {
foreach ($displayedSurvivors as $Survivor) {
$strBody .= $Survivor->getBean()->getRowForPublicPage();
}
}
/////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////
// Affiche-t-on le filtre ?
$showFilters = isset($this->arrFilters[self::FIELD_NAME])&&$this->arrFilters[self::FIELD_NAME]!=''
|| isset($this->arrFilters[self::FIELD_EXPANSIONID])&&$this->arrFilters[self::FIELD_EXPANSIONID]!=''
|| $this->isSkillSearched();
/////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////
    // On enrichit le template puis on le restitue.
$args = array(
// Les lignes du tableau - 1
$strBody,
// On affiche le dropdown par pages - 2
$this->getDropdownNbPerPages(),
// On affiche la pagination - 3
$this->getNavPagination(),
// Affiche ou non le bloc filtre - 4
$showFilters ? 'block' : 'none',
// Si le Nom est renseigné - 5
$this->arrFilters[self::FIELD_NAME],
// Liste des Extensions - 6
$this->getExpansionFilters($this->arrFilters[self::FIELD_EXPANSIONID]),
// Liste des Compétences bleues - 7
$this->getBeanSkillFilters(self::COLOR_BLUE, $this->arrFilters['blue-skillId']),
// Liste des Compétences jaunes - 8
$this->getBeanSkillFilters(self::COLOR_YELLOW, $this->arrFilters['yellow-skillId']),
// Liste des Compétences oranges - 9
$this->getBeanSkillFilters(self::COLOR_ORANGE, $this->arrFilters['orange-skillId']),
// Liste des Compétences rouges - 10
$this->getBeanSkillFilters(self::COLOR_RED, $this->arrFilters['red-skillId']),
);
return $this->getRender($this->urlTemplate, $args);
}
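  /**
   * @param string $expansionId
   * @return string
   */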
private function getExpansionFilters($expansionId='')
{ return parent::getBeanExpansionFilters($expansionId, self::FIELD_EXPANSIONID); }
/**
   * @param array $post
   * @return string
*/
public function getRandomTeam($post)
{
// On récupère les paramètres nécessaires
$nbMax = $post['nbSurvSel'];
$arrValues = explode(',', $post['value']);
// On mélange les valeurs possibles
shuffle($arrValues);
$nb = 0;
$strReturned = '';
while (!empty($arrValues) && $nb<$nbMax) {
$value = array_shift($arrValues);
$Survivor = $this->SurvivorServices->selectSurvivor($value);
$strReturned .= $Survivor->getBean()->getRowForPublicPage();
$nb++;
}
return '<div id="page-selection-survivants">'.$strReturned.'</div>';
}
/**
* @param array $post
*/
public function setFilters($post=null)
{ parent::setBeanFilters($post, self::FIELD_NAME); }
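  /**
   * @return boolean
   */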
private function isSkillSearched()
{
return isset($this->arrFilters[self::COLOR_BLUE.'-'.self::FIELD_SKILLID])&&$this->arrFilters[self::COLOR_BLUE.'-'.self::FIELD_SKILLID]!=''
|| isset($this->arrFilters[self::COLOR_YELLOW.'-'.self::FIELD_SKILLID])&&$this->arrFilters[self::COLOR_YELLOW.'-'.self::FIELD_SKILLID]!=''
|| isset($this->arrFilters[self::COLOR_ORANGE.'-'.self::FIELD_SKILLID])&&$this->arrFilters[self::COLOR_ORANGE.'-'.self::FIELD_SKILLID]!=''
|| isset($this->arrFilters[self::COLOR_RED.'-'.self::FIELD_SKILLID])&&$this->arrFilters[self::COLOR_RED.'-'.self::FIELD_SKILLID]!='';
}
}
<file_sep>/core/actions/SurvivorActions.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* SurvivorActions
* @author Hugues
* @since 1.04.00
* @version 1.07.19
*/
class SurvivorActions extends LocalActions
{
/**
* Constructeur
*/
public function __construct($post=array())
{
parent::__construct();
$this->post = $post;
$this->ExpansionServices = new ExpansionServices();
$this->SurvivorServices = new SurvivorServices();
$this->WpPostServices = new WpPostServices();
}
/**
* Point d'entrée des méthodes statiques.
* @param array $post
* @return string
**/
public static function dealWithStatic($post)
{
$returned = '';
$Act = new SurvivorActions($post);
switch ($post[self::CST_AJAXACTION]) {
case self::AJAX_GETSURVIVORS :
$returned = $Act->dealWithGetSurvivors();
break;
case self::AJAX_GETRANDOMTEAM :
$returned = $Act->dealWithGetRandomTeam();
break;
case self::AJAX_SURVIVORVERIF :
$returned = $Act->dealWithSurvivorVerif(true);
break;
default :
$returned = '';
break;
}
return $returned;
}
/**
* Récupération du contenu de la page via une requête Ajax.
* @return string
*/
public function dealWithGetSurvivors()
{
$Bean = new WpPageSurvivorsBean();
$Bean->setFilters($this->post);
return $this->jsonString($Bean->getListContentPage(), self::PAGE_SURVIVOR, true);
}
/**
   * @return string
*/
public function dealWithGetRandomTeam()
{
$Bean = new WpPageSurvivorsBean();
return $this->jsonString($Bean->getRandomTeam($this->post), self::PAGE_SELECT_SURVIVORS, true);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
// Bloc de gestion de vérifications des Survivants en Home Admin
/**
* @param boolean $isVerif
* @return string
*/
public function dealWithSurvivorVerif($isVerif=false)
{
// On récupère les articles de survivants
$args = array(
self::WP_CAT => self::WP_CAT_SURVIVOR_ID,
self::WP_TAXQUERY => array(),
self::WP_POSTSTATUS => self::WP_PUBLISH.', future',
);
$this->WpPostSurvivors = $this->WpPostServices->getArticles($args);
$nbWpPostSurvivors = count($this->WpPostSurvivors);
// Et les Survivants en base
$this->Survivors = $this->SurvivorServices->getSurvivorsWithFilters();
$nbSurvivors = count($this->Survivors);
if ($isVerif) {
$this->checkSurvivors();
$strBilan = $this->jsonString($this->strBilan, self::AJAX_SURVIVORVERIF, true);
} elseif ($nbWpPostSurvivors!=$nbSurvivors) {
$strBilan = "Le nombre d'articles ($nbWpPostSurvivors) ne correspond pas au nombre de Survivants en base ($nbSurvivors).";
$strBilan .= "<br>Une vérification est vivement conseillée.";
} else {
$strBilan = "Le nombre d'articles ($nbWpPostSurvivors) correspond au nombre de Survivants en base.";
}
return $strBilan;
}
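  /**
   * Crée en base le Survivant correspondant à l'article courant.
   */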
private function insertSurvivor()
{
// Si on n'en a pas, on doit créer une entrée correspondante.
$Survivor = new Survivor();
$name = $this->WpPost->getPostTitle();
$Survivor->setName($name);
$description = $this->WpPost->getPostContent();
$description = substr($description, 25, -27);
$Survivor->setBackground($description);
$Survivor->setExpansionId($this->getExpansionId());
$arrProfiles = unserialize($this->WpPost->getPostMeta('profils'));
foreach ($arrProfiles as $value) {
switch ($value) {
case self::LBL_STANDARD :
$Survivor->setStandard(1);
break;
case self::LBL_ZOMBIVANT :
$Survivor->setZombivor(1);
break;
case self::LBL_ULTIMATE :
$Survivor->setUltimate(1);
break;
case self::LBL_ULTIMATEZOMBIVANT :
$Survivor->setUltimatez(1);
break;
default :
break;
}
}
$this->SurvivorServices->insertSurvivor($Survivor);
$this->strBilan .= '<br>Survivant créé en base : '.$name.'.';
}
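  /**
   * Vérifie la cohérence entre les articles de Survivants et les données en base.
   */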
private function checkSurvivors()
{
// On regarde les articles créés et on vérifie les données en base, si elles existent et si elles sont cohérentes entre elles.
while (!empty($this->WpPostSurvivors)) {
// On récupère le WpPost et ses données
$this->WpPost = array_shift($this->WpPostSurvivors);
$survivorId = $this->WpPost->getPostMeta(self::FIELD_SURVIVORID);
// On recherche un Survivant dans la base de données qui correspond.
$Survivor = $this->SurvivorServices->selectSurvivor($survivorId);
if ($Survivor->getId()=='') {
$this->insertSurvivor();
} else {
// Si on en a juste une, c'est tranquille.
$this->checkSurvivor($Survivor);
}
}
// Puis, on regarde les données en base et on vérifie que des articles ont été créés pour elles.
while (!empty($this->Survivors)) {
// On récupère l'extension.
$Survivor = array_shift($this->Survivors);
$args = array(
self::WP_METAKEY => self::FIELD_SURVIVORID,
self::WP_METAVALUE => $Survivor->getId(),
self::WP_TAXQUERY => array(),
self::WP_POSTSTATUS => 'publish, future',
self::WP_CAT => self::WP_CAT_SURVIVOR_ID,
);
$WpPost = $this->WpPostServices->getArticles($args);
if (empty($WpPost)) {
$this->strBilan .= '<br>Article à créer pour un Survivant : '.$Survivor->getName().' ['.$Survivor->toJson().'].';
}
}
if ($this->strBilan=='') {
$this->strBilan = 'Il semblerait que tout aille à la perfection. Aucune anomalie remontée.';
}
}
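  /**
   * @return int
   */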
private function getExpansionId()
{
$postId = $this->WpPost->getPostMeta(self::FIELD_EXPANSIONID);
$Wp_post = get_post($postId);
$WpPost = WpPost::convertElement($Wp_post);
$codeExpansion = $WpPost->getPostMeta(self::FIELD_CODE);
$Expansions = $this->ExpansionServices->getExpansionsWithFilters(array(self::FIELD_CODE=>$codeExpansion));
$Expansion = array_shift($Expansions);
return $Expansion->getId();
}
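  /**
   * @param Survivor $Survivor
   */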
private function checkSurvivor($Survivor)
{
$this->Survivor = $Survivor;
// On initialise les données
$this->doUpdate = false;
$name = $this->WpPost->getPostTitle();
$background = $this->WpPost->getPostContent();
$expansionId = $this->getExpansionId();
$this->arrProfils = unserialize($this->WpPost->getPostMeta('profils'));
// On vérifie si la donnée en base correspond à l'article.
if ($this->Survivor->getName()!=$name) {
$this->Survivor->setName($name);
$this->doUpdate = true;
}
if ($this->Survivor->getExpansionId()!=$expansionId) {
$this->Survivor->setExpansionId($expansionId);
      $this->strBilan .= '<br>Survivant mis à jour au niveau de l\'extension : '.$this->Survivor->getExpansionId().' - '.$expansionId.'.';
$this->doUpdate = true;
}
if ($this->Survivor->getBackground()!=$background && $background!='' ) {
$this->strBilan .= '<br>Background KO.';
$this->Survivor->setBackground($background);
$this->doUpdate = true;
}
$this->checkProfiles();
// Vérifications terminées, on fait la mise à jour si nécessaire.
if ($this->doUpdate) {
// Si nécessaire, on update en base.
$this->SurvivorServices->updateSurvivor($this->Survivor);
$this->strBilan .= '<br>Survivant mis à jour : '.$name.'.';
}
}
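  /**
   * Vérifie le profil Standard du Survivant courant.
   */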
private function checkStandardProfile()
{
if (!$this->Survivor->isStandard() && in_array(self::LBL_STANDARD, $this->arrProfils)) {
$this->Survivor->setStandard(1);
$this->doUpdate = true;
} elseif ($this->Survivor->isStandard() && !in_array(self::LBL_STANDARD, $this->arrProfils)) {
$this->Survivor->setStandard(0);
$this->doUpdate = true;
}
}
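  /**
   * Vérifie le profil Zombivant du Survivant courant.
   */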
private function checkZombivorProfile()
{
if (!$this->Survivor->isZombivor() && in_array(self::LBL_ZOMBIVANT, $this->arrProfils)) {
$this->Survivor->setZombivor(1);
$this->doUpdate = true;
} elseif ($this->Survivor->isZombivor() && !in_array(self::LBL_ZOMBIVANT, $this->arrProfils)) {
$this->Survivor->setZombivor(0);
$this->doUpdate = true;
}
}
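  /**
   * Vérifie les différents profils du Survivant courant.
   */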
private function checkProfiles()
{
if (isset($this->arrProfils)) {
// On vérifie le profil Standard
$this->checkStandardProfile();
// On vérifie le profil Zombivant
$this->checkZombivorProfile();
// On vérifie le profil Ultimate
if (!$this->Survivor->isUltimate() && in_array(self::LBL_ULTIMATE, $this->arrProfils)) {
$this->Survivor->setUltimate(1);
$this->doUpdate = true;
} elseif ($this->Survivor->isUltimate() && !in_array(self::LBL_ULTIMATE, $this->arrProfils)) {
$this->Survivor->setUltimate(0);
$this->doUpdate = true;
}
// On vérifie le profil Ultimate Zombivant
if (!$this->Survivor->isUltimatez() && in_array(self::LBL_ULTIMATEZOMBIVANT, $this->arrProfils)) {
$this->Survivor->setUltimatez(1);
$this->doUpdate = true;
} elseif ($this->Survivor->isUltimatez() && !in_array(self::LBL_ULTIMATEZOMBIVANT, $this->arrProfils)) {
$this->Survivor->setUltimatez(0);
$this->doUpdate = true;
}
}
}
// Fin du bloc relatif à la vérification des extensions sur la Home Admin.
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
}
<file_sep>/core/services/KeywordServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe KeywordServices
* @author Hugues.
* @since 1.0.00
* @version 1.04.28
*/
class KeywordServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
* @var KeywordDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new KeywordDaoImpl();
}
/**
* @param int $id
* @return Keyword
*/
public function selectKeyword($id)
{ return $this->select(__FILE__, __LINE__, $id); }
}
<file_sep>/core/services/SurvivorSkillServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe SurvivorSkillServices
* @author Hugues.
* @since 1.04.27
* @version 1.05.07
*/
class SurvivorSkillServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
   * @var SurvivorSkillDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new SurvivorSkillDaoImpl();
}
/**
* @param array $arrFilters
*/
private function buildFilters($arrFilters)
{
$this->arrParams[self::SQL_WHERE] = array();
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_SURVIVORID));
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_SKILLID));
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_SURVIVORTYPEID));
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_TAGLEVELID));
}
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getSurvivorSkillsWithFilters($arrFilters=array(), $orderby=null, $order=array(self::ORDER_ASC, self::ORDER_ASC))
{
if ($orderby==null) {
$orderby = array(self::FIELD_SURVIVORTYPEID, self::FIELD_TAGLEVELID);
}
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
$this->buildFilters($arrFilters);
return $this->Dao->selectEntriesWithFilters(__FILE__, __LINE__, $this->arrParams);
}
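  /**
   * @param array $arrFilters
   */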
public function deleteBulkSurvivorSkill($arrFilters)
{
$this->buildFilters($arrFilters);
return $this->Dao->deleteBulkEntriesWithFilters($this->arrParams);
}
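  /**
   * @param SurvivorSkill $SurvivorSkill
   */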
public function insertSurvivorSkill($SurvivorSkill)
{ $this->insert(__FILE__, __LINE__, $SurvivorSkill); }
}
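/*
 * Esquisse d'utilisation, purement illustrative : récupération des compétences d'un Survivant
 * pour un niveau donné. Les valeurs 12 et 1 sont fictives, et on suppose que les constantes
 * FIELD_* sont bien accessibles via la classe, comme dans le reste du plugin.
 *
 * $Services = new SurvivorSkillServices();
 * $arrFilters = array(
 *   SurvivorSkillServices::FIELD_SURVIVORID => 12,
 *   SurvivorSkillServices::FIELD_TAGLEVELID => 1,
 * );
 * $SurvivorSkills = $Services->getSurvivorSkillsWithFilters($arrFilters);
 */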
<file_sep>/core/domain/Keyword.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe Keyword
* @author Hugues.
 * @since 1.0.00
 * @version 1.0.00
*/
class Keyword extends LocalDomain
{
/**
* Id technique de la donnée
* @var int $id
*/
protected $id;
/**
* Nom du keyword
* @var string $name
*/
protected $name;
/**
* Description du keyword
* @var string $description
*/
protected $description;
/**
* @return int
*/
public function getId()
{return $this->id; }
/**
* @return string
*/
public function getName()
{ return $this->name; }
/**
* @return string
*/
public function getDescription()
{ return $this->description; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id=$id; }
/**
* @param string $name
*/
public function setName($name)
{ $this->name=$name; }
/**
* @param string $description
*/
public function setDescription($description)
{ $this->description=$description; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('Keyword'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return Keyword
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new Keyword(), self::getClassVars(), $row); }
/**
* @return KeywordBean
*/
public function getBean()
{ return new KeywordBean($this); }
}
<file_sep>/core/bean/WpPageToolsBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPageToolsBean
* @author Hugues
* @since 1.04.24
* @version 1.05.09
*/
class WpPageToolsBean extends WpPageBean
{
protected $urlTemplatePriorityOrder = 'web/pages/public/wppage-ordre-de-priorite.php';
protected $urlTemplatePisteDes = 'web/pages/public/wppage-pistededes.php';
protected $urlTemplateRandomMap = 'web/pages/public/wppage-generationmap.php';
protected $urlTemplateSelSurv = 'web/pages/public/wppage-selectsurvivors.php';
protected $urlTemplateRandomMapV2 = 'web/pages/public/wppage-generationmap-v2.php';
/**
* Class Constructor
* @param WpPage $WpPage
*/
public function __construct($WpPage='')
{
parent::__construct($WpPage);
$this->ExpansionServices = new ExpansionServices();
$this->SurvivorServices = new SurvivorServices();
}
/**
* @return string
*/
public function getSelectSurvivorsContent()
{
// Gestion du menu des Stand Alone et Extensions officielles
$arrExpansions = array(
'Saison 1' => array(23, 9, 10),
'Saison 2' => array(24, 11, 12),
'Saison 3' => array(25, 16, 17),
'Extensions' => array(4, 14, 19, 26),
);
$str = '';
$arrOfficiels = array();
foreach ($arrExpansions as $parent=>$arrChildren) {
$str .= '<div type="button" class="btn btn-dark btn-expansion-group"><span><i class="fa fa-chevron-circle-down"></i></span> '.$parent.'</div>';
while (!empty($arrChildren)) {
$childId = array_shift($arrChildren);
array_push($arrOfficiels, $childId);
$Expansion = $this->ExpansionServices->selectExpansion($childId);
$str .= $Expansion->getBean()->getButton(' btn-secondary hidden');
}
}
// Gestion des extensions fan-made
$strFanMade = '';
$Expansions = $this->ExpansionServices->getExpansionsWithFilters();
while (!empty($Expansions)) {
$Expansion = array_shift($Expansions);
$childId = $Expansion->getId();
if (in_array($childId, $arrOfficiels) || $Expansion->getNbSurvivants()==0) {
continue;
}
$strFanMade .= $Expansion->getBean()->getButton(' btn-secondary hidden');
}
// Gestion du menu des Survivants officiels
$Survivors = $this->SurvivorServices->getSurvivorsWithFilters();
$strSurvivors = '';
while (!empty($Survivors)) {
$Survivor = array_shift($Survivors);
$strSurvivors .= $Survivor->getBean()->getButton();
}
$args = array(
// Liste des Extensions Officielles et ses Survivants - 1
$str,
// Liste des cartouches de tous les Survivants - 2
$strSurvivors,
// Liste des extensions Fan-Made - 3
$strFanMade,
);
return $this->getRender($this->urlTemplateSelSurv, $args);
}
/**
* @return string
*/
public function getThrowSomeDiceContent()
{
$args = array(
'','','',
);
return $this->getRender($this->urlTemplatePisteDes, $args);
}
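  /**
   * @return string
   */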
public function getPriorityOrderContent()
{
$args = array(
// Le titre de la page - 1
$this->WpPage->getPostTitle(),
// Le contenu de la page - 2
$this->WpPage->getPostContent(),
);
return $this->getRender($this->urlTemplatePriorityOrder, $args);
}
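  /**
   * @return string
   */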
public function getRandomMapV2()
{
/////////////////////////////////////////////////////////////////////////////
// On récupère la liste de toutes les Extensions
$Expansions = $this->ExpansionServices->getExpansionsWithFilters(array(), self::FIELD_DISPLAYRANK);
$strExpansions = '';
$strIcon = $this->getBalise(self::TAG_SPAN, $this->getBalise(self::TAG_I, '', array(self::ATTR_CLASS=>'far fa-square')));
while (!empty($Expansions)) {
$Expansion = array_shift($Expansions);
// Si l'extension n'a pas de dalles, on passe à l'extension suivante.
if ($Expansion->getNbDalles()==0) {
continue;
}
// On en profite aussi pour construire le bloc de filtres.
$attributes = array(
self::ATTR_TYPE => 'button',
self::ATTR_CLASS => 'btn btn-expansion btn-dark',
self::ATTR_DATA_EXPANSIONID => $Expansion->getId(),
'data-nb-dalles' => $Expansion->getNbDalles(),
);
$strExpansions .= $this->getBalise(self::TAG_DIV, $strIcon.' '.$Expansion->getName(), $attributes);
}
//////////////////////////////////////////////////////////////////
    // On enrichit le template puis on le restitue.
$args = array(
// La liste des extensions - 1
$strExpansions
);
return $this->getRender($this->urlTemplateRandomMapV2, $args);
}
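  /**
   * @return string
   */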
public function getRandomMap()
{
/////////////////////////////////////////////////////////////////////////////
// On récupère la liste de toutes les Extensions
$Expansions = $this->ExpansionServices->getExpansionsWithFilters(array(), self::FIELD_DISPLAYRANK);
$strFilters = '';
$strSpawns = '';
while (!empty($Expansions)) {
$Expansion = array_shift($Expansions);
// Si l'extension n'a pas de dalles, on passe à l'extension suivante.
if ($Expansion->getNbDalles()==0) {
continue;
}
// On en profite aussi pour construire le bloc de filtres.
$strFilters .= $this->getBalise(self::TAG_OPTION, $Expansion->getName(), array(self::ATTR_VALUE => 'set-'.$Expansion->getId()));
}
//////////////////////////////////////////////////////////////////
    // On enrichit le template puis on le restitue.
$args = array(
// La liste des extensions - 1
$strFilters
);
return $this->getRender($this->urlTemplateRandomMap, $args);
}
}
<file_sep>/core/domain/MissionTile.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe MissionTile
* @author Hugues.
* @since 1.0.00
* @version 1.04.28
*/
class MissionTile extends LocalDomain
{
/**
* Id technique de la jointure
* @var int $id
*/
protected $id;
/**
* Id technique de la Mission
* @var int $missionId
*/
protected $missionId;
/**
* Id technique de la Tuile
* @var int $tileId
*/
protected $tileId;
/**
* Orientation de la Dalle sur la Mission
* @var string $orientation
*/
protected $orientation;
/**
* Coordonnées en abscisses
* @var int $coordX
*/
protected $coordX;
/**
* Coordonnée en ordonnées
* @var int $coordY
*/
protected $coordY;
/**
* @return int
*/
public function getId()
{return $this->id; }
/**
   * @return int
*/
public function getMissionId()
  { return $this->missionId; }
/**
   * @return int
*/
public function getTileId()
{ return $this->tileId; }
/**
* @return string
*/
public function getOrientation()
{ return $this->orientation; }
/**
* @return int
*/
public function getCoordX()
{ return $this->coordX; }
/**
* @return int
*/
public function getCoordY()
{ return $this->coordY; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id=$id; }
/**
* @param int $missionId
*/
public function setMissionId($missionId)
{ $this->missionId = $missionId; }
/**
* @param int $tileId
*/
public function setTileId($tileId)
{ $this->tileId = $tileId; }
/**
* @param string $orientation
*/
public function setOrientation($orientation)
{ $this->orientation=$orientation; }
/**
* @param int $coordX
*/
public function setCoordX($coordX)
{ $this->coordX=$coordX; }
/**
* @param int $coordY
*/
public function setCoordY($coordY)
{ $this->coordY=$coordY; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('MissionTile'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return MissionTile
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new MissionTile(), self::getClassVars(), $row); }
/**
* @return Tile
*/
public function getTile()
{
if ($this->Tile == null) {
$this->Tile = $this->TileServices->selectTile($this->tileId);
}
return $this->Tile;
}
}
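
/*
 * Hypothetical usage sketch (not part of the plugin): how a raw database row could be
 * turned into a MissionTile through the static convertElement() helper, assuming the
 * row keys match the protected property names declared above. The row values and the
 * hjz_example_ prefix are invented for illustration.
 */
function hjz_example_build_mission_tile()
{
    $row = array(
        'id'          => 1,
        'missionId'   => 42,
        'tileId'      => 7,
        'orientation' => 'N',
        'coordX'      => 0,
        'coordY'      => 1,
    );
    $MissionTile = MissionTile::convertElement($row);
    // The usual getters are then available on the hydrated object.
    return $MissionTile->getCoordX().'x'.$MissionTile->getCoordY();
}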
<file_sep>/core/bean/MainPageBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe MainPageBean
* @author Hugues
* @since 1.00.00
* @version 1.08.01
*/
class MainPageBean extends UtilitiesBean implements ConstantsInterface
{
/**
* Template pour afficher le header principal
* @var $tplMainHeaderContent
*/
public static $tplMainHeaderContent = 'web/pages/public/public-main-header.php';
/**
* Template pour afficher le footer principal
* @var $tplMainFooterContent
*/
public static $tplMainFooterContent = 'web/pages/public/public-main-footer.php';
/**
* Option pour cacher le Header et le footer.
* @var $showHeaderAndFooter
*/
public $showHeaderAndFooter = true;
/**
* La classe du shell pour montrer plus ou moins le haut de l'image de fond.
* @var $shellClass
*/
protected $shellClass;
/**
* Class Constructor
*/
public function __construct()
{
$this->lang = (substr($_SERVER['REQUEST_URI'], 0, 3) == '/en' ? 'en' : 'fr');
$this->WpPostServices = new WpPostServices();
}
/**
* @return string
*/
public function displayPublicFooter()
{
$args = array(admin_url('admin-ajax.php'));
return $this->getRender(self::$tplMainFooterContent, $args);
}
/////////////////////////// Fin gestion PublicFooter ///////////////////////////
/**
* @return string
*/
public function displayPublicHeader()
{
//////////////////////////////////////////////////////////////////////////////////
// Récupération des pages devant être affichées dans le menu du Header
$args = array(
self::WP_POSTTYPE => 'page',
self::WP_ORDERBY => 'menu_order',
self::WP_TAXQUERY => array(),
'post_parent' => 0,
);
$WpPosts = $this->WpPostServices->getArticles($args, false, 'WpPage');
// On construit le lien vers l'accueil
if ($this->lang=='fr') {
$label = $this->getBalise(self::TAG_SPAN, 'Accueil');
$strPages = $this->getBalise(self::TAG_A, $label, array(self::ATTR_HREF=>get_site_url()));
} else {
$label = $this->getBalise(self::TAG_SPAN, 'Home');
$strPages = $this->getBalise(self::TAG_A, $label, array(self::ATTR_HREF=>get_site_url().'/en/'));
}
// Pour chaque page, si le parent ne vaut pas 0, on passe au suivant...
while (!empty($WpPosts)) {
$WpPost = array_shift($WpPosts);
$strPages .= $this->addWpPageToMenu($WpPost);
}
//////////////////////////////////////////////////////////////////////////////////
// On enrichit le Template et on le restitue.
$args = array(
// Plus d'actualité - 1
'',
// Plus d'actualité - 2
'',
// Contenu du Menu à afficher, mais pas tout le temps (pleine page) - 3
($this->showHeaderAndFooter ? $strPages : '')
);
return $this->getRender(self::$tplMainHeaderContent, $args);
}
/**
* @param WpPage $WpPost
* @return string
*/
private function addWpPageToMenu($WpPost)
{
$strMenu = '';
// On récupère la WpPage qu'on veut afficher.
$WpPage = new WpPage($WpPost->getID());
$labelParent = $this->getBalise(self::TAG_SPAN, $WpPage->getPostTitle());
// On vérifie la présence d'enfants éventuels.
$Children = $WpPage->hasChildren();
if (empty($Children)) {
// S'il n'y en a pas, c'est un simple lien.
$strMenu = $this->getBalise(self::TAG_A, $labelParent, array(self::ATTR_HREF => $WpPage->getPermalink()));
} else {
// Sinon, on doit construire la liste des enfants pour le sous menu
$strSubMenus = '';
while (!empty($Children)) {
$Child = array_shift($Children);
if ($Child->getMenuOrder()==0 && !self::isAdmin()) {
// On n'affiche que les enfants ayant un OrderMenu différent de 0
continue;
}
$childLabel = $this->getBalise(self::TAG_SPAN, $Child->getPostTitle());
$childLink = $this->getBalise(self::TAG_A, $childLabel, array(self::ATTR_HREF=>$Child->getPermalink()));
$strSubMenus .= $this->getBalise(self::TAG_LI, $childLink);
}
$parentLink = $this->getBalise(self::TAG_A, $labelParent, array(self::ATTR_HREF=>'#'));
$listChildren = $this->getBalise(self::TAG_UL, $strSubMenus);
$strMenu .= $this->getBalise(self::TAG_SPAN, $parentLink.$listChildren, array(self::ATTR_CLASS => 'hasDropDown'));
}
return $strMenu;
}
/////////////////////////// Fin gestion PublicHeader ///////////////////////////
/**
* @return Bean
*/
public static function getPageBean()
{
if (is_front_page()) {
$returned = new WpPageHomeBean();
} else {
$scriptUrl = $_SERVER['REDIRECT_SCRIPT_URL'];
if (strpos($scriptUrl, '/tag/')!==false) {
$returned = new WpPageTagBean($scriptUrl);
} else {
$post = get_post();
if (empty($post)) {
// On a un problème (ou pas). On pourrait être sur une page avec des variables, mais qui n'est pas prise en compte.
$slug = str_replace('/', '', $_SERVER['REDIRECT_SCRIPT_URL']);
$args = array(
'name'=>$slug,
'post_type'=>'page',
'numberposts'=>1
);
$my_posts = get_posts($args);
$post = array_shift($my_posts);
}
if (empty($post)) {
// Toujours aucun article trouvé : on bascule sur la page d'erreur.
$returned = new WpPageError404Bean();
} elseif ($post->post_type == 'page') {
$returned = new WpPageBean($post);
} elseif ($post->post_type == 'post') {
$WpPostBean = new WpPostBean($post);
$returned = $WpPostBean->getBean();
} else {
$returned = new WpPageError404Bean();
}
}
}
return $returned;
}
/**
* @param array $addArg
* @param array $remArg
* @return string
*/
public function getQueryArg($addArg, $remArg=array())
{
$addArg['page'] = 'hj-zombicide/admin_manage.php';
$remArg[] = 'form';
$remArg[] = 'id';
return add_query_arg($addArg, remove_query_arg($remArg, 'http://zombicide.jhugues.fr/wp-admin/admin.php'));
}
/**
* @return bool
*/
public static function isAdmin()
{ return current_user_can('manage_options'); }
/**
* @return string
*/
public function getShellClass()
{ return $this->shellClass; }
/**
* @param string $id
* @param string $default
* @return mixed
*/
public function initVar($id, $default='')
{
if (isset($_POST[$id])) {
return $_POST[$id];
}
if (isset($_GET[$id])) {
return $_GET[$id];
}
return $default;
}
}
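
/*
 * Hypothetical usage sketch (not part of the plugin): how a theme template could rely
 * on the dispatcher above to render a full page. It assumes the bean returned by
 * getPageBean() exposes getContentPage(), as the WpPage*Bean classes of this plugin do.
 * The hjz_example_ prefix is invented for illustration.
 */
function hjz_example_render_current_page()
{
    $MainPageBean = new MainPageBean();
    // Header and footer are rendered here; getPageBean() picks the right bean
    // (home, tag, page, post or 404) for the current request.
    $header  = $MainPageBean->displayPublicHeader();
    $content = MainPageBean::getPageBean()->getContentPage();
    $footer  = $MainPageBean->displayPublicFooter();
    return $header.$content.$footer;
}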
<file_sep>/core/bean/WpPageError404Bean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPageError404Bean
* @author Hugues
* @since 1.0.00
* @version 1.07.20
*/
class WpPageError404Bean extends WpPageBean
{
/**
* @param WpPage $WpPage
* @return string
*/
public static function getStaticPageContent($WpPage='')
{
$Bean = new WpPageError404Bean($WpPage);
return $Bean->getContentPage();
}
/**
* @return string
*/
public function getContentPage()
{
return '<section id="page-live-spawn">Cette page ne peut pas être trouvée. Peut-être devriez vous retourner sur la Home.</section>';
}
}
<file_sep>/core/bean/AdminPageSurvivorsBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* AdminPageSurvivorsBean
* @author Hugues
* @since 1.05.01
* @version 1.05.10
*/
class AdminPageSurvivorsBean extends AdminPageBean
{
protected $urlFragmentSurvSkillTabContent = 'web/pages/admin/fragments/fragment-survivor-skills-tabcontent.php';
protected $tplHomeCheckCard = 'web/pages/admin/fragments/home-check-card.php';
protected $urlSurvivorListing = 'web/pages/admin/survivor-listing.php';
protected $urlSurvivorEdit = 'web/pages/admin/survivor-edit.php';
/**
* Class Constructor
*/
public function __construct($urlParams='')
{
$this->urlParams = $urlParams;
parent::__construct(self::CST_SURVIVOR);
$this->title = 'Survivants';
$this->ExpansionServices = new ExpansionServices();
$this->SkillServices = new SkillServices();
$this->SurvivorServices = new SurvivorServices();
$this->SurvivorSkillServices = new SurvivorSkillServices();
}
/**
* @return string
*/
public function getSpecificContentPage()
{
if (isset($this->urlParams[self::FIELD_ID])) {
$this->Survivor = $this->SurvivorServices->selectSurvivor($this->urlParams[self::FIELD_ID]);
}
if (isset($_POST)&&!empty($_POST)) {
$this->dealWithPost();
}
switch ($this->urlParams[self::CST_POSTACTION]) {
case 'confirmEdit' :
case self::CST_EDIT :
return $this->getEditContentPage();
default :
return $this->getListContentPage();
}
}
private function dealWithPost()
{
if ($this->urlParams[self::CST_POSTACTION]=='confirmEdit') {
// Faudrait supprimer les données existantes.
$SurvivorSkill = new SurvivorSkill();
$SurvivorSkill->setSurvivorId($this->Survivor->getId());
$this->SurvivorSkillServices->deleteBulkSurvivorSkill(array(self::FIELD_SURVIVORID=>$this->Survivor->getId()));
foreach ($_POST as $key=>$value) {
// On ignore les champs qui ne sont pas de la forme ss_{survivorTypeId}_{tagLevelId}.
if (substr_count($key, '_')!=2) {
continue;
}
list($check, $survivorTypeId, $tagLevelId) = explode('_', $key);
if ($check!='ss' || $value=='') {
continue;
}
$SurvivorSkill->setSkillId($value);
$SurvivorSkill->setSurvivorTypeId($survivorTypeId);
$SurvivorSkill->setTagLevelId($tagLevelId);
$this->SurvivorSkillServices->insertSurvivorSkill($SurvivorSkill);
}
}
}
public function getListContentPage()
{
//////////////////////////////////////////////////////////////////
// On récupère les filtres éventuels.
$argFilters = array();
$this->expansionId = isset($_POST[self::FIELD_EXPANSIONID]) ? $_POST[self::FIELD_EXPANSIONID] : '';
$argFilters[self::FIELD_EXPANSIONID] = $this->expansionId;
//////////////////////////////////////////////////////////////////
$strRows = '';
$nbPerPage = 10;
$orderby = $this->initVar(self::WP_ORDERBY, self::FIELD_NAME);
$order = $this->initVar(self::WP_ORDER, self::ORDER_ASC);
$Survivors = $this->SurvivorServices->getSurvivorsWithFilters($argFilters, $orderby, $order);
$nbElements = count($Survivors);
$nbPages = ceil($nbElements/$nbPerPage);
$curPage = $this->initVar(self::WP_CURPAGE, 1);
$curPage = max(1, min($curPage, $nbPages));
$DisplayedSurvivors = array_slice($Survivors, ($curPage-1)*$nbPerPage, $nbPerPage);
while (!empty($DisplayedSurvivors)) {
$Survivor = array_shift($DisplayedSurvivors);
$strRows .= $Survivor->getBean()->getRowForAdminPage();
}
$queryArg = array(
self::CST_ONGLET => self::CST_SURVIVOR,
self::WP_ORDERBY => $orderby,
self::WP_ORDER => $order
);
//////////////////////////////////////////////////////////////////
// Construction des filtres utilisés
$lstFiltres = '';
$Expansions = $this->ExpansionServices->getExpansionsWithFilters();
$lstFiltres .= '<select name="expansionId"><option value="">Toutes les extensions</option>';
while (!empty($Expansions)) {
$Expansion = array_shift($Expansions);
if ($Expansion->getNbSurvivants()==0) {
continue;
}
$lstFiltres .= '<option value="'.$Expansion->getId().'" '.($this->expansionId==$Expansion->getId() ? 'selected' : '').'>'.$Expansion->getName().'</option>';
}
$lstFiltres .= '</select>';
//////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////
// Pagination
$strPagination = $this->getPagination($queryArg, '', $curPage, $nbPages, $nbElements);
// Tris
$queryArg[self::WP_ORDERBY] = self::FIELD_NAME;
$queryArg[self::WP_ORDER] = ($orderby==self::FIELD_NAME && $order==self::ORDER_ASC ? self::ORDER_DESC : self::ORDER_ASC);
$urlSortTitle = $this->getQueryArg($queryArg);
//////////////////////////////////////////////////////////////////
$args = array(
// Liste des survivants affichés - 1
$strRows,
// Liste des Filtres utilisés - 2
$lstFiltres,
// Lien pour ajouter un nouveau Survivant - 3
'/wp-admin/post-new.php',
// 4
'',
// Pagination - 5
$strPagination,
// 6 & 7
'','',
// class pour le tri sur title - 8
($orderby==self::FIELD_NAME ? $order : self::ORDER_DESC),
// url pour le tri sur title - 9
$urlSortTitle,
'','','','','','','','','','','','','','','','','','','','','','',''
);
return $this->getRender($this->urlSurvivorListing, $args);
}
private function getOption($value, $name, $selection='')
{ return '<option value="'.$value.'"'.($value==$selection ? ' selected' : '').'>'.$name.'</option>'; }
private function getSkillSelect($name, $id='')
{
$strReturned = $this->getOption('', 'Aucune', $id);
$Skills = $this->Skills;
while (!empty($Skills)) {
$Skill = array_shift($Skills);
$strReturned .= $this->getOption($Skill->getId(), $Skill->getName(), $id);
}
return '<select name="'.$name.'">'.$strReturned.'</select>';
}
public function getEditContentPage()
{
$msgError = "Ce Survivant n'a pas de profil de ce type. Il n'est donc pas possible de sélectionner des compétences.";
$this->Skills = $this->SkillServices->getSkillsWithFilters();
$args = array(
// Le Survivant a-t-il un profil Standard ? - 1
(!$this->Survivor->isStandard() ? $msgError : $this->getListSelects(self::CST_SURVIVORTYPEID_S)),
// Le Survivant a-t-il un profil Zombivant ? - 2
(!$this->Survivor->isZombivor() ? $msgError : $this->getListSelects(self::CST_SURVIVORTYPEID_Z)),
// Le Survivant a-t-il un profil Ultimate ? - 3
(!$this->Survivor->isUltimate() ? $msgError : $this->getListSelects(self::CST_SURVIVORTYPEID_U)),
// Le Survivant a-t-il un profil Ultimate Zombivant ? - 4
(!$this->Survivor->isUltimatez() ? $msgError : $this->getListSelects(self::CST_SURVIVORTYPEID_UZ)),
// L'identifiant du Survivant - 5
$this->Survivor->getId(),
// Le nom du Survivant - 6
$this->Survivor->getName(),
// A-t-il un profil Standard ? - 7
($this->Survivor->isStandard() ? ' '.self::CST_CHECKED : ''),
// A-t-il un profil Zombivant ? - 8
($this->Survivor->isZombivor() ? ' '.self::CST_CHECKED : ''),
// A-t-il un profil Ultimate ? - 9
($this->Survivor->isUltimate() ? ' '.self::CST_CHECKED : ''),
// A-t-il un profil Ultimate Zombivant ? - 10
($this->Survivor->isUltimatez() ? ' '.self::CST_CHECKED : ''),
// Extension d'origine du Survivant - 11
$this->Survivor->getExpansion()->getName(),
// Liste des portraits - 12
$this->Survivor->getBean()->getAllPortraits(false),
);
return $this->getRender($this->urlSurvivorEdit, $args);
}
private function getListSelects($survivorTypeId)
{
$tagLevelIds = array('10', '11', '20', '30', '31', '40', '41', '42');
$args = array();
while (!empty($tagLevelIds)) {
$levelId = array_shift($tagLevelIds);
$name = 'ss_'.$survivorTypeId.'_'.$levelId;
array_push($args, $this->getSkillSelect($name, $this->Survivor->getSkill($survivorTypeId, $levelId)->getId()));
}
return $this->getRender($this->urlFragmentSurvSkillTabContent, $args);
}
/**
* @return string
*/
public function getCheckCard()
{
/////////////////////////////////////////////////
// Gestion des Survivants.
// On récupère la liste des Survivants qui ont un Article. Puis les données dans la base. On compare et on effectue un diagnostic.
$Act = new SurvivorActions();
$strBilan = $Act->dealWithSurvivorVerif();
$args = array(
// Le titre de la carte - 1
$this->title,
// L'id du container de retour pour afficher les vérifications - 2
self::CST_SURVIVOR,
// Le contenu du container de vérification - 3
$strBilan,
);
return $this->getRender($this->tplHomeCheckCard, $args);
}
}
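
/*
 * Illustrative sketch (not part of the plugin): the pagination arithmetic used in
 * getListContentPage() above, isolated on a plain array. Only ceil(), the page
 * clamping and array_slice() mirror the real code; the hjz_example_ prefix is invented.
 */
function hjz_example_paginate(array $items, $nbPerPage, $requestedPage)
{
    $nbElements = count($items);
    $nbPages = (int) ceil($nbElements / $nbPerPage);
    // Clamp the requested page between 1 and the last available page.
    $curPage = max(1, min($requestedPage, $nbPages));
    return array_slice($items, ($curPage - 1) * $nbPerPage, $nbPerPage);
}
// Example: hjz_example_paginate(range(1, 23), 10, 3) returns the last three items.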
<file_sep>/core/interface/ConstantsInterface.php
<?php
/**
* @author Hugues
* @since 1.00.00
* @version 1.07.25
*/
interface ConstantsInterface
{
/////////////////////////////////////////////////
// Icons : https://pngtree.com/free-icon/backpack-management_577946
// Action Ajax
const AJAX_ADDMORENEWS = 'addMoreNews';
const AJAX_CARDRIVE = 'carDrive';
const AJAX_CARIN = 'carIn';
const AJAX_CAROUT = 'carOut';
const AJAX_CARSWAP = 'carSwap';
const AJAX_ENDOFTURN = 'endOfTurn';
const AJAX_GETEXPANSIONS = 'getExpansions';
const AJAX_EXPANSIONVERIF = 'expansion-verif';
const AJAX_GETRANDOMMAP = 'getRandomMap';
const AJAX_GETMISSIONS = 'getMissions';
const AJAX_GETRANDOMTEAM = 'getRandomTeam';
const AJAX_GETSKILLS = 'getSkills';
const AJAX_GETSURVIVORS = 'getSurvivants';
const AJAX_GETTILES = 'getTiles';
const AJAX_MAKENOISE = 'makeNoise';
const AJAX_MELEEATTACK = 'meleeAttack';
const AJAX_MISSIONVERIF = 'mission-verif';
const AJAX_MOVE = 'move';
const AJAX_OPENDOOR = 'openDoor';
const AJAX_ORGANIZE = 'organize';
const AJAX_PAGED = 'paged';
const AJAX_POSTCHAT = 'postChat';
const AJAX_RANGEDATTACK = 'rangedAttack';
const AJAX_REFRESHCHAT = 'refreshChat';
const AJAX_SEARCH = 'search';
const AJAX_SKILLVERIF = 'skill-verif';
const AJAX_STARTTURN = 'startTurn';
const AJAX_SURVIVORVERIF = 'survivor-verif';
const AJAX_TRADE = 'trade';
const AJAX_GETTHROWDICE = 'getThrowDice';
/////////////////////////////////////////////////
// Attributs
const ATTR_ALT = 'alt';
const ATTR_CLASS = 'class';
const ATTR_HEIGHT = 'height';
const ATTR_HREF = 'href';
const ATTR_ID = 'id';
const ATTR_NAME = 'name';
const ATTR_SELECTED = 'selected';
const ATTR_SRC = 'src';
const ATTR_TITLE = 'title';
const ATTR_TYPE = 'type';
const ATTR_VALUE = 'value';
const ATTR_WIDTH = 'width';
// Data
const ATTR_DATA_AJAXACTION = 'data-ajaxaction';
const ATTR_DATA_DISPLAYNAME = 'data-displayname';
const ATTR_DATA_EXPANSIONID = 'data-expansion-id';
const ATTR_DATA_ID = 'data-id';
const ATTR_DATA_KEYDECK = 'data-keydeck';
const ATTR_DATA_PAGED = 'data-paged';
const ATTR_DATA_SURVIVORID = 'data-survivor-id';
const ATTR_DATA_TIMESTAMP = 'data-timestamp';
const ATTR_DATA_TYPE = 'data-type';
/////////////////////////////////////////////////
// Chat
const CHAT_CLEAN = '/clean';
const CHAT_EXIT = '/exit';
const CHAT_GAMES = '/games';
const CHAT_HELP = '/help';
const CHAT_INVITE = '/invite';
const CHAT_JOIN = '/join';
const CHAT_USERS = '/users';
const CHAT_ACTIVATEZOMBIES = '/activateZombies';
/////////////////////////////////////////////////
// Les niveaux de danger
const COLOR_BLUE = 'blue';
const COLOR_ORANGE = 'orange';
const COLOR_RED = 'red';
const COLOR_YELLOW = 'yellow';
/////////////////////////////////////////////////
// On conserve malgré tout quelques constantes
const CST_ACTIVE = 'active';
const CST_AJAXACTION = 'ajaxAction';
const CST_CHANGEPROFILE = 'changeProfile';
const CST_CHECKED = 'checked';
const CST_CLONE = 'clone';
const CST_COLORDER = 'colorder';
const CST_COLSORT = 'colsort';
const CST_DISABLED = 'disabled';
const CST_EDIT = 'edit';
const CST_EXPANSION = 'expansion';
const CST_FILTERS = 'filters';
const CST_HIDDEN = 'hidden';
const CST_NBPERPAGE = 'nbperpage';
const CST_MISSION = 'mission';
const CST_ONGLET = 'onglet';
const CST_PARAMETRE = 'parametre';
const CST_POSTACTION = 'postAction';
const CST_SELECTED = 'selected';
const CST_SKILL = 'skill';
const CST_SURVIVOR = 'survivor';
const CST_SURVIVORTYPEID_S = 1;
const CST_SURVIVORTYPEID_Z = 2;
const CST_SURVIVORTYPEID_U = 3;
const CST_SURVIVORTYPEID_UZ = 4;
const CST_TRASH = 'trash';
const CST_ULTIMATE = 'ultimate';
const CST_ULTIMATEZ = 'ultimatez';
const CST_ZOMBIVOR = 'zombivor';
/////////////////////////////////////////////////
// Fields
const FIELD_ID = 'id';
const FIELD_ACTIVETILE = 'activeTile';
const FIELD_BACKGROUND = 'background';
const FIELD_CODE = 'code';
const FIELD_COORDX = 'coordX';
const FIELD_COORDY = 'coordY';
const FIELD_DATEUPDATE = 'dateUpdate';
const FIELD_DECKKEY = 'deckKey';
const FIELD_DESCRIPTION = 'description';
const FIELD_DISPLAYRANK = 'displayRank';
const FIELD_DURATIONID = 'durationId';
const FIELD_EQUIPMENTCARDID = 'equipmentCardId';
const FIELD_EXPANSIONID = 'expansionId';
const FIELD_HEIGHT = 'height';
const FIELD_KEYWORDID = 'keywordId';
const FIELD_LIVEABLE = 'liveAble';
const FIELD_LIVEID = 'liveId';
const FIELD_LEVELID = 'levelId';
const FIELD_MAXDURATION = 'maxDuration';
const FIELD_MINDURATION = 'minDuration';
const FIELD_MISSIONID = 'missionId';
const FIELD_NAME = 'name';
const FIELD_NBMISSIONS = 'nbMissions';
const FIELD_NBSURVIVANTS = 'nbSurvivants';
const FIELD_OBJECTIVEID = 'objectiveId';
const FIELD_OFFICIAL = 'official';
const FIELD_ORIGINEID = 'origineId';
const FIELD_PLAYERID = 'playerId';
const FIELD_PUBLISHED = 'published';
const FIELD_RULEID = 'ruleId';
const FIELD_SENDERID = 'senderId';
const FIELD_SENDTOID = 'sendToId';
const FIELD_SETTING = 'setting';
const FIELD_SKILLID = 'skillId';
const FIELD_SPAWNNUMBER = 'spawnNumber';
const FIELD_SURVIVORID = 'survivorId';
const FIELD_SURVIVORTYPEID = 'survivorTypeId';
const FIELD_TAGLEVELID = 'tagLevelId';
const FIELD_TEXTE = 'texte';
const FIELD_TILEID = 'tileId';
const FIELD_TIMESTAMP = 'timestamp';
const FIELD_TITLE = 'title';
const FIELD_ULTIMATE = 'ultimate';
const FIELD_WEAPONPROFILEID = 'weaponProfileId';
const FIELD_WIDTH = 'width';
const FIELD_ZOMBIVOR = 'zombivor';
/////////////////////////////////////////////////
// Formats
const FORMAT_DATE_YmdHis = 'Y-m-d H:i:s';
/////////////////////////////////////////////////
// Identifiant DOM
const ID_HEADER_UL_CHAT_SAISIE = 'header-ul-chat-saisie';
const ID_ONLINE_CHAT_CONTENT = 'online-chat-content';
/////////////////////////////////////////////////
// Labels
const LBL_STANDARD = 'Standard';
const LBL_SURVIVANT = 'Survivant';
const LBL_ZOMBIVANT = 'Zombivant';
const LBL_ULTIMATE = 'Ultimate';
const LBL_ULTIMATEZOMBIVANT = 'Ultimate Zombivant';
/////////////////////////////////////////////////
// Niveau de Danger
const LVL_BLUE = 'blue';
const LVL_YELLOW = 'yellow';
const LVL_ORANGE = 'orange';
const LVL_RED = 'red';
/////////////////////////////////////////////////
// Messages
const MSG_CHAT_EMPTIED = 'CHAT_EMPTIED';
const MSG_CHAT_USER_LEFT = 'CHAT_USER_LEFT';
const MSG_CHAT_BACK_DEFAULT = 'CHAT_BACK_DEFAULT';
const MSG_CHAT_HELP = 'CHAT_HELP';
const MSG_CHAT_UNKNOWN_USER = 'CHAT_UNKNOWN_USER';
const MSG_CHAT_JOIN_INVITE = 'CHAT_JOIN_INVITE';
const MSG_CHAT_INVITE_SENT_TO = 'CHAT_INVITE_SENT_TO';
const MSG_CHAT_USER_JOINED = 'CHAT_USER_JOINED';
/////////////////////////////////////////////////
// Allowed Pages :
const PAGE_ONLINE = 'online';
const PAGE_EQUIPMENT = 'page-equipmentcards';
const PAGE_EXTENSION = 'page-extensions';
const PAGE_GENERATION_MAP = 'page-generation-map';
const PAGE_MISSION = 'page-missions';
const PAGE_ORDRE_PRIORITE = 'page-ordre-de-priorite';
const PAGE_PISTE_DE_DES = 'page-piste-de-des';
const PAGE_SELECT_SURVIVORS = 'page-selection-survivants';
const PAGE_SKILL = 'page-competences';
const PAGE_SPAWN = 'page-spawncards';
const PAGE_SURVIVOR = 'page-survivants';
const PAGE_TILE = 'page-dalles';
/////////////////////////////////////////////////
// Session
const SESSION_DECKKEY = 'deckKey';
const SQL_WHERE = 'where';
/////////////////////////////////////////////////
// Tags
const TAG_A = 'a';
const TAG_BUTTON = 'button';
const TAG_DD = 'dd';
const TAG_DIV = 'div';
const TAG_DL = 'dl';
const TAG_DT = 'dt';
const TAG_I = 'i';
const TAG_IMG = 'img';
const TAG_LI = 'li';
const TAG_OPTION = 'option';
const TAG_SELECT = 'select';
const TAG_SPAN = 'span';
const TAG_UL = 'ul';
/////////////////////////////////////////////////
// Constantes XML
const XML_ATTRIBUTES = '@attributes';
/////////////////////////////////////////////////
// Wordpress
const WP_CAT = 'cat';
/*
const WP_CAT_EXPANSION_ID = 77;
const WP_CAT_MISSION_ID = 2;
const WP_CAT_NEWS_ID = 54;
const WP_CAT_OBJECTIVE_ID = 71;
const WP_CAT_RULE_ID = 72;
const WP_CAT_SKILL_ID = 75;
const WP_CAT_SURVIVOR_ID = 58;
*/
const WP_CAT_EXPANSION_ID = 111;
const WP_CAT_MISSION_ID = 114;
const WP_CAT_NEWS_ID = 117;
const WP_CAT_OBJECTIVE_ID = 115;
const WP_CAT_RULE_ID = 116;
const WP_CAT_SKILL_ID = 110;
const WP_CAT_SURVIVOR_ID = 119;
const WP_CURPAGE = 'cur_page';
const WP_FIELD = 'field';
const WP_METAKEY = 'meta_key';
const WP_METAVALUE = 'meta_value';
const WP_NUMBERPOSTS = 'numberposts';
const WP_OFFSET = 'offset';
const WP_ORDER = 'order';
const WP_ORDERBY = 'orderby';
const WP_POST = 'post';
const WP_POSTSPERPAGE = 'posts_per_page';
const WP_POSTSTATUS = 'post_status';
const WP_POSTTAG = 'post_tag';
const WP_POSTTITLE = 'post_title';
const WP_POSTTYPE = 'post_type';
const WP_PUBLISH = 'publish';
const WP_SLUG = 'slug';
const WP_TAXONOMY = 'taxonomy';
const WP_TAXQUERY = 'tax_query';
const WP_TERMS = 'terms';
/////////////////////////////////////////////////
// Divers
const IMG_PNG = '.png';
const ORDER_ASC = 'ASC';
const ORDER_DESC = 'DESC';
const ORDER_RAND = 'rand';
/////////////////////////////////////////////////
// Deprecated
const CST_TIMESTAMP = 'timestamp';
/**
* Chaîne Constante coordY
*/
const CST_COORDY = 'coordY';
/**
* Chaîne Constante cur_page
*/
const CST_CURPAGE = 'cur_page';
/**
* Chaîne Constante current
*/
const CST_CURRENT = 'current';
/**
* Chaîne Constante danger
*/
const CST_DANGER = 'danger';
/**
* Chaîne Constante description
*/
const CST_DESCRIPTION = 'description';
/**
* Chaîne Constante durationId
*/
const CST_DURATIONID = 'durationId';
/**
* Chaîne Constante equipment
*/
const CST_EQUIPMENT = 'equipment';
/**
* Chaîne Constante equipmentCardId
*/
const CST_EQUIPMENTCARDID = 'equipmentCardId';
/**
* Chaîne Constante Y-m-d H:i:s
*/
const CST_FORMATDATE = 'Y-m-d H:i:s';
/**
* Chaîne Constante form-control
*/
const CST_FORMCONTROL = 'form-control';
/**
* Chaîne Constante firstRow
*/
const CST_FIRSTROW = 'firstRow';
/**
* Chaîne Constante future
*/
const CST_FUTURE = 'future';
/**
* Chaîne Constante keyAccess
*/
const CST_KEYACCESS = 'keyAccess';
/**
* Chaîne Constante level
*/
const CST_LEVEL = 'level';
/**
* Chaîne Constante levelId
*/
const CST_LEVELID = 'levelId';
/**
* Chaîne Constante liveDeckId
*/
const CST_LIVEDECKID = 'liveDeckId';
/**
* Chaîne Constante liveId
*/
const CST_LIVEID = 'liveId';
/**
* Chaîne Constante liveSurvivorId
*/
const CST_LIVESURVIVORID = 'liveSurvivorId';
/**
* Chaîne Constante minDuration
*/
const CST_MINDURATION = 'minDuration';
/**
* Chaîne Constante missionId
*/
const CST_MISSIONID = 'missionId';
/**
* Chaîne Constante name
*/
const CST_NAME = 'name';
/**
* Chaîne Constante nbMissions
*/
const CST_NBMISSIONS = 'nbMissions';
/**
* Chaîne Constante objective
*/
const CST_OBJECTIVE = 'objective';
/**
* Chaîne Constante order
*/
const CST_ORDER = 'order';
/**
* Chaîne Constante origineId
*/
const CST_ORIGINEID = 'origineId';
/**
* Chaîne Constante pending
*/
const CST_PENDING = 'pending';
/**
* Chaîne Constante playerId
*/
const CST_PLAYERID = 'playerId';
/**
* Chaîne Constante post_status
*/
const CST_POSTSTATUS = 'post_status';
/**
* Chaîne Constante publish
*/
const CST_PUBLISH = 'publish';
/**
* Chaîne Constante published
*/
const CST_PUBLISHED = 'published';
/**
* Chaîne Constante rmvCol
*/
const CST_RMVCOL = 'rmvCol';
/**
* Chaîne Constante rmvRow
*/
const CST_RMVROW = 'rmvRow';
/**
* Chaîne Constante sendToId
*/
const CST_SENDTOID = 'sendToId';
/**
* Chaîne Constante setting
*/
const CST_SETTING = 'setting';
/**
* Chaîne Constante spawn
*/
const CST_SPAWN = 'spawn';
/**
* Chaîne Constante spawnNumber
*/
const CST_SPAWNNUMBER = 'spawnNumber';
/**
* Chaîne Constante square pointer
*/
const CST_SQUAREPOINTER = 'square pointer';
/**
* Chaîne Constante status
*/
const CST_STATUS = 'status';
/**
* Chaîne Constante success
*/
const CST_SUCCESS = 'success';
/**
* Chaîne Constante survivorTypeId
*/
const CST_SURVIVORTYPEID = 'survivorTypeId';
/**
* Chaîne Constante table
*/
const CST_TABLE = 'table';
/**
* Chaîne Constante tagLevelId
*/
const CST_TAGLEVELID = 'tagLevelId';
/**
* Chaîne Constante </td><td>
*/
const CST_TD_SEP = '</td><td>';
/**
* Chaîne Constante texte
*/
const CST_TEXTE = 'texte';
/**
* Chaîne Constante upload_files
*/
const CST_UPLOADFILES = 'upload_files';
/**
* Chaîne Constante window-close
*/
const CST_WINDOWCLOSE = 'window-close';
}
<file_sep>/core/bean/WpPageExpansionsBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPageExpansionsBean
* @author Hugues
* @since 1.07.21
* @version 1.07.21
*/
class WpPageExpansionsBean extends WpPageBean
{
protected $urlTemplate = 'web/pages/public/wppage-expansions.php';
/**
* Class Constructor
* @param WpPage $WpPage
*/
public function __construct($WpPage='')
{
parent::__construct($WpPage);
$this->ExpansionServices = new ExpansionServices();
}
/**
* @return string
*/
public function getContentPage()
{
$this->setFilters();
return $this->getListContentPage();
}
/**
* @return string
*/
public function getListContentPage()
{
/////////////////////////////////////////////////////////////////////////////
// On récupère la liste des Extensions puis les éléments nécessaires à la pagination.
$Expansions = $this->ExpansionServices->getExpansionsWithFilters($this->arrFilters, $this->colSort, $this->colOrder);
$this->nbElements = count($Expansions);
$this->nbPages = ceil($this->nbElements/$this->nbperpage);
// On slice la liste pour n'avoir que ceux à afficher
$displayedExpansions = array_slice($Expansions, $this->nbperpage*($this->paged-1), $this->nbperpage);
// On construit le corps du tableau
$strBody = '';
if (!empty($displayedExpansions)) {
foreach ($displayedExpansions as $Expansion) {
$strBody .= $Expansion->getBean()->getRowForPublicPage();
}
}
/////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////
// Affiche-t-on le filtre ?
$showFilters = isset($this->arrFilters[self::FIELD_NAME])&&$this->arrFilters[self::FIELD_NAME]!='';
/////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////
// On enrichit le template puis on le restitue.
$args = array(
// Les lignes du tableau - 1
$strBody,
// On affiche le dropdown par pages - 2
$this->getDropdownNbPerPages(),
// On affiche la pagination - 3
$this->getNavPagination(),
// Affiche ou non le bloc filtre - 4
$showFilters ? 'block' : 'none',
// Si le Nom est renseigné - 5
$this->arrFilters[self::FIELD_NAME],
'','','','','','','','','','','','','','','','','',''
);
return $this->getRender($this->urlTemplate, $args);
}
/**
* @param array $post
*/
public function setFilters($post=null)
{ parent::setBeanFilters($post, self::FIELD_NAME); }
}
<file_sep>/web/rsc/admin_zombicide.js
/*
* @version 1.04.30
*/
var $hj = jQuery;
$hj(document).ready(function(){
/***************
*** #01 - Home
*** Si on est sur la Homepage
***************/
$hj('#homeAdminBoard .ajaxAction').unbind().click(function(){
resolveHomeAdminBoardAjaxActions($hj(this));
return false;
});
});
function resolveHomeAdminBoardAjaxActions(clicked) {
var ajaxaction = clicked.data('ajaxaction');
// On initialise les données de tri et de filtres.
var data = {'action': 'dealWithAjax', 'ajaxAction': ajaxaction};
resolveCallAjax(data, ajaxaction);
}
function resolveCallAjax(data, idPage) {
$hj.post(
ajaxurl,
data,
function(response) {
try {
var obj = JSON.parse(response);
if (obj[idPage] != '' ) {
$hj('#'+idPage).html(obj[idPage]);
}
} catch (e) {
console.log("error: "+e);
console.log(response);
}
}
);
}
<file_sep>/core/bean/WpPostSkillBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPostSkillBean
* @author Hugues
* @since 1.00.00
* @version 1.05.20
*/
class WpPostSkillBean extends WpPostBean
{
protected $urlTemplate = 'web/pages/public/wppage-skill.php';
protected $arrLvls = array(1=>'S', 2=>'Z', 3=>'U', 4=>'UZ');
/**
* Class Constructor
*/
public function __construct($WpPost)
{
parent::__construct();
$this->SkillServices = new SkillServices();
$this->SurvivorServices = new SurvivorServices();
$this->SurvivorSkillServices = new SurvivorSkillServices();
// TODO : Quand toutes les compétences auront leur article, on peut virer ce test et l'exécution secondaire.
if ($WpPost instanceof WpPost) {
$this->WpPost = $WpPost;
$code = $this->WpPost->getPostMeta(self::FIELD_CODE);
$Skills = $this->SkillServices->getSkillsWithFilters(array(self::FIELD_CODE=>$code));
$this->Skill = (!empty($Skills) ? array_shift($Skills) : new Skill());
} else {
$this->Skill = $this->SkillServices->selectSkill($WpPost);
}
}
/**
* On retourne la page dédiée à la compétence.
* @return string
*/
public function getContentPage()
{
//////////////////////////////////////////////////////////////////
// On enrichit les tableaux de données nécessaires.
$arrF = array(self::FIELD_SKILLID => $this->Skill->getId());
$arrTags = array(
self::COLOR_BLUE => array(10, 11),
self::COLOR_YELLOW => array(20),
self::COLOR_ORANGE => array(30, 31),
self::COLOR_RED => array(40, 41, 42),
);
//////////////////////////////////////////////////////////////////
// On construit le tableau nécessaire au listing des Survivants
$this->skills = array();
foreach ($arrTags as $key => $value) {
while (!empty($value)) {
$val = array_shift($value);
$arrF[self::FIELD_TAGLEVELID] = $val;
foreach ($this->arrLvls as $k => $v) {
$arrF[self::FIELD_SURVIVORTYPEID] = $k;
$SurvivorSkills = $this->SurvivorSkillServices->getSurvivorSkillsWithFilters($arrF);
foreach ($SurvivorSkills as $SurvivorSkill) {
$Survivor = $SurvivorSkill->getSurvivor();
$this->skills[$key][$k][$Survivor->getNiceName()] = $Survivor;
}
if (!empty($this->skills[$key][$k])) {
ksort($this->skills[$key][$k]);
}
}
}
}
//////////////////////////////////////////////////////////////////
// On enrichit le template puis on le restitue.
$args = array(
// Nom de la Compétence - 1
$this->Skill->getName(),
// Description de la Compétence - 2
$this->Skill->getDescription(),
// Liste des Survivants ayant la compétence avec leur profil Standard, regroupés par couleur - 3
$this->buildSkillBadges(1, $arrTags),
// Liste des Survivants ayant la compétence avec leur profil Zombivant, regroupés par couleur - 4
$this->buildSkillBadges(2, $arrTags),
// Liste des Survivants ayant la compétence avec leur profil Ultimate, regroupés par couleur - 5
$this->buildSkillBadges(3, $arrTags),
// Liste des Survivants ayant la compétence avec leur profil Ultimate Zombivant, regroupés par couleur - 6
$this->buildSkillBadges(4, $arrTags),
// Lien de navigation - 7
$this->getNavLinks(),
);
return $this->getRender($this->urlTemplate, $args);
}
private function getNavLinks()
{
//////////////////////////////////////////////////////////////////
// On construit les liens de navigation
// On récupère toutes les compétences, classées par ordre alphabétique.
$Skills = $this->SkillServices->getSkillsWithFilters();
$firstSkill = null;
while (!empty($Skills)) {
$Skill = array_shift($Skills);
// On les parcourt jusqu'à trouver la courante.
if ($Skill->getId()==$this->Skill->getId()) {
break;
}
if ($firstSkill==null) {
$firstSkill = $Skill;
}
$prevSkill = $Skill;
}
$nextSkill = array_shift($Skills);
if (empty($prevSkill)) {
$prevSkill = array_pop($Skills);
}
if (empty($nextSkill)) {
$nextSkill = $firstSkill;
}
$nav = '';
// On exploite la précédente et la suivante.
if (!empty($prevSkill)) {
$attributes = array(self::ATTR_HREF=>$prevSkill->getWpPost()->getPermalink(), self::ATTR_CLASS=>'adjacent-link col-3');
$nav .= $this->getBalise(self::TAG_A, '« '.$prevSkill->getWpPost()->getPostTitle(), $attributes);
}
if (!empty($nextSkill)) {
$attributes = array(self::ATTR_HREF=>$nextSkill->getWpPost()->getPermalink(), self::ATTR_CLASS=>'adjacent-link col-3');
$nav .= $this->getBalise(self::TAG_A, $nextSkill->getWpPost()->getPostTitle().' »', $attributes);
}
return $nav;
}
private function buildSkillBadges($rank, $arrTags)
{
$strReturned = '';
foreach ($arrTags as $key=>$value) {
$cartoucheAttributes = array(self::ATTR_CLASS=>'cartouche badge badge-'.$key.'-skill');
$Survivors = isset($this->skills[$key][$rank]) ? $this->skills[$key][$rank] : array();
if (!empty($Survivors)) {
ksort($Survivors);
while (!empty($Survivors)) {
$Survivor = array_shift($Survivors);
$strReturned .= $Survivor->getBean()->getCartouche($cartoucheAttributes, true);
}
$strReturned .= '<br>';
}
}
return $strReturned;
}
}
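
/*
 * Illustrative sketch (not part of the plugin): the circular previous/next lookup used
 * by getNavLinks() above, reduced to a sorted array of names. Given the current name,
 * it returns the adjacent entries and wraps around at both ends. The function name and
 * the sample data are invented.
 */
function hjz_example_adjacent_entries(array $sortedNames, $currentName)
{
    $prev = null;
    $first = null;
    while (!empty($sortedNames)) {
        $name = array_shift($sortedNames);
        if ($name == $currentName) {
            break;
        }
        if ($first === null) {
            $first = $name;
        }
        $prev = $name;
    }
    $next = array_shift($sortedNames);
    // Wrap around: before the first entry comes the last one, after the last comes the first.
    if (empty($prev)) {
        $prev = array_pop($sortedNames);
    }
    if (empty($next)) {
        $next = $first;
    }
    return array($prev, $next);
}
// Example: hjz_example_adjacent_entries(array('A', 'B', 'C'), 'A') returns array('C', 'B').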
<file_sep>/web/rsc/non-used-yet-anymore.js
/***************
*** More News
***************
if ($hj('#page-missions').length!=0 ) {
$hj('#page-missions .ajaxAction').unbind().click(function(){
addPageMissionAjaxActions($hj(this));
return false;
});
}
if ($hj('#filters').length!=0 ) {
$hj('#filters select').unbind().change(function(){
var set = $hj(this).val();
$hj('#card-container .card').each(function(){
if (set == '' || $hj(this).hasClass(set) ) {
$hj(this).css('display', 'inline-block');
} else {
$hj(this).css('display', 'none');
}
})
});
$hj('#idReset').unbind().click(function(){
$hj('#filters select').val('');
$hj('#card-container .card').each(function(){
$hj(this).css('display', 'inline-block');
});
return false;
});
}
if ($hj('#page-live-spawn').length!=0 ) {
addPageLiveSpawnActions();
return false;
}
if ($hj('#page-live-equipment').length!=0 ) {
addPageLiveEquipmentActions();
return false;
}
if ($hj('#page-online').length!=0 ) {
$hj('#startGame').unbind().click(function(){ $hj('form')[0].submit(); });
// Sélection de la Mission
$hj('.btn.btn-mission').unbind().click(function(){
var missionId = $hj(this).data('mission-id');
$hj(this).find('input').prop('checked', true);
$hj(this).siblings().removeClass('active');
$hj(this).toggleClass('active');
$hj(this).siblings().find('svg').removeClass('fa-check-square').addClass('fa-square');
$hj(this).find('svg').removeClass('fa-square').addClass('fa-check-square');
});
// Sélection des Survivants
$hj('.btn.btn-survivor').unbind().click(function(){
var survivorId = $hj(this).data('survivor-id');
var isChecked = $hj(this).find('input').prop('checked');
$hj(this).find('input').prop('checked', !isChecked);
$hj(this).toggleClass('active');
$hj(this).find('svg').toggleClass('fa-square fa-check-square');
});
}
if ($hj('#canvas-background').length !=0 ) {
$hj('canvas').drawImage({
source: srcImg,
x: xStart, y: yStart
});
}
$hj('.menuBurger').click(function(){
$hj(this).toggleClass('open');
});
/********
* Filtres sur l'interface Survivors
********
$hj('.survivorFilters a').unbind().click(function() {
if (!$hj(this).hasClass('selected') ) {
var exp = $hj(this).data('exp');
var nb = $hj('article[data-exp="'+exp+'"]').length;
if (nb==0 ) { getSurvivorsByExpansionCode(exp); }
}
$hj(this).toggleClass('selected');
filterSurvivors();
});
$hj('.survivorFilters select').unbind().change(function() {
filterSurvivors();
});
$hj('.survivorTool .selectSkill a').unbind().click(function() {
var exp = $hj(this).data('exp');
var txt = $hj(this).html();
var strLi = '<li class=\\"active\\"><article><input type=\\"checkbox\\" checked=\\"checked\\" name=\\"cb-sk-'+exp+'\\" id=\\"cb-sk-'+exp+'\\" class=\\"hidden\\">';
strLi += '<i class=\\"glyphicon glyphicon-check pull-right\\"></i><span>'+txt+'</span></article></li>';
var json = '{"selectionSkills":"'+strLi+'"}';
var obj = JSON.parse(json);
reloadComponents(obj, 'append');
$hj('#selectionSkills i').unbind().click(function() {
$hj(this).parent().parent().remove();
})
});
$hj('.survivorTool .selectSurvivor a').unbind().click(function() {
var exp = $hj(this).data('exp');
if (!$hj(this).hasClass('selected') ) {
var nb = $hj('article[data-exp="'+exp+'"]').length;
if (nb!=0 ) {
$hj('article[data-exp="'+exp+'"]').each(function(e) {
$hj(this).parent().remove();
});
}
getSurvivorsByExpansionCode(exp, 'cartouche');
} else {
$hj('article[data-exp="'+exp+'"]').each(function() {
$hj(this).parent().remove();
/*
$hj(this).parent().removeClass('active');
var inpSib = $hj(this).find('input');
inpSib.prop("checked", !inpSib.prop("checked"))
*
})
}
$hj(this).toggleClass('selected');
});
$hj('#generateTeam').click(function(e) {
e.preventDefault();
var obj;
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getTeam', 'filters': $hj('#filters').serialize()};
$hj.post(
ajaxurl,
data,
function(response) {
try {
obj = JSON.parse(response);
} catch (e) {
console.log("error: "+e);
console.log(response);
}
reloadComponents(obj, 'replace');
}
);
return false;
});
$hj('#exportSelectionTeam').click(function(e) {
var data = '';
$hj('#selectionSurvivors input').each(function(e) {
if ($hj(this).is(':checked') ) {
if (data !='' ) {
data += ';';
}
data += $hj(this).attr('name').substr(8);
}
});
var filename = 'selectionTeam.txt';
var file = new Blob([data], {type: 'text'});
if (window.navigator.msSaveOrOpenBlob) // IE10+
window.navigator.msSaveOrOpenBlob(file, filename);
else { // Others
var a = document.createElement("a"),
url = URL.createObjectURL(file);
a.href = url;
a.download = filename;
document.body.appendChild(a);
a.click();
setTimeout(function() {
document.body.removeChild(a);
window.URL.revokeObjectURL(url);
}, 0);
}
return false;
});
$hj('#importSelectionTeam').change(function(e) {
var obj;
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getSurvivorsForImport', 'value': $hj(this).val()};
$hj.post(
ajaxurl,
data,
function(response) {
try {
obj = JSON.parse(response);
} catch (e) {
console.log("error: "+e);
console.log(response);
}
reloadComponents(obj, 'replace');
$hj('#selectionSurvivors i').unbind().click(function() {
$hj(this).parent().parent().toggleClass('active');
var inpSib = $hj(this).siblings('input');
inpSib.prop("checked", !inpSib.prop("checked"))
});
}
);
});
/********
* Filtres sur l'interface Equipements
********
$hj('.equipmentFilters select').unbind().change(function() {
if ($hj(this).hasClass('zoom') ) {
$hj('.batchArticles.equipments').removeClass().addClass('batchArticles equipments '+$hj(this).val());
} else {
filterCards();
}
});
$hj('.invasionFilters select').unbind().change(function() {
if ($hj(this).hasClass('zoom') ) {
$hj('.batchArticles.invasions').removeClass().addClass('batchArticles invasions '+$hj(this).val());
} else {
filterCards();
}
});
/********
* Filtres sur l'interface Skills
********
$hj('input.chosen-search-input-skill').keyup(function() {
var filter = $hj('input.chosen-search-input-skill').val().toLowerCase();
$hj('.batchArticles.skills article').each(function() {
var value = $hj(this).data('title');
if (value.toLowerCase().indexOf(filter)!=-1 ) {
$hj(this).show();
} else {
$hj(this).hide();
}
});
});
/********
* Filtres sur l'interface Missions
********
$hj('.missionFilters a').unbind().click(function() {
var isSelected = $hj(this).hasClass('selected');
if (isSelected ) {
if ($hj(this).parent().parent().find('a.selected').length == 1 ) {
$hj(this).parent().siblings('li').find('a[data-filter="all"]').addClass('selected');
}
} else {
$hj(this).parent().siblings('li').find('a[data-filter="all"]').removeClass('selected');
}
$hj(this).toggleClass('selected');
filterMissions();
});
$hj('div.chosen-container.chosen-container-single a')
.unbind()
.click(
function(){
fillOptionsList();
$hj(this).parent().toggleClass('chosen-with-drop chosen-container-active');
$hj('ul.chosen-results li.result-selected').addClass('highlighted');
$hj('input.chosen-search-input').val('').focus();
}
);
addActionsOnList();
$hj('input.chosen-search-input').keyup(function() { fillOptionsList(); });
/***************
*** Mission Online
***************
$hj('#genKey').click(function(e){
e.preventDefault();
var n = 16;
var str = 'azertyupqsdfghjkmwxcvbnAZERTYUPQSDFGHJKMWXCVBN23456789';
var max = str.length;
var password = '';
for (var i=1; i<=n; i++ ) {
var start = Math.floor(Math.random()*max);
password += str.substring(start, start+1);
}
$hj('#deckKey').val(password);
});
$hj('.slideinfo').click(function(){
if ($hj(this).hasClass('show') ) {
$hj('.slideinfo').removeClass('show');
} else {
$hj('.slideinfo').removeClass('show');
$hj(this).addClass('show');
}
});
$hj('#liveMissionSelection li input').click(function(){
var id = $hj(this).val();
$hj('.hideAllMaps > li').hide();
$hj('.hideAllMaps > li[data-missionId="'+id+'"]').show();
});
$hj('#filter-expansion').change(function(){
var filtre = $hj(this).val();
$hj('#liveSurvivorsSelection li').each(function(){
if ($hj(this).hasClass(filtre) || filtre=='' ) {
$hj(this).show();
} else {
$hj(this).hide();
}
});
});
$hj('#liveSurvivorsSelection input').change(function(){
$hj('#nbSurvivorsSelected').html($hj('#liveSurvivorsSelection input:checked').length);
});
$hj('#randomSelect').unbind().click(function(e){
e.preventDefault();
var nb = 6-$hj('#liveSurvivorsSelection input:checked').length;
var nbEligible = $hj('#liveSurvivorsSelection input:visible:not(:checked)').length;
if (nbEligible <= nb ) {
$hj('#liveSurvivorsSelection input:visible:not(:checked)').each(function(){
$hj(this).prop('checked', true);
});
} else {
for (var i=nb; i>0; i-- ) {
var rk = Math.floor(Math.random() * nbEligible);
$hj('#liveSurvivorsSelection input:visible:not(:checked)').eq(rk).prop('checked', true);
nbEligible--;
}
}
$hj('#nbSurvivorsSelected').html($hj('#liveSurvivorsSelection input:checked').length);
});
$hj('.checkall').click(function(){
$hj(this).parent().siblings().find('input').prop('checked', $hj(this).prop('checked'));
});
$hj('#spawnSetupSelection input').click(function(){
var isChecked = $hj(this).is(':checked');
var span = $hj(this).data('href');
var node = $hj('#invasionSpanSelection');
if (isChecked && node.val().indexOf(span)==-1 ) {
node.val(node.val()+span);
} else if (!isChecked && node.val().indexOf(span)!=-1 ) {
node.val(node.val().replace(span, ''));
}
});
$hj(function() {
$hj("#sortable1, #sortable2" ).sortable({
connectWith: ".connectedSortable"
}).disableSelection();
});
/***
***
$hj('.toolEquipments ul li i').unbind().click(function() {
$hj(this).toggleClass('glyphicon-triangle-bottom').toggleClass('glyphicon-triangle-top');
});
$hj('#piocheXCards').unbind().click(function() {
$hj(this).parent().toggleClass('open');
});
* */
/*
function addActionsOnList() {
$hj('ul.chosen-results li.active-result')
.unbind()
.hover(
function() {
$hj(this).siblings().removeClass('highlighted');
$hj(this).addClass('highlighted');
},
function() {
$hj(this).removeClass('highlighted');
}
)
.click(
function() {
$hj(this).siblings().removeClass('result-selected');
$hj(this).addClass('result-selected');
var rank = $hj(this).data('option-array-index');
var libelle = $hj('#searchSkill option:nth-child('+rank+')').text();
var value = $hj('#searchSkill option:nth-child('+rank+')').val();
getSurvivorsBySkillId(value);
$hj('a.chosen-single span').html(libelle);
$hj('div.chosen-container.chosen-container-single a').parent().removeClass('chosen-with-drop chosen-container-active');
}
);
}
function fillOptionsList() {
var filter = $hj('input.chosen-search-input').val().toLowerCase();
var str = '';
var cpt = 1;
var nb = 0;
$hj('select.chosen-select option').each(function() {
var value = $hj(this).html();
if (value.toLowerCase().indexOf(filter)!=-1 ) {
var libelle = $hj('#searchSkill option:nth-child('+cpt+')').text().toLowerCase();
if (filter!='' ) {
libelle = libelle.replace(filter, '<em>'+filter+'</em>');
}
str += '<li class="active-result'+(nb==0 ? ' highlighted' : '')+'" style="" data-option-array-index="'+cpt+'">'+libelle+'</li>';
nb++;
}
cpt++;
}).promise()
.done(function() {
$hj('ul.chosen-results').html(str);
addActionsOnList();
});
}
/********
* Filtres sur les articles Missions
********
function filterMissions() {
$hj('.batchArticles article.mission').each(function(){
var nodeArticle = $hj(this);
var showArticle = true;
var cpt = 0;
$hj('.missionFilters ul').each(function(){
var tmpShow = false;
$hj(this).find('a.selected').each(function(){
var filter = $hj(this).data('filter');
if (filter == 'all' ){
tmpShow = true;
cpt++;
} else {
if (nodeArticle.hasClass(filter) ) {
tmpShow = true;
}
}
});
showArticle = (showArticle && tmpShow);
});
if (!showArticle && cpt==4 ) { showArticle = true; }
if (showArticle && !nodeArticle.is(':visible')
|| !showArticle && nodeArticle.is(':visible') ) {
nodeArticle.animate({width: 'toggle'}, 2500);
}
});
}
/********
* Filtres sur les articles Equipements
********
function filterCards() {
$hj('.batchArticles article.equipment').each(function(){
var nodeArticle = $hj(this);
var showArticle = true;
$hj('.equipmentFilters select').each(function(){
var select = $hj(this).val();
if (select!='' && !$hj(this).hasClass('zoom') ) {
if (!nodeArticle.hasClass(select) ) {
showArticle = false;
}
}
});
if (showArticle && !nodeArticle.is(':visible')
|| !showArticle && nodeArticle.is(':visible') ) {
nodeArticle.animate({width: 'toggle'}, 2500);
}
});
$hj('.batchArticles article.invasion').each(function(){
var nodeArticle = $hj(this);
var showArticle = true;
$hj('.invasionFilters select').each(function(){
var select = $hj(this).val();
if (select!='' && !$hj(this).hasClass('zoom') ) {
if (!nodeArticle.hasClass(select) ) {
showArticle = false;
}
}
});
if (showArticle && !nodeArticle.is(':visible')
|| !showArticle && nodeArticle.is(':visible') ) {
nodeArticle.animate({width: 'toggle'}, 2500);
}
});
}
/********
* Filtres sur les articles Survivants
********
function filterSurvivors() {
var typeSelected = $hj('#survivorType').val();
$hj('section.batchArticles')
.removeClass()
.addClass('batchArticles show-'+typeSelected);
$hj('.batchArticles article.survivor').each(function() {
var nodeSurvivor = $hj(this);
var showArticle = true;
var expValue = $hj(this).data('exp');
if (!$hj('.survivorFilters a[data-exp="'+expValue+'"]').hasClass('selected') ) {
showArticle = false;
}
var blueSelection = $hj('#searchBlue').val();
if (blueSelection!=0 && showArticle ) {
showArticle = false;
nodeSurvivor.find('li.compBlue').each(function() {
if ($hj(this).data('idskill') == blueSelection ) {
showArticle = true;
}
});
}
var yellowSelection = $hj('#searchYellow').val();
if (yellowSelection!=0 && showArticle ) {
showArticle = false;
nodeSurvivor.find('li.compYellow').each(function() {
if ($hj(this).data('idskill') == yellowSelection ) {
showArticle = true;
}
});
}
var orangeSelection = $hj('#searchOrange').val();
if (orangeSelection!=0 && showArticle ) {
showArticle = false;
nodeSurvivor.find('li.compOrange').each(function() {
if ($hj(this).data('idskill') == orangeSelection ) {
showArticle = true;
}
});
}
var redSelection = $hj('#searchRed').val();
if (redSelection!=0 && showArticle ) {
showArticle = false;
nodeSurvivor.find('li.compRed').each(function() {
if ($hj(this).data('idskill') == redSelection ) {
showArticle = true;
}
});
}
var allSelection = $hj('#searchAll').val();
if (allSelection!=0 && showArticle ) {
showArticle = false;
nodeSurvivor.find('li').each(function() {
if ($hj(this).data('idskill') == allSelection ) {
showArticle = true;
}
});
}
if (typeSelected=='zombivor' && !$hj(this).hasClass('zombivor')
|| (typeSelected=='usurvivor' || typeSelected=='uzombivor') && !$hj(this).hasClass('ultimate') ) {
showArticle = false;
}
if (showArticle && !nodeSurvivor.is(':visible')
|| !showArticle && nodeSurvivor.is(':visible') ) {
nodeSurvivor.animate({width: 'toggle'}, 2500);
}
});
}
function getSurvivorsBySkillId(skillId) {
var obj;
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getSurvivorsBySkillId', 'value': skillId};
$hj.post(
ajaxurl,
data,
function(response) {
try {
obj = JSON.parse(response);
} catch (e) {
console.log("error: "+e);
console.log(response);
}
reloadComponents(obj, 'replace');
}
);
}
function getSurvivorsByExpansionCode(exp, type='') {
var obj;
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getSurvivorsByExpansionCode', 'value': exp, 'type': type};
$hj.post(
ajaxurl,
data,
function(response) {
try {
obj = JSON.parse(response);
} catch (e) {
console.log("error: "+e);
console.log(response);
}
reloadComponents(obj, 'append');
if (type=='' ) {
filterSurvivors();
sortSurvivors();
} else if (type=='cartouche' ) {
$hj('#selectionSurvivors i').unbind().click(function() {
$hj(this).parent().parent().toggleClass('active');
var inpSib = $hj(this).siblings('input');
inpSib.prop("checked", !inpSib.prop("checked"))
});
}
}
);
}
/********
* Ajax Utilities
********
function reloadComponents(obj, type) {
for (var anchor in obj ) {
if ($hj('#'+anchor).length==1 ) {
switch (anchor ) {
case 'descSkill' :
case 'homeSectionArticles' :
case 'moreSurvivors' :
case 'selectionSkills' :
case 'selectionSurvivors' :
switch (type ) {
case 'append' : $hj('#'+anchor).append(obj[anchor]); break;
case 'prepend' : $hj('#'+anchor).prepend(obj[anchor]); break;
case 'replace' : $hj('#'+anchor).html(obj[anchor]); break;
}
break;
}
}
}
}
function sortSurvivors() {
$hj('#moreSurvivors article')
.sort(sortSurvivorsArticles)
.appendTo('#moreSurvivors');
}
function sortSurvivorsArticles(a, b){
return ($hj(b).data('name')) < ($hj(a).data('name')) ? 1 : -1;
}
/********
* Ajax Actions - Missions Page
********
var displayValue = $hj('#displayedRows').val();
var colsort = $hj('section th[data-colorder="_asc"]').data('colsort');
if (colsort=='' ) { colsort = $hj('section th[data-colorder="_asc"]').data('colsort'); }
var colorder = 'asc';
var paged = $hj('.pagination.justify-content-end .page-item.disabled a').data('paged');
function addPageMissionAjaxActions(clicked) {
var ajaxaction = clicked.data('ajaxaction');
var callAjax = true;
switch (ajaxaction ) {
// On change le critère de tri
case 'sort' :
colsort = clicked.data('colsort');
if (!clicked.hasClass('sorting') ) {
var actualorder = clicked.data('colorder');
if (actualorder=='_asc' ) { colorder = 'desc'; }
else { colorder = 'asc'; }
}
break;
// On change le nombre d'éléments affichés
case 'display' :
displayValue = clicked.val();
paged = 1;
break;
// On change la page affichée
case 'paged' :
paged = clicked.data('paged');
break;
case 'filter' :
if ($hj('#rowFilterMission').hasClass('hidden') ) {
callAjax = false;
$hj('#rowFilterMission').removeClass('hidden');
}
break;
default : callAjax = false; break;
}
if (callAjax ) {
var obj;
var filters = $hj('select.filters').serialize();
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getMissions', 'colsort': colsort, 'colorder': colorder, 'nbperpage': displayValue, 'paged': paged, 'filters': filters};
$hj.post(
ajaxurl,
data,
function(response) {
try {
obj = JSON.parse(response);
if (obj['page-missions'] != '' ) {
$hj('#page-missions').replaceWith(obj['page-missions']);
$hj('#page-missions .ajaxAction').unbind().click(function(){
addPageMissionAjaxActions($hj(this));
return false;
});
}
} catch (e) {
console.log("error: "+e);
console.log(response);
}
}
);
}
}
function addPageSurvivantAjaxActions(clicked) {
var ajaxaction = clicked.data('ajaxaction');
var callAjax = true;
switch (ajaxaction ) {
// On change le critère de tri
case 'sort' :
colsort = clicked.data('colsort');
if (!clicked.hasClass('sorting') ) {
var actualorder = clicked.data('colorder');
if (actualorder=='_asc' ) { colorder = 'desc'; }
else { colorder = 'asc'; }
}
break;
// On change le nombre d'éléments affichés
case 'display' :
displayValue = clicked.val();
paged = 1;
break;
// On change la page affichée
case 'paged' :
paged = clicked.data('paged');
break;
case 'filter' :
var filter = clicked.data('filter');
var top = clicked.offset().top;
var left = clicked.offset().left;
var widthCol = clicked.width();
var widthPopup = $hj('#popover'+filter.charAt(0).toUpperCase()+filter.slice(1)).width();
$hj('#popover'+filter.charAt(0).toUpperCase()+filter.slice(1))
.toggleClass('show')
.css('transform', 'translate3d('+(left+widthCol-widthPopup+34)+'px, '+(top-182)+'px, 0px)')
.find('.arrow').css('left', (widthPopup-34)+'px');
callAjax = !$hj('#popover'+filter.charAt(0).toUpperCase()+filter.slice(1)).hasClass('show');
break;
default : callAjax = false; break;
}
if (callAjax ) {
var obj;
var filters = $hj('input.filters').serialize();
var data = {'action': 'dealWithAjax', 'ajaxAction': 'getSurvivants', 'colsort': colsort, 'colorder': colorder, 'nbperpage': displayValue, 'paged': paged, 'filters': filters};
$hj.post(
ajaxurl,
data,
function(response) {
try {
obj = JSON.parse(response);
if (obj['page-survivants'] != '' ) {
$hj('#page-survivants').replaceWith(obj['page-survivants']);
$hj('#page-survivants .ajaxAction').unbind().click(function(){
addPageSurvivantAjaxActions($hj(this));
});
$hj('#page-survivants .changeProfile').unbind().click(function(){
addPageSurvivantLocalActions($hj(this));
return false;
});
}
} catch (e) {
console.log("error: "+e);
console.log(response);
}
}
);
}
}
function doEquipmentDeckActions(data, type) {
var obj;
$hj.post(
ajaxurl,
data,
function(response) {
try {
obj = JSON.parse(response);
if (type == 'reload' ) {
location.href = 'http://zombicide.jhugues.fr/page-live-pioche-equipment/';
}
for (var anchor in obj ) {
if (type == 'insert' ) {
$hj('#'+anchor).html(obj[anchor]);
}
}
$hj('.discardEquipButton').unbind().click(function(){
var data = {'action': 'dealWithAjax', 'ajaxAction': 'discardEquippedCard', 'keyAccess': $hj(this).data('keyaccess'), 'id': $hj(this).data('id')};
doEquipmentDeckActions(data, 'insert');
});
} catch (e) {
console.log("error: "+e);
console.log(response);
}
}
);
}
function doSpawnDeckActions(data, type) {
var obj;
$hj.post(
ajaxurl,
data,
function(response) {
try {
obj = JSON.parse(response);
if (type == 'reload' ) {
location.href = 'http://zombicide.jhugues.fr/page-live-pioche-invasion/';
}
for (var anchor in obj ) {
if (type == 'insert' ) {
$hj('#'+anchor).html(obj[anchor]);
}
}
} catch (e) {
console.log("error: "+e);
console.log(response);
}
}
);
}
function addGenKeyActions() {
$hj('#genKey').click(function(e){
e.preventDefault();
var n = 16;
var str = 'azertyupqsdfghjkmwxcvbnAZERTYUPQSDFGHJKMWXCVBN23456789';
var max = str.length;
var password = '';
for (var i=1; i<=n; i++ ) {
var start = Math.floor(Math.random()*max);
password += str.substring(start, start+1);
}
$hj('#keyAccess').val(password);
});
}
function addPageLiveEquipmentActions() {
addGenKeyActions();
$hj('#equipmentSetupSelection .btn-expansion').click(function(){
$hj(this).toggleClass('active');
$hj(this).find('svg').toggleClass('fa-square fa-check-square');
var expansionIds = '';
$hj('.btn-expansion.active').each(function(){
if (expansionIds!='') {
expansionIds+=',';
}
expansionIds += $hj(this).data('expansion-id');
});
var data = {'action': 'dealWithAjax', 'ajaxAction': 'pregenEquipmentCard', 'expansionIds': expansionIds};
doEquipmentDeckActions(data, 'insert');
});
if ($hj('#btnDrawEquipmentCard').length!=0 ) {
$hj('.withEquipmentAction').unbind().click(function(){
var action = $hj(this).data('action');
var keyAccess = $hj(this).data('keyaccess');
var data = {'action': 'dealWithAjax', 'ajaxAction': 'EquipmentDeck' , 'ajaxChildAction': action, 'keyAccess': keyAccess};
doEquipmentDeckActions(data, $hj(this).data('type'));
});
}
}
function addPageLiveSpawnActions() {
addGenKeyActions();
$hj('#spawnSetupSelection .btn-expansion span').click(function(){
$hj(this).find('svg').toggleClass('fa-square fa-check-square');
var isChecked = $hj(this).find('svg').hasClass('fa-check-square');
var span = $hj(this).data('spawnspan');
var node = $hj('#invasionSpanSelection');
if (isChecked && node.val().indexOf(span)==-1 ) {
node.val(node.val()+span);
} else if (!isChecked && node.val().indexOf(span)!=-1 ) {
node.val(node.val().replace(span, ''));
}
});
if ($hj('#btnDrawSpawnCard').length!=0 ) {
$hj('.withSpawnAction').unbind().click(function(){
var action = $hj(this).data('action');
var keyAccess = $hj(this).data('keyaccess');
var data = {'action': 'dealWithAjax', 'ajaxAction': 'SpawnDeck' , 'ajaxChildAction': action, 'keyAccess': keyAccess};
doSpawnDeckActions(data, $hj(this).data('type'));
});
}
}
function joinGame() {
var data = {'action': 'dealWithAjax', 'ajaxAction': 'joinGame', 'keyAccess': $hj('#keyAccess').val()};
$hj.post(
ajaxurl,
data,
function(response) {
try {
var obj = JSON.parse(response);
console.log(obj);
} catch (e) {
console.log("error: "+e);
console.log(response);
}
}
);
}
*/
<file_sep>/core/actions/TileActions.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* TileActions
* @author Hugues
* @since 1.08.30
*/
class TileActions extends LocalActions
{
/**
* Constructeur
*/
public function __construct($post=array())
{
parent::__construct();
$this->post = $post;
$this->TileServices = new TileServices();
}
/**
* Point d'entrée des méthodes statiques.
* @param array $post
* @return string
**/
public static function dealWithStatic($post)
{
$returned = '';
$Act = new TileActions($post);
if ($post[self::CST_AJAXACTION]==self::AJAX_GETTILES) {
$returned = $Act->dealWithGetTiles();
} else {
$returned = 'Erreur dans TileActions > dealWithStatic, '.$post[self::CST_AJAXACTION].' inconnu.';
}
return $returned;
}
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
// Bloc de gestion de la récupération des dalles selon une extension
public function dealWithGetTiles()
{
//////////////////////////////////////////////////////////////////////
// Initialisation des variables
$expansionId = $this->post['idSet'];
$Tiles = $this->TileServices->getTilesWithFilters(array(self::FIELD_EXPANSIONID=>$expansionId), self::FIELD_CODE, self::ORDER_ASC);
//////////////////////////////////////////////////////////////////////
// On parcourt la liste des Tiles pour les afficher.
$result = '';
while (!empty($Tiles)) {
$Tile = array_shift($Tiles);
$result .= '<div class="card"><img class="card-img-top" src="'.$Tile->getImgUrl().'"/></div>';
}
$result = '<div id="tile-container"><div class="card-columns">'.$result.'</div></div>';
return $this->jsonString($result, 'tile-container', true);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
}
<file_sep>/core/actions/AjaxActions.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* AjaxActions
* @author Hugues
* @since 1.04.00
* @version 1.04.27
*/
class AjaxActions extends LocalActions
{
/**
* Constructeur
*/
public function __construct()
{}
/**
* Gère les actions Ajax
* @version 1.04.30
* @version 1.07.21
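*
* Request sketch: the shape this dispatcher expects in $_POST. The keys come from the
* plugin's client-side JS; the sort/pagination values below are only illustrative:
* array(
*   'action'     => 'dealWithAjax',
*   'ajaxAction' => 'getMissions',
*   'colsort'    => 'code',
*   'colorder'   => 'asc',
*   'nbperpage'  => 25,
*   'paged'      => 1,
*   'filters'    => '',
* )
* Only 'ajaxAction' is read here; the other keys are consumed by the delegated Actions classes.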
*/
public static function dealWithAjax()
{
switch ($_POST[self::CST_AJAXACTION]) {
case self::AJAX_GETEXPANSIONS :
case self::AJAX_EXPANSIONVERIF :
$returned = ExpansionActions::dealWithStatic($_POST);
break;
case self::AJAX_ADDMORENEWS :
$returned = HomePageActions::dealWithStatic($_POST);
break;
case self::AJAX_GETMISSIONS :
case self::AJAX_MISSIONVERIF :
$returned = MissionActions::dealWithStatic($_POST);
break;
case self::AJAX_GETSKILLS :
case self::AJAX_SKILLVERIF :
$returned = SkillActions::dealWithStatic($_POST);
break;
case self::AJAX_GETRANDOMTEAM :
case self::AJAX_GETSURVIVORS :
case self::AJAX_SURVIVORVERIF :
$returned = SurvivorActions::dealWithStatic($_POST);
break;
case self::AJAX_GETRANDOMMAP :
case self::AJAX_GETTHROWDICE :
case 'getBuildingMap' :
case 'getNonUsedTiles' :
case 'getEmptyCell' :
case 'getImageMap' :
$returned = ToolActions::dealWithStatic($_POST);
break;
case self::AJAX_GETTILES :
$returned = TileActions::dealWithStatic($_POST);
break;
case 'updateLiveMission' :
$returned = LiveMissionActions::dealWithStatic($_POST);
break;
default :
$returned = 'Erreur dans le $_POST['.self::CST_AJAXACTION.'] : '.$_POST[self::CST_AJAXACTION].'<br>';
break;
}
return $returned;
}
}
<file_sep>/core/domain/WeaponProfile.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WeaponProfile
* @author Hugues.
* @version 1.0.00
* @since 1.0.00
*/
class WeaponProfile extends LocalDomain
{
/**
* Id technique de la donnée
* @var int $id
*/
protected $id;
/**
* Quelle portée minimale
* @var int $minRange
*/
protected $minRange;
/**
* Quelle portée maximale
* @var int $maxRange
*/
protected $maxRange;
/**
* Combien de dés
* @var int $nbDice
*/
protected $nbDice;
/**
* Quel seuil de réussite
* @var int $successRate
*/
protected $successRate;
/**
* Combien de dégâts
* @var int $damageLevel
*/
protected $damageLevel;
/**
* @return int
*/
public function getId()
{ return $this->id; }
/**
* @return int
*/
public function getMinRange()
{ return $this->minRange; }
/**
* @return int
*/
public function getMaxRange()
{ return $this->maxRange; }
/**
* @return int
*/
public function getNbDice()
{ return $this->nbDice; }
/**
* @return int
*/
public function getSuccessRate()
{ return $this->successRate; }
/**
* @return int
*/
public function getDamageLevel()
{ return $this->damageLevel; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id = $id; }
/**
* @param int $minRange
*/
public function setMinRange($minRange)
{ $this->minRange = $minRange; }
/**
* @param int $maxRange
*/
public function setMaxRange($maxRange)
{ $this->maxRange = $maxRange; }
/**
* @param int $nbDice
*/
public function setNbDice($nbDice)
{ $this->nbDice = $nbDice; }
/**
* @param int $successRate
*/
public function setSuccessRate($successRate)
{ $this->successRate = $successRate; }
/**
* @param int $damageLevel
*/
public function setDamageLevel($damageLevel)
{ $this->damageLevel = $damageLevel; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('WeaponProfile'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return WeaponProfile
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new WeaponProfile(), self::getClassVars(), $row); }
/**
* @return WeaponProfileBean
*/
public function getBean()
{ return new WeaponProfileBean($this); }
}
<file_sep>/core/bean/SpawnBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe SpawnBean
* @author Hugues
* @since 1.02.00
* @version 1.04.26
*/
class SpawnBean extends LocalBean
{
/**
* Class Constructor
* @param Spawn $Spawn
*/
public function __construct($Spawn=null)
{
parent::__construct();
$this->Spawn = ($Spawn==null ? new Spawn() : $Spawn);
}
public function displayCard()
{
$attrImg = array(
self::ATTR_WIDTH => 320,
self::ATTR_HEIGHT => 440,
self::ATTR_SRC => $this->Spawn->getImgUrl(),
self::ATTR_ALT => '#'.$this->Spawn->getSpawnNumber(),
);
$strImg = $this->getBalise(self::TAG_IMG, '', $attrImg);
return $this->getBalise(self::TAG_DIV, $strImg, array(self::ATTR_CLASS => 'card spawn set-'.$this->Spawn->getExpansionId()));
}
}
<file_sep>/admin_manage.php
<?php
/**
* @author Hugues
* @since 1.00.00
* @version 1.07.25
*/
define('ZOMB_SITE_URL', 'http://zombicide.jhugues.fr/');
define('PLUGINS_MYCOMMON', ZOMB_SITE_URL.'wp-content/plugins/mycommon');
define('PLUGINS_ZOMBICIDE', ZOMB_SITE_URL.'wp-content/plugins/hj-zombicide');
?>
<link rel="stylesheet" href="<?php echo PLUGINS_MYCOMMON; ?>/web/rsc/css/jquery-ui.min.css" type="text/css" media="all" />
<link rel="stylesheet" href="<?php echo PLUGINS_ZOMBICIDE; ?>/web/rsc/admin_zombicide.css" type="text/css" media="all" />
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
global $Zombisite;
if (empty($Zombisite)) {
$Zombisite = new Zombisite();
}
$AdminPageBean = new AdminPageBean();
echo $AdminPageBean->getContentPage();
?>
<script type='text/javascript' src='<?php echo PLUGINS_MYCOMMON; ?>/web/rsc/js/jquery-ui-min.js'></script>
<script type='text/javascript' src='<?php echo PLUGINS_ZOMBICIDE; ?>/web/rsc/admin_zombicide.js'></script>
<file_sep>/core/bean/AdminPageMissionsBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* AdminPageMissionsBean
* @author Hugues
* @since 1.05.10
* @version 1.07.25
*/
class AdminPageMissionsBean extends AdminPageBean
{
protected $tplHomeCheckCard = 'web/pages/admin/fragments/home-check-card.php';
protected $urlMissionListing = 'web/pages/admin/mission-listing.php';
protected $urlAdminEdit = 'web/pages/admin/mission-edit.php';
/**
* Class Constructor
*/
public function __construct($urlParams='')
{
$this->urlParams = $urlParams;
parent::__construct(self::CST_MISSION);
$this->title = 'Missions';
$this->MissionServices = new MissionServices();
}
/**
* @param array $urlParams
* @return $Bean
*/
public function getSpecificContentPage()
{
if (isset($this->urlParams[self::FIELD_ID])) {
$this->Mission = $this->MissionServices->selectMission($this->urlParams[self::FIELD_ID]);
}
if (isset($_POST)&&!empty($_POST)) {
$this->dealWithPost();
}
switch ($this->urlParams[self::CST_POSTACTION]) {
case 'confirmEdit' :
case self::CST_EDIT :
return $this->getEditContentPage();
default :
return $this->getListContentPage();
}
}
private function dealWithPost()
{
if ($this->urlParams[self::CST_POSTACTION]=='confirmEdit') {
$this->Mission->setWidth($this->urlParams[self::FIELD_WIDTH]);
$this->Mission->setHeight($this->urlParams[self::FIELD_HEIGHT]);
$this->MissionServices->updateMission($this->Mission);
}
}
public function getListContentPage()
{
$strRows = '';
$nbPerPage = 15;
$curPage = $this->initVar(self::WP_CURPAGE, 1);
$orderby = $this->initVar(self::WP_ORDERBY, self::FIELD_TITLE);
$order = $this->initVar(self::WP_ORDER, self::ORDER_ASC);
$filters = array();
if (isset($this->urlParams[self::FIELD_ORIGINEID])) {
$filters[self::FIELD_ORIGINEID] = $this->urlParams[self::FIELD_ORIGINEID];
}
$Missions = $this->MissionServices->getMissionsWithFilters($filters, $orderby, $order);
$nbElements = count($Missions);
$nbPages = ceil($nbElements/$nbPerPage);
$curPage = max(1, min($curPage, $nbPages));
$DisplayedMissions = array_slice($Missions, ($curPage-1)*$nbPerPage, $nbPerPage);
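// Worked example of the slicing above (counts purely illustrative): with 47 missions
// and 15 per page, $nbPages = ceil(47/15) = 4, a requested page of 9 is clamped to 4,
// and array_slice(..., 45, 15) returns the final two rows.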
if (!empty($DisplayedMissions)) {
foreach ($DisplayedMissions as $Mission) {
$MissionBean = new MissionBean($Mission);
$strRows .= $MissionBean->getRowForAdminPage();
}
}
$queryArg = array(
self::CST_ONGLET => self::CST_MISSION,
self::WP_ORDERBY => $orderby,
self::WP_ORDER => $order
);
// Pagination
$post_status = '';
$strPagination = $this->getPagination($queryArg, $post_status, $curPage, $nbPages, $nbElements);
$args = array(
// Liste des missions affichées - 1
$strRows,
// Filtres - 2
'',
// Url pour créer une nouvelle Mission - 3
'/wp-admin/post-new.php',
// Subs - 4
'',
// Pagination - 5
$strPagination,
// Filtre Extensions - 6
OrigineBean::getStaticSelect(self::FIELD_ORIGINEID, $this->urlParams[self::FIELD_ORIGINEID]),
);
return $this->getRender($this->urlMissionListing, $args);
}
public function getEditContentPage()
{
//////////////////////////////////////////////////////////////////////////
// On enrichit le template
$args = array(
// L'identifiant de la mission - 1
$this->Mission->getId(),
// Le code de la Mission - 2
$this->Mission->getCode(),
// Le titre de la Mission - 3
$this->Mission->getTitle(),
// Le synopsis de la Mission - 4
$this->Mission->getWpPost()->getPostContent(),
// La difficulté de la Mission - 5
$this->Mission->getStrDifficulty(),
// Le nombre de Survivants de la Mission - 6
$this->Mission->getStrNbJoueurs(),
// La durée de la Mission - 7
$this->Mission->getStrDuree(),
// L'origine de la Mission - 8
$this->Mission->getStrOrigine(),
// La liste des extensions utilisées - 9
implode(', ', unserialize($this->Mission->getWpPost()->getPostMeta('expansionIds'))),
// La liste des dalles utilisées - 10
$this->Mission->getWpPost()->getPostMeta('tileIds'),
// Url de l'image de la map - 11
$this->Mission->getImgUrl(),
// Largeur de la Map - 12
$this->Mission->getWidth(),
// Hauteur de la Map - 13
$this->Mission->getHeight(),
// Liste des Objectifs - 14
'',
// Liste des Règles Spéciales - 15
'',
'', '', '', '', '', '', '', '', '', '', '',
);
// Puis on le restitue.
return $this->getRender($this->urlAdminEdit, $args);
}
/**
* @return string
*/
public function getCheckCard()
{
/////////////////////////////////////////////////
// Gestion des Missions.
// On récupère la liste des Missions qui ont un Article. Puis les données dans la base. On compare et on effectue un diagnostic.
$Act = new MissionActions();
$strBilan = $Act->dealWithMissionVerif();
$args = array(
// Le titre de la carte - 1
$this->title,
// L'id du container de retour pour afficher les vérifications - 2
self::CST_MISSION,
// Le contenu du container de vérification - 3
$strBilan,
);
return $this->getRender($this->tplHomeCheckCard, $args);
}
}
<file_sep>/core/services/SkillServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe SkillServices
* @author Hugues.
* @since 1.00.00
* @version 1.05.06
*/
class SkillServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
* @var SkillDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new SkillDaoImpl();
}
/**
* @param array $arrFilters
*/
private function buildFilters($arrFilters)
{
$this->arrParams[self::SQL_WHERE] = array();
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_CODE));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayWideFilter($arrFilters, self::FIELD_NAME));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayWideFilter($arrFilters, self::FIELD_DESCRIPTION));
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_EXPANSIONID));
}
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getSkillsWithFilters($arrFilters=array(), $orderby=self::FIELD_NAME, $order=self::ORDER_ASC)
{
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
$this->buildFilters($arrFilters);
return $this->Dao->selectEntriesWithFilters(__FILE__, __LINE__, $this->arrParams);
}
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getSkillsWithFiltersIn($arrFilters=array(), $orderby=self::FIELD_NAME, $order=self::ORDER_ASC)
{
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
$this->buildFilters($arrFilters);
return $this->Dao->selectEntriesWithFiltersIn(__FILE__, __LINE__, $this->arrParams, $arrFilters);
}
public function insertSkill($Skill)
{ return $this->insert(__FILE__, __LINE__, $Skill); }
public function selectSkill($id)
{ return $this->select(__FILE__, __LINE__, $id); }
public function updateSkill($Skill)
{ return $this->update(__FILE__, __LINE__, $Skill); }
}
<file_sep>/core/domain/Rule.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe Rule
* @author Hugues.
* @since 1.04.08
* @version 1.04.08
*/
class Rule extends LocalDomain
{
/**
* Id technique de la donnée
* @var int $id
*/
protected $id;
/**
* Eventuel Settings
* @var bool $setting
*/
protected $setting;
/**
* Code de la Règle
* @var string $code
*/
protected $code;
/**
* Description de la Règle
* @var string $description
*/
protected $description;
/**
* @return int
*/
public function getId()
{ return $this->id; }
/**
* @return bool
*/
public function getSetting()
{ return $this->setting; }
/**
* @return string
*/
public function getCode()
{ return $this->code; }
/**
* @return string
*/
public function getDescription()
{ return $this->description; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id=$id; }
/**
* @param bool $setting
*/
public function setSetting($setting)
{ $this->setting=$setting; }
/**
* @param string $code
*/
public function setCode($code)
{ $this->code=$code; }
/**
* @param string $description
*/
public function setDescription($description)
{ $this->description=$description; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('Rule'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return Rule
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new Rule(), self::getClassVars(), $row); }
/**
* @return RuleBean
*/
public function getBean()
{ return new RuleBean($this); }
}
<file_sep>/core/services/MissionServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe MissionServices
* @author Hugues.
* @since 1.00.00
* @version 1.05.10
*/
class MissionServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
* @var MissionDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new MissionDaoImpl();
$this->MissionExpansionServices = new MissionExpansionServices();
}
/**
* @param array $arrFilters
*/
private function buildFilters($arrFilters)
{
$this->arrParams[self::SQL_WHERE] = array();
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayWideFilter($arrFilters, self::FIELD_TITLE));
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_CODE));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayFilter($arrFilters, self::FIELD_LEVELID));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayFilter($arrFilters, self::FIELD_DURATIONID));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayFilter($arrFilters, self::FIELD_PLAYERID));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayFilter($arrFilters, self::FIELD_ORIGINEID));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayFilter($arrFilters, self::FIELD_PUBLISHED));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayFilter($arrFilters, self::FIELD_LIVEABLE));
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_EXPANSIONID));
}
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getMissionsWithFilters($arrFilters=array(), $orderby=self::FIELD_TITLE, $order=self::ORDER_ASC)
{
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
$this->buildFilters($arrFilters);
if (!empty($arrFilters[self::FIELD_EXPANSIONID])) {
return $this->Dao->selectEntriesWithFiltersIn(__FILE__, __LINE__, $this->arrParams, $arrFilters);
} else {
return $this->Dao->selectEntriesWithFilters(__FILE__, __LINE__, $this->arrParams);
}
}
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getMissionsWithFiltersIn($arrFilters=array(), $orderby=self::FIELD_TITLE, $order=self::ORDER_ASC)
{
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
return $this->Dao->selectEntriesWithFiltersIn(__FILE__, __LINE__, $this->arrParams, $arrFilters);
}
public function getMissionsByExpansionId($expansionId)
{
$Missions = array();
$MissionExpansions = $this->MissionExpansionServices->getMissionExpansionsWithFilters(array(self::FIELD_EXPANSIONID=>$expansionId));
while (!empty($MissionExpansions)) {
$MissionExpansion = array_shift($MissionExpansions);
array_push($Missions, $this->selectMission($MissionExpansion->getMissionId()));
}
return $Missions;
}
/**
* @param string $file
* @param string $line
* @param string $value
* @param string $prefix
* @return string
*/
public function getDifficultySelect($file, $line, $value='', $prefix='')
{
$arrDifficulties = array('TUTO'=>'Tutoriel', 'EASY'=>'Facile', 'MED'=>'Moyenne', 'HARD'=>'Difficile',
'VHARD'=>'Très Difficile', 'PVP'=>'Compétitive', 'BLUE'=>'Bleue', 'YELLOW'=>'Jaune', 'ORANGE'=>'Orange', 'RED'=>'Rouge');
$arrSetValues = $this->getSetValues($file, $line, 'difficulty', false);
$arrSetLabels = array();
foreach ($arrSetValues as $setValue) {
$arrSetLabels[$setValue] = $arrDifficulties[$setValue];
}
$this->labelDefault = 'Difficultés';
return $this->getSetSelect($file, $line, $arrSetLabels, $prefix.'difficulty', $value);
}
/**
* @param string $file
* @param string $line
* @param string $field
* @param string $isSet
* @return array
*/
public function getSetValues($file, $line, $field, $isSet=true)
{ return $this->Dao->getSetValues($file, $line, $field, $isSet); }
/**
* @param string $file
* @param string $line
* @param string $value
* @param string $prefix
* @return string
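*
* Label mapping sketch (sample SET values; the real ones come from the nbPlayers column):
* '4-6' is rendered as '4 à 6 Survivants', '6+' as '6 Survivants et +'.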
*/
public function getNbPlayersSelect($file, $line, $value='', $prefix='')
{
$arrSetValues = $this->getSetValues($file, $line, 'nbPlayers', false);
$arrSetLabels = array();
foreach ($arrSetValues as $setValue) {
if (strpos($setValue, '+')!==false) {
$arrSetLabels[$setValue] = $setValue[0].' Survivants et +';
} else {
list($min, $max) = explode('-', $setValue);
$arrSetLabels[$setValue] = $min.' à '.$max.' Survivants';
}
}
$this->labelDefault = 'Survivants';
return $this->getSetSelect($file, $line, $arrSetLabels, $prefix.'nbPlayers', $value);
}
/**
* @param string $file
* @param string $line
* @param string $field
* @return array
*/
public function getDistinctValues($file, $line, $field)
{ return $this->Dao->getDistinctValues($file, $line, $field); }
/**
* @param string $file
* @param string $line
* @param string $value
* @param string $prefix
* @return string
*/
public function getDimensionsSelect($file, $line, $value='', $prefix='')
{
$arrParams = $this->buildOrderAndLimit(array('width', 'height'), array('ASC', 'ASC'));
$arrSetValues = $this->Dao->selectDistinctDimensions($file, $line, $arrParams);
$arrSetLabels = array();
foreach ($arrSetValues as $setValue) {
$arrSetLabels[$setValue->label] = $setValue->label;
}
$this->labelDefault = 'Dimensions';
return $this->getSetSelect($file, $line, $arrSetLabels, $prefix.'dimension', $value);
}
/**
* @param string $file
* @param string $line
* @param string $value
* @param string $prefix
* @return string
*/
public function getDurationSelect($file, $line, $value='', $prefix='')
{
$arrSetValues = $this->getDistinctValues($file, $line, 'duration');
$arrSetLabels = array();
foreach ($arrSetValues as $setValue) {
$arrSetLabels[$setValue] = $setValue.' minutes';
}
$this->labelDefault = 'Durées';
return $this->getSetSelect($file, $line, $arrSetLabels, $prefix.'duration', $value);
}
/**
* @param int $width
* @return string
*/
public function getWidthSelect($width)
{
$widthSelect = '<select name="width">';
$widthSelect .= '<option value="0">0</option>';
for ($i=1; $i<=6; $i++) {
$widthSelect .= '<option value="'.$i.'"'.($width==$i?' selected="selected"':'').'>'.$i.'</option>';
}
return $widthSelect.'</select>';
}
/**
* @param int $height
*/
public function getHeightSelect($height)
{
$heightSelect = '<select name="height">';
$heightSelect .= '<option value="0">0</option>';
for ($i=1; $i<=6; $i++) {
$heightSelect .= '<option value="'.$i.'"'.($height==$i?' selected="selected"':'').'>'.$i.'</option>';
}
return $heightSelect.'</select>';
}
private function addLiveZombie(&$LiveZombies, $Live, $missionZoneId, $zombieTypeId, $zombieCategoryId, $quantity)
{
$args = array(
'liveId'=>$Live->getId(),
'missionZoneId'=>$missionZoneId,
'zombieTypeId'=>$zombieTypeId,
'zombieCategoryId'=>$zombieCategoryId,
'quantity'=>$quantity,
);
array_push($LiveZombies, new LiveZombie($args));
}
/**
* @param Live $Live
* @param Mission $Mission
* @return array
*/
public function getStartingZombies($Live, $Mission)
{
$LiveZombies = array();
if ($Mission->hasRule(11)) {
switch ($Mission->getId()) {
case 1 :
$this->addLiveZombie($LiveZombies, $Live, 4, 1, 1, 1);
$this->addLiveZombie($LiveZombies, $Live, 12, 1, 1, 1);
break;
case 8 :
$arrIds = array(1, 2, 3, 4, 6, 7, 8, 16, 17, 18, 19, 21, 22, 23, 24);
while (!empty($arrIds)) {
$id = array_shift($arrIds);
$this->addLiveZombie($LiveZombies, $Live, $id, 1, 1, 1);
}
break;
default :
// Une Mission a des Zombies à mettre en place...
break;
}
}
return $LiveZombies;
}
/**
* @param Mission $Mission
* @return array
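*
* The returned array is a shuffled draw pile of EquipmentExpansion ids, one entry per
* physical card (a card with quantity N appears N times). Illustrative shape only, ids made up:
*   array(13, 25, 23, 42, 17, 42, 8)
* When rule 2 applies, the three starter ids (13, 23, 25) are prepended, shuffled among
* themselves, ahead of the rest of the pile.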
*/
public function getStartingEquipmentDeck($Mission)
{
$arrEE = array();
// On récupère les Extensions rattachées à la Mission.
$MissionExpansions = $Mission->getMissionExpansions();
while (!empty($MissionExpansions)) {
$MissionExpansion = array_shift($MissionExpansions);
// On récupère les Equipements rattachés aux Extensions
$EquipmentExpansions = $MissionExpansion->getEquipmentExpansions();
while (!empty($EquipmentExpansions)) {
$EquipmentExpansion = array_shift($EquipmentExpansions);
$EquipmentCard = $EquipmentExpansion->getEquipment();
// On ne doit pas prendre les cartes suivantes :
// Starter / Pimp / TODO : gérer les cartes comme le Molotov, la Batte Cloutée...
if ($EquipmentCard->isStarter() || $EquipmentCard->isPimp()) {
continue;
}
// On ajoute autant de fois la carte que requis.
for ($i=0; $i<$EquipmentExpansion->getQuantity(); $i++) {
array_push($arrEE, $EquipmentExpansion->getId());
}
}
}
shuffle($arrEE);
// Certaines règles peuvent demander un traitement spécifique pour certaines cartes.
if ($Mission->hasRule(2)) {
// On rajoute le Pistolet, le Pied-de-biche et la Hache Starters en début de pioche.
$arrAdd = array(13, 23, 25);
shuffle($arrAdd);
$arrEE = array_merge($arrAdd, $arrEE);
}
return $arrEE;
}
/**
* @param Mission $Mission
* @return array
*/
public function getSpawnDeck($Mission)
{
$arrNumbers = array();
// Certaines règles peuvent demander un traitement spécifique pour certaines cartes.
if ($Mission->hasRule(1)) {
// On ne joue qu'avec les cartes #1, #2, #3, #4 et #41.
$arrNumbers = array(1, 2, 3, 4, 41);
shuffle($arrNumbers);
}
return $arrNumbers;
}
public function selectMission($missionId)
{ return $this->select(__FILE__, __LINE__, $missionId); }
public function updateMission($Mission)
{ return $this->update(__FILE__, __LINE__, $Mission); }
public function insertMission($Mission)
{ return $this->insert(__FILE__, __LINE__, $Mission); }
}
<file_sep>/core/bean/WpPageHomeBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPageHomeBean
* @author Hugues
* @since 1.04.00
* @version 1.05.09
*/
class WpPageHomeBean extends WpPageBean
{
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->MissionServices = new MissionServices();
}
/**
* {@inheritDoc}
* @see MainPageBean::getContentPage()
*/
public function getContentPage()
{
$strContent = '<section id="homeSectionArticles" class="batchArticles missions survivors show-survivor">';
$strContent .= $this->addMoreNews();
$strContent .= '</section>';
$strContent .= '<section class="col-xs-4 col-xs-offset-4">';
$strContent .= '<div class="text-center"><div id="more_news" class="special_buttons">';
$strContent .= ($this->lang=='en' ? 'More news' : 'Plus de news');
$strContent .= '</div></div>';
$strContent .= '</section>';
return $strContent.'<div class="clearfix"></div>';
}
/**
* @param number $offset
* @return string
*/
public static function staticAddMoreNews($offset=0)
{
$Bean = new WpPageHomeBean();
return $Bean->addMoreNews($offset, true, false);
}
/**
* @param number $offset
* @param string $isAjax
* @param boolean $getSticky
* @return string
*
* TODO :
* Si getSticky vaut true, on cherche le sticky post publié le plus récent.
* Si on a bien un article, on met à jour posts_per_page à 5.
* Si offset ne vaut pas 0, on le réduit de 1
*/
public function addMoreNews($offset=0, $isAjax=false, $getSticky=true)
{
$nbPostPerPage = 6;
$args = array(
self::WP_ORDERBY => 'post_date',
self::WP_ORDER => self::ORDER_DESC,
self::WP_OFFSET =>0,
);
if ($getSticky) {
// Si on veut le Sticky, on en veut un
$args[self::WP_POSTSPERPAGE] = 1;
// Et donc on voudra un article de moins ensuite
$nbPostPerPage--;
// On veut le sticky évidemment
$args['post__in'] = get_option( 'sticky_posts' );
// On initialise TaxQuery pour prendre n'importe quel article
$args[self::WP_TAXQUERY] = array();
// Et on veut qu'il soit publié.
$args[self::WP_POSTSTATUS] = self::WP_PUBLISH;
// Et go !
$WpStickyPosts = $this->WpPostServices->getArticles($args);
} else {
$WpStickyPosts = array();
// Si on veut pas de sticky, c'est qu'on est en train de paginer. On fait -1 pour compenser le sticky sur la première page.
$offset--;
}
// On récupère maintenant les Articles à afficher.
$postStatus = ($this->isAdmin() ? ', private, future' : '');
$args[self::WP_OFFSET] = $offset;
$args[self::WP_POSTSPERPAGE] = $nbPostPerPage;
$args[self::WP_POSTSTATUS] = self::WP_PUBLISH.$postStatus;
$args[self::WP_TAXQUERY] = array(array(
self::WP_TAXONOMY=>self::WP_POSTTAG,
self::WP_FIELD=>self::WP_SLUG,
self::WP_TERMS=>array('mission', 'survivant')
));
$args['post__in'] = '';
$WpPosts = $this->WpPostServices->getArticles($args);
// On merge avec les Sticky
$WpPosts = array_merge($WpStickyPosts, $WpPosts);
$strContent = '';
while (!empty($WpPosts)) {
$WpPost = array_shift($WpPosts);
$WpBean = $WpPost->getBean();
$strContent .= $WpBean->displayWpPost(true);
}
$strContent .= '<div class="clearfix"></div>';
return ($isAjax ? '{"homeSectionArticles":'.json_encode($strContent).'}' : $strContent);
}
}
<file_sep>/core/bean/WpPostBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* WpPostBean
* @author Hugues
* @since 1.04.00
* @version 1.05.01
*/
class WpPostBean extends MainPageBean
{
/**
* WpPost affiché
* @var WpPost $WpPost
*/
protected $WpPost;
/**
* @param string $post
* @param array $services
*/
public function __construct($post='', $services=array())
{
if ($post=='') {
$post = get_post();
}
if (get_class($post) == 'WpPost') {
$this->WpPost = $post;
} else {
$this->WpPost = WpPost::convertElement($post);
}
parent::__construct($services);
}
/**
* @return Bean
*/
public function getBean()
{
$this->WpCategs = $this->WpPost->getCategories();
if (!empty($this->WpCategs)) {
$this->WpCateg = array_shift($this->WpCategs);
switch ($this->WpCateg->getCatID()) {
case self::WP_CAT_EXPANSION_ID :
$Bean = new WpPostExpansionBean($this->WpPost);
break;
case self::WP_CAT_MISSION_ID :
$Bean = new WpPostMissionBean($this->WpPost);
break;
case self::WP_CAT_NEWS_ID :
$Bean = new WpPostNewsBean($this->WpPost);
break;
case self::WP_CAT_SKILL_ID :
$Bean = new WpPostSkillBean($this->WpPost);
break;
case self::WP_CAT_SURVIVOR_ID :
$Bean = new WpPostSurvivorBean($this->WpPost);
break;
default :
$Bean = new WpPageError404Bean();
break;
}
} else {
$Bean = new WpPageError404Bean();
}
return $Bean;
}
/**
* @return string
*/
public function getShellClass()
{ return ''; }
}
<file_sep>/core/domain/MissionObjective.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe MissionObjective
* @author Hugues.
* @since 1.04.08
* @version 1.04.28
*/
class MissionObjective extends LocalDomain
{
/**
* Id technique de la donnée
* @var int $id
*/
protected $id;
/**
* Id technique de la Mission
* @var int $missionId
*/
protected $missionId;
/**
* Id technique de l'Objectif
* @var int $objectiveId
*/
protected $objectiveId;
/**
* titre de la règle
* @var string $title
*/
protected $title;
/**
* @return int
*/
public function getId()
{ return $this->id; }
/**
* @return int
*/
public function getMissionId()
{ return $this->missionId; }
/**
* @return int
*/
public function getObjectiveId()
{ return $this->objectiveId; }
/**
* @return string
*/
public function getTitle()
{ return $this->title; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id = $id; }
/**
* @param int $missionId
*/
public function setMissionId($missionId)
{ $this->missionId = $missionId; }
/**
* @param int $objectiveId
*/
public function setObjectiveId($objectiveId)
{ $this->objectiveId = $objectiveId; }
/**
* @param string $title
*/
public function setTitle($title)
{ $this->title = $title; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('MissionObjective'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return MissionObjective
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new MissionObjective(), self::getClassVars(), $row); }
}
<file_sep>/core/bean/ExpansionBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe ExpansionBean
* @author Hugues
* @since 1.04.24
* @version 1.08.01
*/
class ExpansionBean extends LocalBean
{
protected $urlRowAdmin = 'web/pages/admin/fragments/expansion-row.php';
protected $urlRowPublic = 'web/pages/public/fragments/expansion-row.php';
/**
* @param Expansion $Expansion
*/
public function __construct($Expansion=null)
{
parent::__construct();
$this->Expansion = ($Expansion==null ? new Expansion() : $Expansion);
$this->ExpansionServices = new ExpansionServices();
$this->EquipmentExpansionServices = new EquipmentExpansionServices();
$this->SpawnServices = new SpawnServices();
}
//////////////////////////////////////////////////////////////////////////
// Différentes modes de présentation
/**
* @return string
*/
public function getRowForAdminPage()
{
///////////////////////////////////////////////////////////////////////////
// On enrichit le template
$args = array(
// L'identifiant de l'extension - 1
$this->Expansion->getId(),
// Le code de l'extension - 2
$this->Expansion->getCode(),
// L'url d'édition du WpPost - 3
$this->Expansion->getWpPostEditUrl(),
// L'url publique de l'extension - 4
$this->Expansion->getWpPostUrl(),
// Son nom - 5
$this->Expansion->getName(),
// Son rang d'affichage - 6
$this->Expansion->getDisplayRank(),
// Le nombre de Survivants - 7
$this->Expansion->getNbSurvivants(),
// Le nombre de Missions - 8
$this->Expansion->getNbMissions(),
// Est une Mission officielle - 9
($this->Expansion->isOfficial() ? 'Oui' : 'Non'),
// Lien de détail de l'extension - 10
$this->Expansion->getEditUrl(self::CST_EXPANSION),
);
///////////////////////////////////////////////////////////////
// Puis on le retourne
return $this->getRender($this->urlRowAdmin, $args);
}
/**
* @return string
*/
public function getRowForPublicPage()
{
///////////////////////////////////////////////////////////////
// On enrichit le template et on le retourne.
$args = array(
// Front Url de la Compétence - 1
$this->Expansion->getWpPostUrl(),
// Nom de la Compétence - 2
$this->Expansion->getName(),
// Identifiant de la Compétence - 3
$this->Expansion->getId(),
// Nb de Survivants / Dalles / Missions - 4
$this->getExpansionDetails(),
// Détails des Zombies - 5
$this->getZombiesDetails(),
// Cartes Equipement et Spawn - 6
$this->getCardsDetails(),
// Officiel ou non - 7
($this->Expansion->isOfficial() ? 'Officielle' : 'Custom'),
);
return $this->getRender($this->urlRowPublic, $args);
}
public function getCardsDetails()
{
$arr = array();
$EquipmentCards = $this->EquipmentExpansionServices->getEquipmentExpansionsWithFilters(array(self::FIELD_EXPANSIONID=>$this->Expansion->getId()));
if (!empty($EquipmentCards)) {
$sum = 0;
while (!empty($EquipmentCards)) {
$EquipmentCard = array_shift($EquipmentCards);
$sum += $EquipmentCard->getQuantity();
}
array_push($arr, $sum.' Cartes Équipement');
}
$SpawnCards = $this->SpawnServices->getSpawnsWithFilters(array(self::FIELD_EXPANSIONID=>$this->Expansion->getId()), self::FIELD_SPAWNNUMBER);
if (!empty($SpawnCards)) {
$First = array_shift($SpawnCards);
$Last = array_pop($SpawnCards);
array_push($arr, 'Cartes Spawns : #'.str_pad($First->getSpawnNumber(), 3, '0', STR_PAD_LEFT).' à #'.str_pad($Last->getSpawnNumber(), 3, '0', STR_PAD_LEFT));
}
return implode('<br>', $arr);
}
private function getZombiesDetails()
{ return 'WIP'; }
public function getExpansionDetails()
{
$arr = array();
/////////////////////////////////////////////////////////////////////////////
// On affiche le nombre de Survivants si nécessaire
if ($this->Expansion->getNbSurvivants()!=0) {
array_push($arr, $this->Expansion->getNbSurvivants().' Survivants');
}
/////////////////////////////////////////////////////////////////////////////
// On affiche le nombre de Dalles si nécessaire
if (self::isAdmin()) {
$Tiles = $this->Expansion->getTiles();
if (count($Tiles)!=$this->Expansion->getNbDalles()) {
$this->Expansion->setNbDalles(count($Tiles));
$this->ExpansionServices->updateExpansion($this->Expansion);
}
}
if ($this->Expansion->getNbDalles()!=0) {
array_push($arr, $this->Expansion->getNbDalles().' Dalles');
}
/////////////////////////////////////////////////////////////////////////////
// On met à jour le nombre de Missions si nécessaire puis on le restitue
$Missions = $this->Expansion->getMissions();
if (count($Missions)!=$this->Expansion->getNbMissions() && !empty($Missions)) {
$this->Expansion->setNbMissions(count($Missions));
$this->ExpansionServices->updateExpansion($this->Expansion);
}
if ($this->Expansion->getNbMissions()!=0) {
array_push($arr, $this->Expansion->getNbMissions().' Missions');
}
return implode('<br>', $arr);
}
/**
* @return string
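*
* Rough markup produced (expansion id, name and survivor count are placeholder values):
* <div type="button" class="btn btn-expansion btn-dark" data-expansion-id="3"
*      data-nb-survivants="6"><span><i class="far fa-square"></i></span> Toxic City Mall</div>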
*/
public function getButton($extraClass='btn-dark')
{
$str = '<div type="button" class="btn btn-expansion '.$extraClass.'" data-expansion-id="'.$this->Expansion->getId();
$str .= '" data-nb-survivants="'.$this->Expansion->getNbSurvivants();
return $str. '"><span><i class="far fa-square"></i></span> '.$this->Expansion->getName().'</div>';
}
////////////////////////////////////////////////////////////////////////////
/**
* @param int $id
* @return string
*
public function getMenuButtonLive($id)
{
$Expansion = $this->Expansion;
$str = '<div type="button" class="btn btn-dark btn-expansion" data-expansion-id="'.$id.'"><span class="';
return $str.'"><i class="far fa-square"></i></span> '.$Expansion->getName().'</div>';
}
/**
* @param string $id
* @param string $spawnSpan
* @return string
*
public function getSpawnMenuButtonLive($id, $spawnSpan)
{
$Expansion = $this->Expansion;
$str = '<div type="button" class="btn btn-dark btn-expansion" data-expansion-id="'.$id.'"><span data-spawnspan="'.$spawnSpan;
return $str.'"><i class="far fa-square"></i></span> '.$Expansion->getName().$spawnSpan.'</div>';
}
* */
}
<file_sep>/core/actions/HomePageActions.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* HomePageActions
* @author Hugues
* @since 1.04.07
* @version 1.05.09
*/
class HomePageActions extends LocalActions
{
/**
* Constructeur
*/
public function __construct($post)
{
parent::__construct();
$this->post = $post;
}
/**
* Point d'entrée des méthodes statiques.
* @param array $post
* @return string
**/
public static function dealWithStatic($post)
{
$returned = '';
$Act = new HomePageActions($post);
if ($post[self::CST_AJAXACTION]==self::AJAX_ADDMORENEWS) {
$returned = $Act->dealWithGetMoreNews();
} else {
$returned = '';
}
return $returned;
}
/**
* Récupération du contenu de la page via une requête Ajax.
* @param array $post
* @return string
*/
public function dealWithGetMoreNews()
{
$Bean = new WpPageHomeBean();
return $Bean->addMoreNews($this->post[self::ATTR_VALUE], true, false);
}
}
<file_sep>/core/bean/MissionBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe MissionBean
* @author Hugues
* @since 1.00.00
* @version 1.07.26
*/
class MissionBean extends LocalBean
{
protected $urlRowAdmin = 'web/pages/admin/fragments/mission-row.php';
protected $urlRowPublic = 'web/pages/public/fragments/mission-row.php';
protected $urlArticle = 'web/pages/public/fragments/mission-article.php';
/**
* @param Mission $Mission
*/
public function __construct($Mission=null)
{
parent::__construct();
$this->Mission = ($Mission==null ? new Mission() : $Mission);
$this->ExpansionServices = new ExpansionServices();
$this->MissionServices = new MissionServices();
$this->WpPostServices = new WpPostServices();
}
//////////////////////////////////////////////////////////////////////////
// Différentes modes de présentation
/**
* @return string
*/
public function getRowForAdminPage()
{
///////////////////////////////////////////////////////////////
// Les infos WpPost regroupées dans une cellule.
$infosWpPost = $this->Mission->getTitle().' - '.$this->Mission->getCode().'<br>';
$infosWpPost .= $this->Mission->getStrDifPlaDur();
/////////////////////////////////////////////////////////////////
// On enrichit le template
$args = array(
// Identifiant de la Mission - 1
$this->Mission->getId(),
// Les infos du WpPost associé - 2
$infosWpPost,
// Url d'édition du WpPost - 3
$this->Mission->getWpPostEditUrl(),
// Url d'édition de la BDD - 4
$this->Mission->getEditUrl(self::CST_MISSION),
// Url pulique de l'article en ligne - 5
$this->Mission->getWpPostUrl(),
// Dimensions de la map - 6
$this->Mission->getHeight().'x'.$this->Mission->getWidth(),
// Url de la Map - 7
$this->Mission->getThumbUrl(),
// Nb d'Objectifs et Règles Spéciales - 8
$this->getObjRules()
);
///////////////////////////////////////////////////////////////
// Puis on le retourne
return $this->getRender($this->urlRowAdmin, $args);
}
/**
* @return string
*/
public function getRowForPublicPage()
{
///////////////////////////////////////////////////////////////
// On enrichit le template et on le retourne.
$urlWpPost = $this->Mission->getWpPostUrl();
$args = array(
// L'identifiant de la Mission - 1
$this->Mission->getId(),
// L'url pour accéder au détail de la Mission - 2
$urlWpPost,
// Le Titre de la Mission - 3
'['.$this->Mission->getCode().'] - '.$this->Mission->getTitle(),
// La Difficulté, le nombre de Survivants et la Durée de la Mission - 4
$this->Mission->getStrDifPlaDur(),
// La liste des Extensions nécessaires à la Mission - 5
$this->Mission->getStrExpansions(),
// L'origine de la publication originelle - 6
$this->getStrOrigine(),
);
///////////////////////////////////////////////////////////////
// Puis on le retourne
return $this->getRender($this->urlRowPublic, $args);
}
/**
* @return string
*/
public function getContentForHome()
{
///////////////////////////////////////////////////////////////
// On enrichit le template et on le retourne.
$args = array(
// Titre de la Mission - 1
$this->Mission->getWpPost()->getPostTitle(),
// Synopsis - 2
$this->Mission->getWpPost()->getPostContent(),
// Extensions nécessaires - 3
$this->getStrExpansions(),
// Dalles nécessaires - 4
$this->getStrTiles(),
// Classes additionnelles - 5
' col-12 col-md-6 col-xl-4',
// Url de l'Article de la Mission - 6
$this->Mission->getWpPostUrl(),
// Url de l'img source de la map - 7
$this->Mission->getThumbUrl(),
// Url vers la page Missions - 8
'/'.self::PAGE_MISSION,
// Difficulté - 9
$this->getLinkedDifficulty(),
// Nb de Survivants - 10
$this->getStrNbJoueurs(),
// Durée - 11
$this->getLinkedDuration(),
'','','','','','','','',
);
///////////////////////////////////////////////////////////////
// Puis on le retourne
return $this->getRender($this->urlArticle, $args);
}
public function getStrExpansions()
{
$expansionIds = unserialize($this->Mission->getWpPost()->getPostMeta('expansionIds'));
if ($expansionIds=='') {
if (self::isAdmin()) {
$strReturned = 'Wip Extensions';
} else {
$strReturned = '';
}
} else {
$strReturned = implode(', ', $expansionIds);
}
return $strReturned;
}
public function getStrTiles()
{
$strTileIds = $this->Mission->getWpPost()->getPostMeta('tileIds');
if ($strTileIds=='') {
$strTileIds = (self::isAdmin() ? 'Wip Tiles' : '');
}
return $strTileIds;
}
public function getLinkedDifficulty()
{ return '<a href="/tag/'.strtolower($this->getStrDifficulty()).'">'.$this->getStrDifficulty().'</a>'; }
private function getStrDifficulty()
{
$strLevel = $this->Mission->getWpPost()->getPostMeta(self::FIELD_LEVELID);
if ($strLevel=='') {
$strLevel = $this->Mission->getLevel()->getName();
}
return $strLevel;
}
public function getStrNbJoueurs()
{
$strPlayers = $this->Mission->getWpPost()->getPostMeta(self::FIELD_PLAYERID);
if ($strPlayers=='') {
$strPlayers = (self::isAdmin() ? 'Wip Nb' : '');
}
return $strPlayers.' Survivants';
}
public function getLinkedDuration()
{ return '<a href="/tag/'.strtolower(str_replace(' ', '-', $this->getStrDuree())).'">'.$this->getStrDuree().'</a>'; }
private function getStrDuree()
{
$strDuree = $this->Mission->getWpPost()->getPostMeta(self::FIELD_DURATIONID);
return ($strDuree=='' ? $this->getMission()->getDuration()->getStrDuree() : $strDuree.' minutes');
}
private function getStrOrigine()
{
$str = $this->Mission->getStrOrigine();
if (empty($str)) {
$str = 'TODO';
}
return $str;
}
// Fin des extras pour l'affichage d'un article de la Home
///////////////////////////////////////////////////////////////
public function getObjRules()
{
$WpPosts = $this->WpPostServices->getWpPostsByCustomField(self::FIELD_MISSIONID, $this->Mission->getWpPost()->getID());
$nbObjs = 0;
$nbRules = 0;
while (!empty($WpPosts)) {
$WpPost = array_shift($WpPosts);
$WpCategories = $WpPost->getCategories();
while (!empty($WpCategories)) {
$WpCategory = array_shift($WpCategories);
if ($WpCategory->getTermTaxonomyId()==self::WP_CAT_OBJECTIVE_ID) {
$nbObjs++;
} elseif ($WpCategory->getTermTaxonomyId()==self::WP_CAT_RULE_ID) {
$nbRules++;
}
}
}
return $nbObjs.' Objectifs<br>'.$nbRules.' Règles Spéciales';
}
protected $urlTemplateExtract = 'web/pages/public/fragments/mission-article.php';
protected $urlTemplateHome = 'web/pages/public/fragments/mission-article-home.php';
protected $strModelObjRules = '<li class="objRule hasTooltip"><span class="tooltip"><header>%1$s</header><div>%2$s</div></span></li>';
protected $h5Ul = '<h5>%1$s</h5><ul>%2$s</ul>';
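// Composition sketch of the two templates above for a single objective
// (title and body text are placeholders):
// <h5>Objectifs</h5><ul><li class="objRule hasTooltip"><span class="tooltip">
// <header>Ouvrir la porte bleue</header><div>Texte de l'objectif</div></span></li></ul>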
/**
* Class par défaut du Select
* @var $classe
*/
public $classe = 'custom-select custom-select-sm filters';
private function getMissionContentObjRules($categId, $label)
{
$WpPosts = $this->WpPostServices->getWpPostsByCustomField(self::FIELD_MISSIONID, $this->Mission->getWpPost()->getID());
$strObj = array();
while (!empty($WpPosts)) {
$WpPost = array_shift($WpPosts);
$WpCategories = $WpPost->getCategories();
$isObj = false;
while (!empty($WpCategories)) {
$WpCategory = array_shift($WpCategories);
if ($WpCategory->getCatId()==$categId) {
$isObj = true;
}
}
if ($isObj) {
$rank = $WpPost->getPostMeta('rang');
$strObj[$rank] = vsprintf($this->strModelObjRules, array($WpPost->getPostTitle(), $WpPost->getPostContent()));
}
}
if (!empty($strObj)) {
ksort($strObj);
return vsprintf($this->h5Ul, array($label, implode('', $strObj)));
} else {
return vsprintf($this->h5Ul, array($label, 'Non saisis pour le moment.'));
}
}
public function getMissionContentObjectives()
{ return $this->getMissionContentObjRules(self::WP_CAT_OBJECTIVE_ID, 'Objectifs'); }
public function getMissionContentRules()
{ return $this->getMissionContentObjRules(self::WP_CAT_RULE_ID, 'Regles speciales'); }
/**
* @return Mission
*/
public function getMission()
{ return $this->Mission; }
}
<file_sep>/core/bean/WpPostSurvivorBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPostSurvivorBean
* @author Hugues
* @since 1.04.00
* @version 1.07.19
*/
class WpPostSurvivorBean extends WpPostBean
{
protected $urlTemplate = 'web/pages/public/wppage-survivor.php';
protected $arrLvls = array(1=>'S', 2=>'Z', 3=>'U', 4=>'UZ');
protected $tplPortraitAndSkills = '<div class="col-2" style="margin-bottom:5px;">%1$s</div><div class="col-10" style="margin-bottom:5px;">%2$s</div>';
/**
* Class Constructor
*/
public function __construct($WpPost)
{
parent::__construct();
$this->SurvivorServices = new SurvivorServices();
$this->WpPost = $WpPost;
$postMetas = $this->WpPost->getPostMetas();
$survivorId = $postMetas[self::FIELD_SURVIVORID][0];
$this->Survivor = $this->SurvivorServices->selectSurvivor($survivorId);
}
/**
* @return string
*/
public function displayWpPost()
{ return $this->Survivor->getBean()->getContentForHome(); }
/**
* On retourne la page dédiée au Survivant.
* @return string
*/
public function getContentPage()
{
// On va construire les lignes portrait + compétences
//
$strPortraitsSkills = '';
if ($this->Survivor->isStandard()) {
$arg = array(
// Le portrait - 1
$this->Survivor->getBean()->getPortrait(),
// Les Compétences - 2
$this->Survivor->getBean()->getSkills(),
);
$strPortraitsSkills .= vsprintf($this->tplPortraitAndSkills, $arg);
}
if ($this->Survivor->isZombivor()) {
$arg = array(
// Le portrait - 1
$this->Survivor->getBean()->getPortrait('z'),
// Les Compétences - 2
$this->Survivor->getBean()->getSkills('z'),
);
$strPortraitsSkills .= vsprintf($this->tplPortraitAndSkills, $arg);
}
if ($this->Survivor->isUltimate()) {
$arg = array(
// Le portrait - 1
$this->Survivor->getBean()->getPortrait('u'),
// Les Compétences - 2
$this->Survivor->getBean()->getSkills('u'),
);
$strPortraitsSkills .= vsprintf($this->tplPortraitAndSkills, $arg);
}
if ($this->Survivor->isUltimatez()) {
$arg = array(
// Le portrait - 1
$this->Survivor->getBean()->getPortrait('uz'),
// Les Compétences - 2
$this->Survivor->getBean()->getSkills('uz'),
);
$strPortraitsSkills .= vsprintf($this->tplPortraitAndSkills, $arg);
}
//////////////////////////////////////////////////////////////////
// On enrichit le template puis on le restitue.
$args = array(
// Nom du Survivant - 1
$this->Survivor->getName(),
// Background du Survivant - 2
$this->Survivor->getBackground(),
// Les lignes portrait + compétences - 3
$strPortraitsSkills,
// Lien de navigation - 4
$this->getNavLinks(),
);
return $this->getRender($this->urlTemplate, $args);
}
private function getNavLinks()
{
//////////////////////////////////////////////////////////////////
// On construit les liens de navigation
// On récupère tous les Survivants, classées par ordre alphabétique.
$Survivors = $this->SurvivorServices->getSurvivorsWithFilters();
$firstSurvivor = null;
while (!empty($Survivors)) {
$Survivor = array_shift($Survivors);
// On les parcourt jusqu'à trouver la courante.
if ($Survivor->getId()==$this->Survivor->getId()) {
break;
}
if ($firstSurvivor==null) {
$firstSurvivor = $Survivor;
}
$prevSurvivor = $Survivor;
}
$nextSurvivor = array_shift($Survivors);
if (empty($prevSurvivor)) {
$prevSurvivor = array_pop($Survivors);
}
if (empty($nextSurvivor)) {
$nextSurvivor = $firstSurvivor;
}
$nav = '';
// On exploite la précédente et la suivante.
if (!empty($prevSurvivor)) {
$attributes = array(self::ATTR_HREF=>$prevSurvivor->getWpPost()->getPermalink(), self::ATTR_CLASS=>'adjacent-link col-3');
$nav .= $this->getBalise(self::TAG_A, '« '.$prevSurvivor->getWpPost()->getPostTitle(), $attributes);
}
if (!empty($nextSurvivor)) {
$attributes = array(self::ATTR_HREF=>$nextSurvivor->getWpPost()->getPermalink(), self::ATTR_CLASS=>'adjacent-link col-3');
$nav .= $this->getBalise(self::TAG_A, $nextSurvivor->getWpPost()->getPostTitle().' »', $attributes);
}
return $nav;
}
/**
* @return Survivor
*/
public function getSurvivor()
{ return $this->Survivor; }
}
<file_sep>/core/services/MissionTileServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe MissionTileServices
* @author Hugues.
* @since 1.04.07
* @version 1.07.25
*/
class MissionTileServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
* @var MissionTileDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new MissionTileDaoImpl();
}
/**
* @param array $arrFilters
*/
private function buildFilters($arrFilters)
{
$this->arrParams[self::SQL_WHERE] = array();
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_MISSIONID));
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_COORDX));
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_COORDY));
}
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getMissionTilesWithFilters($arrFilters=array(), $orderby=self::FIELD_ID, $order=self::ORDER_ASC)
{
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
$this->buildFilters($arrFilters);
return $this->Dao->selectEntriesWithFilters(__FILE__, __LINE__, $this->arrParams);
}
public function insertMissionTile($MissionTile)
{ return $this->insert(__FILE__, __LINE__, $MissionTile); }
}
<file_sep>/core/domain/EquipmentKeyword.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe EquipmentKeyword
* @author Hugues.
* @version 1.0.00
* @since 1.0.00
*/
class EquipmentKeyword extends LocalDomain
{
/**
* Id technique de la jointure
* @var int $id
*/
protected $id;
/**
* Id technique de la carte Equipement
* @var int $equipmentCardId
*/
protected $equipmentCardId;
/**
* Id technique du profil de l'arme
* @var int $keywordId
*/
protected $keywordId;
/**
* @return int
*/
public function getId()
{ return $this->id; }
/**
* @return int
*/
public function getEquipmentCardId()
{ return $this->equipmentCardId; }
/**
* @ return int
*/
public function getKeywordId()
{ return $this->keywordId; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id=$id; }
/**
* @param int $equipmentCardId
*/
public function setEquipmentCardId($equipmentCardId)
{ $this->equipmentCardId = $equipmentCardId; }
/**
* @param int $keywordId
*/
public function setKeywordId($keywordId)
{ $this->keywordId = $keywordId; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('EquipmentKeyword'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return EquipmentKeyword
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new EquipmentKeyword(), self::getClassVars(), $row); }
/**
* @return Keyword
*/
public function getKeyword()
{
if ($this->Keyword == null) {
$this->Keyword = $this->KeywordServices->selectKeyword($this->keywordId);
}
return $this->Keyword;
}
}
<file_sep>/core/services/EquipmentServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe EquipmentServices
* @author Hugues.
* @since 1.04.15
* @version 1.04.27
*/
class EquipmentServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
* @var EquipmentDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new EquipmentDaoImpl();
}
/**
* @param int $equipmentId
* @return Equipment
*/
public function selectEquipment($equipmentId)
{ return $this->select(__FILE__, __LINE__, $equipmentId); }
}
<file_sep>/core/bean/AdminPageSkillsBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* AdminPageSkillsBean
* @author Hugues
* @since 1.00.00
* @version 1.05.07
*/
class AdminPageSkillsBean extends AdminPageBean
{
protected $tplHomeCheckCard = 'web/pages/admin/fragments/home-check-card.php';
protected $urlSkillListing = 'web/pages/admin/skill-listing.php';
/**
* Class Constructor
*/
public function __construct($urlParams='')
{
$this->urlParams = $urlParams;
parent::__construct(self::CST_SKILL);
$this->title = 'Compétences';
$this->SkillServices = new SkillServices();
}
/**
* @param array $urlParams
* @return $Bean
*/
public function getSpecificContentPage()
{
$strRows = '';
$nbPerPage = 15;
$curPage = $this->initVar(self::WP_CURPAGE, 1);
$orderby = $this->initVar(self::WP_ORDERBY, self::FIELD_NAME);
$order = $this->initVar(self::WP_ORDER, self::ORDER_ASC);
$Skills = $this->SkillServices->getSkillsWithFilters(array(), $orderby, $order);
$nbElements = count($Skills);
$nbPages = ceil($nbElements/$nbPerPage);
$curPage = max(1, min($curPage, $nbPages));
$DisplayedSkills = array_slice($Skills, ($curPage-1)*$nbPerPage, $nbPerPage);
if (!empty($DisplayedSkills)) {
foreach ($DisplayedSkills as $Skill) {
$SkillBean = new SkillBean($Skill);
$strRows .= $SkillBean->getRowForAdminPage();
}
}
$queryArg = array(
self::CST_ONGLET => self::CST_SKILL,
self::WP_ORDERBY => $orderby,
self::WP_ORDER => $order
);
// Pagination
$strPagination = $this->getPagination($queryArg, '', $curPage, $nbPages, $nbElements);
// Sorts
$queryArg[self::WP_ORDERBY] = self::FIELD_CODE;
$queryArg[self::WP_ORDER] = ($orderby==self::FIELD_CODE && $order==self::ORDER_ASC ? self::ORDER_DESC : self::ORDER_ASC);
$urlSortCode = $this->getQueryArg($queryArg);
$queryArg[self::WP_ORDERBY] = self::FIELD_NAME;
$queryArg[self::WP_ORDER] = ($orderby==self::FIELD_NAME && $order==self::ORDER_ASC ? self::ORDER_DESC : self::ORDER_ASC);
$urlSortTitle = $this->getQueryArg($queryArg);
$args = array(
// Liste des compétences affichées - 1
$strRows,
// Filtres - 2
'',
// Url pour créer une nouvelle Compétence - 3
'/wp-admin/post-new.php',
// Subs - 4
'',
// Pagination - 5
$strPagination,
// class pour le tri sur code - 6
($orderby==self::FIELD_CODE ? $order : self::ORDER_DESC),
// url pour le tri sur code - 7
$urlSortCode,
// class pour le tri sur title - 8
($orderby==self::FIELD_NAME ? $order : self::ORDER_DESC),
// url pour le tri sur title - 9
$urlSortTitle,
'','','','','','','','','','','','',''
);
return $this->getRender($this->urlSkillListing, $args);
}
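// Worked example of the pagination above (illustrative addition; the numbers are hypothetical):
// with 47 Skills and $nbPerPage = 15, $nbPages = ceil(47/15) = 4; asking for $curPage = 9 is clamped
// to 4 by max(1, min($curPage, $nbPages)), and array_slice($Skills, 45, 15) then returns the last 2 rows.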
/**
* @return string
*/
public function getCheckCard()
{
/////////////////////////////////////////////////
// Gestion des Compétences.
// On récupère la liste des Compétences qui ont un Article. Puis les données dans la base. On compare et on effectue un diagnostic.
$Act = new SkillActions();
$strBilan = $Act->dealWithSkillVerif();
$args = array(
// Le titre de la carte - 1
$this->title,
// L'id du container de retour pour afficher les vérifications - 2
self::CST_SKILL,
// Le contenu du container de vérification - 3
$strBilan,
);
return $this->getRender($this->tplHomeCheckCard, $args);
}
}
<file_sep>/core/services/OrigineServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe OrigineServices
* @author Hugues.
* @since 1.04.00
* @version 1.04.27
*/
class OrigineServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
* @var OrigineDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new OrigineDaoImpl();
}
/**
* @param array $arrFilters
*/
private function buildFilters($arrFilters)
{
$this->arrParams[self::SQL_WHERE] = array();
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_NAME));
}
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getOriginesWithFilters($arrFilters=array(), $orderby=self::FIELD_NAME, $order=self::ORDER_ASC)
{
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
$this->buildFilters($arrFilters);
return $this->Dao->selectEntriesWithFilters(__FILE__, __LINE__, $this->arrParams);
}
/**
* @param int $id
* @return Origine
*/
public function selectOrigine($id)
{ return $this->select(__FILE__, __LINE__, $id); }
}
<file_sep>/core/bean/SpawnTypeBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe SpawnTypeBean
* @author Hugues
* @since 1.0.00
* @version 1.0.00
*/
class SpawnTypeBean extends LocalBean
{
/**
* Class Constructor
* @param SpawnType $SpawnType
*/
public function __construct($SpawnType='')
{
parent::__construct();
$this->SpawnType = ($SpawnType=='' ? new SpawnType() : $SpawnType);
}
/**
* @param string $tBodyButtons Template des Boutons de fin de ligne
* @return string
*/
public function getRowForAdminPage($tBodyButtons)
{
$SpawnType = $this->SpawnType;
$queryArg = array(
self::CST_ONGLET=>'parametre',
self::CST_POSTACTION=>'edit',
'table'=>'spawntype',
'id'=>$SpawnType->getId()
);
$urlEdit = $this->getQueryArg($queryArg);
$queryArg[self::CST_POSTACTION] = 'trash';
$urlTrash = $this->getQueryArg($queryArg);
$args = array(
' ',
$urlEdit,
$urlTrash
);
$tBody = '<tr><td>'.$SpawnType->getId().'</td><td>'.$SpawnType->getName();
return $tBody.vsprintf($tBodyButtons, $args).'</tr>';
}
}
<file_sep>/core/bean/WpPageBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* WpPageBean
* @author Hugues
* @since 1.04.00
* @version 1.07.21
*/
class WpPageBean extends MainPageBean
{
protected $urlTemplateDropdown = 'web/pages/public/fragments/dropdown-nbperpages.php';
protected $urlTemplateNavPagination = 'web/pages/public/fragments/nav-pagination.php';
/**
* WpPost affiché
* @var WpPost $WpPage
*/
protected $WpPage;
/**
* @param string $post
*/
public function __construct($post='')
{
if ($post=='') {
$post = get_post();
}
if ($post!='') {
if (get_class($post)=='WpPost') {
$this->WpPage = $post;
} else {
$this->WpPage = WpPost::convertElement($post);
}
}
parent::__construct();
}
/**
* @return string|Error404PageBean
*/
public function getContentPage()
{
switch ($this->WpPage->getPostName()) {
case self::PAGE_EQUIPMENT :
$Bean = new WpPageEquipmentsBean($this->WpPage);
$strReturned = $Bean->getContentPage();
break;
case self::PAGE_EXTENSION :
$Bean = new WpPageExpansionsBean($this->WpPage);
$strReturned = $Bean->getContentPage();
break;
case 'page-travaux' :
$Bean = new WpPageMissionOnlineBean($this->WpPage);
$strReturned = $Bean->getContentPage();
break;
case self::PAGE_GENERATION_MAP :
$Bean = new WpPageToolsBean($this->WpPage);
$strReturned = $Bean->getRandomMapV2();
break;
case self::PAGE_MISSION :
$Bean = new WpPageMissionsBean($this->WpPage);
$strReturned = $Bean->getContentPage();
break;
case self::PAGE_ORDRE_PRIORITE :
$Bean = new WpPageToolsBean($this->WpPage);
$strReturned = $Bean->getPriorityOrderContent();
break;
case self::PAGE_PISTE_DE_DES :
$Bean = new WpPageToolsBean($this->WpPage);
$strReturned = $Bean->getThrowSomeDiceContent();
break;
case self::PAGE_SELECT_SURVIVORS :
$Bean = new WpPageToolsBean($this->WpPage);
$strReturned = $Bean->getSelectSurvivorsContent();
break;
case self::PAGE_SKILL :
case 'page-skills' :
$Bean = new WpPageSkillsBean($this->WpPage);
$strReturned = $Bean->getContentPage();
break;
case self::PAGE_SPAWN :
$Bean = new WpPageSpawnsBean($this->WpPage);
$strReturned = $Bean->getContentPage();
break;
case self::PAGE_SURVIVOR :
$Bean = new WpPageSurvivorsBean($this->WpPage);
$strReturned = $Bean->getContentPage();
break;
case self::PAGE_TILE :
$Bean = new WpPageTilesBean($this->WpPage);
$strReturned = $Bean->getContentPage();
break;
default :
if ($this->isAdmin()) {
echo "[[".$this->WpPage->getPostName()."]]";
}
$Bean = new WpPageError404Bean();
$strReturned = $Bean->getContentPage();
break;
}
return $strReturned;
}
/**
* {@inheritDoc}
* @see MainPageBean::getShellClass()
*/
public function getShellClass()
{ return ''; }
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
protected function getDropdownNbPerPages()
{
//////////////////////////////////////////////////////////////////
// On enrichit le template puis on le restitue.
$args = array(
($this->nbperpage==10 ? self::CST_SELECTED : ''),
($this->nbperpage==25 ? self::CST_SELECTED : ''),
($this->nbperpage==50 ? self::CST_SELECTED : ''),
);
return $this->getRender($this->urlTemplateDropdown, $args);
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Méthodes relatives à la pagination
/**
* Retourne le bloc de pagination complet
* @return string
*/
protected function getNavPagination()
{
/////////////////////////////////////////////////////////////////////////////
// On construit les liens de la pagination.
$strPagination = $this->getPaginateLis($this->paged, $this->nbPages);
/////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////
// On enrichit le template puis on le restitue.
$args = array(
// Nb Total - 1
$this->nbElements,
// Si page 1, on peut pas revenir à la première - 2
($this->paged==1 ? ' '.self::CST_DISABLED : ''),
// Liste des éléments de la Pagination - 3
$strPagination,
// Si page $nbPages, on peut pas aller à la dernière - 4
($this->paged==$this->nbPages ? ' '.self::CST_DISABLED : ''),
// Nombre de pages - 5
$this->nbPages,
// S'il n'y a qu'une page, la pagination ne sert à rien - 6
($this->nbPages<=1 ? ' '.self::CST_HIDDEN : ''),
);
return $this->getRender($this->urlTemplateNavPagination, $args);
}
/**
* Retourne la liste des liens numérotés d'une pagination
* @param int $curPage Page courante
* @param int $nbPages Nombre de pages
* @return string
*/
private function getPaginateLis($curPage, $nbPages)
{
$strPagination = '';
//////////////////////////////////////////////////////////////////////////
// On renseigne la page 1
$strPagination .= $this->buildPaginationElement(1, $curPage);
//////////////////////////////////////////////////////////////////////////
$hasPrevIgnore = false;
$hasNextIgnore = false;
for ($i=2; $i<$nbPages; $i++) {
if ($i<$curPage-1) {
if (!$hasPrevIgnore) {
$strPagination .= $this->buildPaginationElement('...', '...');
}
$hasPrevIgnore = true;
} elseif ($i>$curPage+1) {
if (!$hasNextIgnore) {
$strPagination .= $this->buildPaginationElement('...', '...');
}
$hasNextIgnore = true;
} else {
$strPagination .= $this->buildPaginationElement($i, $curPage);
}
}
//////////////////////////////////////////////////////////////////////////
// On renseigne la dernière page
return $strPagination.$this->buildPaginationElement($nbPages, $curPage);
}
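// Worked example (illustrative addition, values hypothetical): with $curPage = 6 and $nbPages = 12,
// the loop keeps pages 1, 5, 6, 7 and 12 and collapses the two skipped ranges into single '...' entries,
// producing « 1 ... 5 [6] 7 ... 12 », where [6] carries the 'active' class set in buildPaginationElement().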
private function buildPaginationElement($i, $curPage)
{
$attributes = array(
self::ATTR_CLASS => 'page-link '.self::CST_AJAXACTION,
self::ATTR_HREF => '#',
self::ATTR_DATA_PAGED => $i,
self::ATTR_DATA_AJAXACTION => self::AJAX_PAGED,
);
$label = $this->getBalise(self::TAG_A, $i, $attributes);
if ($i=='...') {
$attrClass = ' '.self::CST_DISABLED;
} elseif ($i==$curPage) {
$attrClass = ' '.self::CST_ACTIVE;
} else {
$attrClass = '';
}
$argsBalise = array(self::ATTR_CLASS => 'page-item'.$attrClass);
return $this->getBalise(self::TAG_LI, $label, $argsBalise);
}
// Fin des méthodes relatives à la pagination
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/**
* @param array $post
* @param string $fieldNameTitle
*/
public function setBeanFilters($post=null, $fieldNameTitle='')
{
$this->arrFilters = array();
if (isset($post[self::CST_FILTERS])) {
$arrParams = explode('&', $post[self::CST_FILTERS]);
while (!empty($arrParams)) {
$arrParam = array_shift($arrParams);
list($key, $value) = explode('=', $arrParam);
if ($value!='') {
$this->arrFilters[$key]= $value;
}
}
}
$this->paged = (isset($post[self::AJAX_PAGED]) ? $post[self::AJAX_PAGED] : 1);
$this->colSort = (isset($post[self::CST_COLSORT]) ? $post[self::CST_COLSORT] : $fieldNameTitle);
$this->colOrder = (isset($post[self::CST_COLORDER]) ? $post[self::CST_COLORDER] : self::ORDER_ASC);
$this->nbperpage = (isset($post[self::CST_NBPERPAGE]) ? $post[self::CST_NBPERPAGE] : 10);
}
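// Example of the expected $post payload (illustrative addition; the keys follow the constants used above,
// the values are hypothetical):
//   $post = array(
//       self::CST_FILTERS => 'levelId=2&title=',   // empty values are dropped, so only levelId is kept
//       self::AJAX_PAGED => 3,
//       self::CST_NBPERPAGE => 25,
//   );
//   $Bean->setBeanFilters($post, self::FIELD_TITLE); // colSort falls back to FIELD_TITLE, colOrder to ASC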
/**
* @return string
*/
public function getBeanExpansionFilters($expansionId='', $fieldToCheck=0)
{
$selExpansionsId = explode(',', $expansionId);
$Expansions = $this->ExpansionServices->getExpansionsWithFilters();
$strReturned = '';
while (!empty($Expansions)) {
$Expansion = array_shift($Expansions);
if ($Expansion->getField($fieldToCheck)==0)
{ continue; }
$strReturned .= $this->getOption($Expansion->getId(), $Expansion->getName(), $selExpansionsId);
}
return $strReturned;
}
/**
* @return string
*/
public function getBeanSkillFilters($color='', $skillId='')
{
switch ($color) {
case self::COLOR_BLUE :
$label = 'Bleues';
$tagLevelIds = '10,11';
break;
case self::COLOR_YELLOW :
$label = 'Jaunes';
$tagLevelIds = '20';
break;
case self::COLOR_ORANGE :
$label = 'Oranges';
$tagLevelIds = '30,31';
break;
case self::COLOR_RED :
$label = 'Rouges';
$tagLevelIds = '40,41,42';
break;
default :
$label = 'Toutes';
$tagLevelIds = '';
break;
}
$strReturned = $this->getOption('', $label, $skillId);
if ($tagLevelIds!='') {
$filters = array(self::FIELD_TAGLEVELID=>$tagLevelIds);
$Skills = $this->SkillServices->getSkillsWithFiltersIn($filters);
} else {
$Skills = $this->SkillServices->getSkillsWithFilters();
}
while (!empty($Skills)) {
$Skill = array_shift($Skills);
$strReturned .= $this->getOption($Skill->getId(), $Skill->getName(), $skillId);
}
return $strReturned;
}
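// Colour / tagLevelId mapping used above, summarised for reference (illustrative addition):
// blue => 10,11 ; yellow => 20 ; orange => 30,31 ; red => 40,41,42 ; any other value lists every Skill.
// Example call (hypothetical id): $this->getBeanSkillFilters(self::COLOR_ORANGE, 12) preselects Skill 12
// among the orange-level Skills only.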
protected function getOption($value, $name, $selection=array())
{
$strOption = '<option value="'.$value.'"';
if (!is_array($selection)) {
$selection = array($selection);
}
if (in_array($value, $selection)) {
$strOption .= ' selected';
}
return $strOption.'>'.$name.'</option>';
}
}
<file_sep>/core/domain/WpPostRelais.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPostRelais
* @author Hugues.
* @since 1.05.12
* @version 1.05.12
*/
class WpPostRelais extends LocalDomain
{
public function __construct($attributes=array())
{ parent::__construct($attributes); }
////////////////////////////////////////////////////////////////////////////
// Méthodes relatives à l'article WpPost
/**
* @return WpPost
*/
public function getMainWpPost($metakey, $value, $categId)
{
if ($this->WpPost==null) {
$args = array(
self::WP_METAKEY => $metakey,
self::WP_METAVALUE => $value,
self::WP_TAXQUERY => array(),
self::WP_CAT => $categId,
);
if (MainPageBean::isAdmin()) {
$args[self::WP_POSTSTATUS] = self::WP_PUBLISH.', future';
}
$WpPosts = $this->WpPostServices->getArticles($args);
$this->WpPost = (!empty($WpPosts) ? array_shift($WpPosts) : new WpPost());
}
return $this->WpPost;
}
/**
* @return string
*/
public function getWpPostUrl()
{ return $this->getWpPost()->getPermalink(); }
/**
* @return string
*/
public function getWpPostEditUrl()
{ return ($this->getWpPost()->getID()!='' ? '/wp-admin/post.php?post='.$this->getWpPost()->getID().'&action=edit' : '/wp-admin/post-new.php'); }
////////////////////////////////////////////////////////////////////////////
/**
* @return string
*/
public function getEditUrl($onglet)
{
$queryArgs = array(
self::CST_ONGLET => $onglet,
self::CST_POSTACTION => self::CST_EDIT,
self::FIELD_ID => $this->getId()
);
return $this->getQueryArg($queryArgs);
}
}
<file_sep>/core/services/SurvivorServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe SurvivorServices
* @author Hugues.
* @since 1.04.27
* @version 1.05.07
*/
class SurvivorServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
* @var SurvivorDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new SurvivorDaoImpl();
}
/**
* @param array $arrFilters
*/
private function buildFilters($arrFilters)
{
$this->arrParams[self::SQL_WHERE] = array();
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayWideFilter($arrFilters, self::FIELD_NAME));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayFilter($arrFilters, self::FIELD_ZOMBIVOR));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayWideFilter($arrFilters, self::FIELD_ULTIMATE));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayFilter($arrFilters, self::FIELD_EXPANSIONID));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayFilter($arrFilters, self::FIELD_BACKGROUND));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayFilter($arrFilters, self::FIELD_LIVEABLE));
}
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getSurvivorsWithFilters($arrFilters=array(), $orderby=self::FIELD_NAME, $order=self::ORDER_ASC)
{
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
$this->buildFilters($arrFilters);
if (isset($arrFilters[self::FIELD_EXPANSIONID]) && strpos($arrFilters[self::FIELD_EXPANSIONID], ',')!==false) {
return $this->Dao->selectEntriesInExpansions($this->arrParams);
} else {
return $this->Dao->selectEntriesWithFilters(__FILE__, __LINE__, $this->arrParams);
}
}
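// Usage sketch (illustrative addition; the filter keys come from buildFilters() above, the values are
// hypothetical):
//   $Services = new SurvivorServices();
//   // single expansion: goes through selectEntriesWithFilters()
//   $Survivors = $Services->getSurvivorsWithFilters(array(self::FIELD_EXPANSIONID=>4));
//   // several expansions (comma-separated): routed to selectEntriesInExpansions()
//   $Survivors = $Services->getSurvivorsWithFilters(array(self::FIELD_EXPANSIONID=>'4,7'));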
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getSurvivorsWithFiltersIn($arrFilters=array(), $orderby=self::FIELD_NAME, $order=self::ORDER_ASC)
{
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
$this->buildFilters($arrFilters);
return $this->Dao->selectEntriesWithFiltersIn($this->arrParams, $arrFilters);
}
public function selectSurvivor($id)
{ return $this->select(__FILE__, __LINE__, $id); }
public function updateSurvivor($Survivor)
{ return $this->update(__FILE__, __LINE__, $Survivor); }
public function insertSurvivor($Survivor)
{ return $this->insert(__FILE__, __LINE__, $Survivor); }
}
<file_sep>/core/daoimpl/ExpansionDaoImpl.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe ExpansionDaoImpl
* @author Hugues.
* @version 1.0.00
* @since 1.0.00
*/
class ExpansionDaoImpl extends LocalDaoImpl
{
/**
* Class constructor
*/
public function __construct()
{ parent::__construct('Expansion'); }
/**
* @param array $rows
* @return array
*/
protected function convertToArray($rows)
{
$Items = array();
if (!empty($rows)) {
foreach ($rows as $row) {
$Items[] = Expansion::convertElement($row);
}
}
return $Items;
}
/**
* @param string $file
* @param int $line
* @param array $arrParams
* @return array|Expansion
*/
public function select($file, $line, $arrParams)
{ return parent::localSelect($file, $line, $arrParams, new Expansion()); }
/**
* @param string $file
* @param string $line
*/
protected function updateNbMissions($file, $line)
{
$subRequest = 'SELECT COUNT(*) FROM wp_11_zombicide_mission_expansion me WHERE me.expansionId=e.id';
$requete = 'UPDATE wp_11_zombicide_expansion e SET nbMissions = ('.$subRequest.');';
$this->createEditDeleteEntry($file, $line, $requete);
}
}
<file_sep>/core/services/MissionRuleServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe MissionRuleServices
* @author Hugues.
* @since 1.04.08
* @version 1.04.27
*/
class MissionRuleServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
* @var MissionRuleDaoImpl $Dao
*/
protected $Dao;
/**
* Class Constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new MissionRuleDaoImpl();
}
/**
* @param array $arrFilters
*/
private function buildFilters($arrFilters)
{
$this->arrParams[self::SQL_WHERE] = array();
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_MISSIONID));
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_RULEID));
array_push($this->arrParams[self::SQL_WHERE], $this->addFilter($arrFilters, self::FIELD_TITLE));
}
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getMissionRulesWithFilters($arrFilters=array(), $orderby=self::FIELD_ID, $order=self::ORDER_ASC )
{
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
$this->buildFilters($arrFilters);
return $this->Dao->selectEntriesWithFilters(__FILE__, __LINE__, $this->arrParams);
}
}
<file_sep>/core/bean/WpPageMissionsBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPageMissionsBean
* @author Hugues
* @since 1.04.01
* @version 1.05.01
*/
class WpPageMissionsBean extends WpPageBean
{
protected $urlTemplate = 'web/pages/public/wppage-missions.php';
/**
* Class Constructor
* @param WpPage $WpPage
*/
public function __construct($WpPage='')
{
parent::__construct($WpPage);
$this->DurationServices = new DurationServices();
$this->ExpansionServices = new ExpansionServices();
$this->LevelServices = new LevelServices();
$this->MissionServices = new MissionServices();
$this->OrigineServices = new OrigineServices();
$this->PlayerServices = new PlayerServices();
}
/**
* On vérifie si on est ici pour traiter la page des Missions, ou une Mission en particulier.
* Pour le cas d'une Mission, on retourne une WpPostMissionBean.
* @return string
*/
public function getContentPage()
{
$this->setFilters();
return $this->getListContentPage();
}
/**
* @return string
*/
public function getListContentPage()
{
/////////////////////////////////////////////////////////////////////////////
// On récupère la liste des Missions puis les éléments nécessaires à la pagination.
$Missions = $this->MissionServices->getMissionsWithFilters($this->arrFilters, $this->colSort, $this->colOrder);
$this->nbElements = count($Missions);
$this->nbPages = ceil($this->nbElements/$this->nbperpage);
// On slice la liste pour n'avoir que ceux à afficher
$displayedMissions = array_slice($Missions, $this->nbperpage*($this->paged-1), $this->nbperpage);
// On construit le corps du tableau
$strBody = '';
if (!empty($displayedMissions)) {
foreach ($displayedMissions as $Mission) {
$strBody .= $Mission->getBean()->getRowForPublicPage();
}
}
/////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////
// Affiche-t-on le filtre ?
$showFilters = !empty($this->arrFilters);
/////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////
// On enrichit le template puis on le restitue.
$args = array(
// On affiche les lignes du tableau - 1
$strBody,
// On affiche le dropdown par pages - 2
$this->getDropdownNbPerPages(),
// On affiche la pagination - 3
$this->getNavPagination(),
// Affiche ou non le bloc filtre - 4
($showFilters ? 'block' : 'none'),
// Si le Titre est renseigné - 5
$this->arrFilters[self::FIELD_TITLE],
// La liste des Difficultés - 6
$this->getLevelFilters($this->arrFilters[self::FIELD_LEVELID]),
// La liste des Survivants - 7
$this->getPlayerFilters($this->arrFilters[self::FIELD_PLAYERID]),
// La liste des Durée - 8
$this->getDurationFilters($this->arrFilters[self::FIELD_DURATIONID]),
// La liste des Origines - 9
$this->getOrigineFilters($this->arrFilters[self::FIELD_ORIGINEID]),
// Liste des Extensions - 10
$this->getExpansionFilters($this->arrFilters[self::FIELD_EXPANSIONID]),
);
return $this->getRender($this->urlTemplate, $args);
}
private function getExpansionFilters($expansionId='')
{ return parent::getBeanExpansionFilters($expansionId, self::FIELD_NBMISSIONS); }
private function getLevelFilters($levelId='')
{
$Levels = $this->LevelServices->getLevelsWithFilters();
$strReturned = '<option value="">Difficultés</option>';
while (!empty($Levels)) {
$Level = array_shift($Levels);
$strReturned .= $this->getOption($Level->getId(), $Level->getName(), $levelId);
}
return $strReturned;
}
private function getPlayerFilters($playerId='')
{
$Players = $this->PlayerServices->getPlayersWithFilters();
$strReturned = '<option value="">Survivants</option>';
while (!empty($Players)) {
$Player = array_shift($Players);
$strReturned .= $this->getOption($Player->getId(), $Player->getNbJoueurs(), $playerId);
}
return $strReturned;
}
private function getDurationFilters($durationId='')
{
$Durations = $this->DurationServices->getDurationsWithFilters();
$strReturned = '<option value="">Durées</option>';
while (!empty($Durations)) {
$Duration = array_shift($Durations);
$strReturned .= $this->getOption($Duration->getId(), $Duration->getStrDuree(), $durationId);
}
return $strReturned;
}
private function getOrigineFilters($origineId='')
{
$Origines = $this->OrigineServices->getOriginesWithFilters();
$strReturned = '<option value="">Origine</option>';
while (!empty($Origines)) {
$Origine = array_shift($Origines);
$strReturned .= $this->getOption($Origine->getId(), $Origine->getName(), $origineId);
}
return $strReturned;
}
/**
* @param array $post
*/
public function setFilters($post=null)
{ parent::setBeanFilters($post, self::FIELD_TITLE); }
}
<file_sep>/core/domain/EquipmentExpansion.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe EquipmentExpansion
* @author Hugues.
* @since 1.0.00
* @version 1.04.28
*/
class EquipmentExpansion extends LocalDomain
{
/**
* Id technique de la jointure
* @var int $id
*/
protected $id;
/**
* Id technique de la carte Equipement
* @var int $equipmentCardId
*/
protected $equipmentCardId;
/**
* Id technique de l'Expansion
* @var int $expansionId
*/
protected $expansionId;
/**
* Nombre de cartes dans l'extension
* @var int $quantity
*/
protected $quantity;
/**
* @param array $attributes
*/
public function __construct($attributes=array())
{
parent::__construct($attributes);
$this->EquipmentServices = new EquipmentServices();
}
/**
* @return int
*/
public function getId()
{ return $this->id; }
/**
* @return int
*/
public function getEquipmentCardId()
{ return $this->equipmentCardId; }
/**
* @return int
*/
public function getExpansionId()
{ return $this->expansionId; }
/**
* @return int
*/
public function getQuantity()
{ return $this->quantity; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id=$id; }
/**
* @param int $equipmentCardId
*/
public function setEquipmentCardId($equipmentCardId)
{ $this->equipmentCardId = $equipmentCardId; }
/**
* @param int $expansionId
*/
public function setExpansionId($expansionId)
{ $this->expansionId = $expansionId; }
/**
* @param int $quantity
*/
public function setQuantity($quantity)
{ $this->quantity = $quantity; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('EquipmentExpansion'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return EquipmentExpansion
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new EquipmentExpansion(), self::getClassVars(), $row); }
/**
* @return Equipment
*/
public function getEquipment()
{
if ($this->Equipment == null) {
$this->Equipment = $this->EquipmentServices->selectEquipment($this->equipmentCardId);
}
return $this->Equipment;
}
/**
* @param Equipment $Equipment
*/
public function setEquipment($Equipment)
{ $this->Equipment = $Equipment; }
}
<file_sep>/core/bean/SurvivorSkillBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe SurvivorSkillBean
* @author Hugues.
* @since 1.05.02
* @version 1.07.19
*/
class SurvivorSkillBean extends LocalBean
{
/**
* @param SurvivorSkill $SurvivorSkill
*/
public function __construct($SurvivorSkill=null)
{
parent::__construct();
$this->SurvivorSkill = ($SurvivorSkill==null ? new SurvivorSkill() : $SurvivorSkill);
}
public function getBadge($linked=false)
{
if ($linked) {
$tag = self::TAG_A;
$href = '/page-competences/?skillId='.$this->SurvivorSkill->getSkill()->getId();
$WpPost = $this->SurvivorSkill->getSkill()->getWpPost();
if ($WpPost->getID()!='') {
$href = $WpPost->getPermaLink();
}
$attributes = array(
self::ATTR_CLASS => 'badge badge-'.$this->getColor().'-skill',
self::ATTR_HREF => $href,
);
} else {
$tag = self::TAG_SPAN;
$attributes = array(
self::ATTR_CLASS => 'badge badge-'.$this->getColor().'-skill',
);
}
return $this->getBalise($tag, $this->SurvivorSkill->getSkill()->getName(), $attributes);
}
public function getColor()
{
switch ($this->SurvivorSkill->getTagLevelId()) {
case 20 :
$color = self::COLOR_YELLOW;
break;
case 30 :
case 31 :
$color = self::COLOR_ORANGE;
break;
case 40 :
case 41 :
case 42 :
$color = self::COLOR_RED;
break;
default :
$color = self::COLOR_BLUE;
break;
}
return $color;
}
}
<file_sep>/core/bean/WpPageSpawnsBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPageSpawnsBean
* @author Hugues
* @since 1.0.00
* @version 1.0.00
*/
class WpPageSpawnsBean extends WpPageBean
{
protected $urlTemplate = 'web/pages/public/wppage-spawncards.php';
/**
* Class Constructor
* @param WpPage $WpPage
*/
public function __construct($WpPage='')
{
parent::__construct($WpPage);
$this->ExpansionServices = new ExpansionServices();
$this->SpawnServices = new SpawnServices();
}
/**
* @return string
*/
public function getContentPage()
{ return $this->getListContentPage(); }
/**
* Retourne la liste des cartes Invasions
* @return string
*/
public function getListContentPage()
{
/////////////////////////////////////////////////////////////////////////////
// On récupère la liste de toutes les Extensions
$Expansions = $this->ExpansionServices->getExpansionsWithFilters(array(), self::FIELD_DISPLAYRANK);
$strFilters = '';
$strSpawns = '';
while (!empty($Expansions)) {
$Expansion = array_shift($Expansions);
// On récupère les cartes Invasions relatives à l'extension. S'il n'y en a pas, on passe à l'extension suivante.
$SpawnCards = $this->SpawnServices->getSpawnsWithFilters(array(self::FIELD_EXPANSIONID=>$Expansion->getId()));
if (empty($SpawnCards)) {
continue;
}
// On en profite aussi pour construire le bloc de filtres.
$strFilters .= $this->getBalise(self::TAG_OPTION, $Expansion->getName(), array(self::ATTR_VALUE => 'set-'.$Expansion->getId()));
// On ajoute chaque carte Invasion à la liste à afficher.
foreach ($SpawnCards as $SpawnCard) {
$strSpawns .= $SpawnCard->getBean()->displayCard();
}
}
//////////////////////////////////////////////////////////////////
// On enrichit le template puis on le restitue.
$args = array(
// La liste des cartes - 1
$strSpawns,
// Les filtres disponibles - 2
$strFilters,
);
return $this->getRender($this->urlTemplate, $args);
}
}
<file_sep>/core/bean/WpPostMissionBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPostMissionBean
* @author Hugues
* @since 1.00.00
* @version 1.07.25
*/
class WpPostMissionBean extends WpPostBean
{
protected $urlTemplate = 'web/pages/public/wppage-mission.php';
protected $urlImgBase = '/wp-content/plugins/hj-zombicide/web/rsc/img/missions/';
protected $urlTemplateArticle = 'web/pages/public/fragments/mission-article.php';
/**
* Constructeur
*/
public function __construct($missionId='')
{
parent::__construct();
$this->MissionServices = new MissionServices();
if ($missionId instanceof WpPost) {
$this->WpPost = $missionId;
$code = $this->WpPost->getPostMeta(self::FIELD_CODE);
$Missions = $this->MissionServices->getMissionsWithFilters(array(self::FIELD_CODE=>$code));
$this->Mission = array_shift($Missions);
} else {
$this->Mission = $this->MissionServices->selectMission($missionId);
}
}
/**
* @return string
*/
public function displayWpPost()
{ return $this->Mission->getBean()->getContentForHome(); }
/**
* @return string
*/
public function getContentPage()
{
//////////////////////////////////////////////////////////////////
// On enrichit le template puis on le restitue.
$args = array(
// On affiche la Mission demandée - 1
$this->getArticlePage(true),
// Liens de navigation - 2
$this->getNavLinks(),
// Contenu additionnel en bas de page - 3
'',
);
return $this->getRender($this->urlTemplate, $args);
}
public function getArticlePage($isWhole=false)
{
if ($isWhole) {
$classExtra = 'wholeArticle';
$imgExt = '-Missions.png';
} else {
$classExtra = '';
$imgExt = '-Thumb.png';
}
///////////////////////////////////////////////////////////////
// On enrichit le template et on le retourne.
$args = array(
// Titre de la Mission - 1
'['.$this->getMission()->getCode().'] - '.$this->WpPost->getPostTitle(),
// Synopsis - 2
$this->WpPost->getPostContent(),
// Extensions nécessaires - 3
$this->getMission()->getBean()->getStrExpansions(),
// Dalles nécessaires - 4
$this->getMission()->getBean()->getStrTiles(),
// Classe supplémentaire - 5
$classExtra,
// Url de la Mission... - 6
$this->WpPost->getPermalink(),
// Image de la Map éventuelle - 7
$this->urlImgBase.$this->getMission()->getCode().$imgExt,
// Lien vers la page des Missions - 8
'/'.self::PAGE_MISSION,
// Difficulté - 9
$this->getMission()->getBean()->getLinkedDifficulty(),
// Nb Survivants - 10
$this->getMission()->getBean()->getStrNbJoueurs(),
// Durée - 11
$this->getMission()->getBean()->getLinkedDuration(),
// Liste des Objectifs - 12
$this->getMission()->getBean()->getMissionContentObjectives(),
// Liste des Règles - 13
$this->getMission()->getBean()->getMissionContentRules(),
);
return $this->getRender($this->urlTemplateArticle, $args);
}
private function getNavLinks()
{
//////////////////////////////////////////////////////////////////
// On construit les liens de navigation
// On récupère toutes les missions, classées par ordre alphabétique.
$Missions = $this->MissionServices->getMissionsWithFilters(array(), self::FIELD_CODE);
$firstMission = null;
while (!empty($Missions)) {
$Mission = array_shift($Missions);
// On les parcourt jusqu'à trouver la courante.
if ($Mission->getId()==$this->Mission->getId()) {
break;
}
if ($firstMission==null) {
$firstMission = $Mission;
}
$prevMission = $Mission;
}
$nextMission = array_shift($Missions);
if (empty($prevMission)) {
$prevMission = array_pop($Missions);
}
if (empty($nextMission)) {
$nextMission = $firstMission;
}
$nav = '';
// On exploite la précédente et la suivante.
if (!empty($prevMission)) {
$attributes = array(self::ATTR_HREF=>$prevMission->getWpPost()->getPermalink(), self::ATTR_CLASS=>'adjacent-link col-3');
$nav .= $this->getBalise(self::TAG_A, '« '.$prevMission->getWpPost()->getPostTitle(), $attributes);
}
if (!empty($nextMission)) {
$attributes = array(self::ATTR_HREF=>$nextMission->getWpPost()->getPermalink(), self::ATTR_CLASS=>'adjacent-link col-3');
$nav .= $this->getBalise(self::TAG_A, $nextMission->getWpPost()->getPostTitle().' »', $attributes);
}
return $nav;
}
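// Worked example (illustrative addition, codes hypothetical): with Missions coded A01, A02, A03 and the
// current Mission being A01, the loop breaks immediately, $prevMission is still unset and is taken from
// array_pop() (A03), while $nextMission is A02; navigation therefore wraps around the alphabetical list.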
public function getMission()
{ return $this->Mission; }
}
<file_sep>/core/domain/Equipment.class.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe Equipment
* @author Hugues.
* @since 1.0.00
* @version 1.04.28
*/
class Equipment extends LocalDomain
{
/**
* Id technique de la donnée
* @var int $id
*/
protected $id;
/**
* Nom de la carte
* @var string $name
*/
protected $name;
/**
* Abilité spéciale de l'équipement
* @var string $textAbility
*/
protected $textAbility;
/**
* @return $id
*/
public function getId()
{ return $this->id; }
/**
* @return $name
*/
public function getName()
{ return $this->name; }
/**
* @return $textAbility
*/
public function getTextAbility()
{ return $this->textAbility; }
/**
* @param int $id
*/
public function setId($id)
{ $this->id=$id; }
/**
* @param string $name
*/
public function setName($name)
{ $this->name=$name; }
/**
* @param string $textAbility
*/
public function setTextAbility($textAbility)
{ $this->textAbility=$textAbility; }
/**
* @return array
*/
public function getClassVars()
{ return get_class_vars('Equipment'); }
/**
* @param array $row
* @param string $a
* @param string $b
* @return Equipment
*/
public static function convertElement($row, $a='', $b='')
{ return parent::convertElement(new Equipment(), self::getClassVars(), $row); }
/**
* @return string
*/
public function getNiceName()
{
$cleanDigits = array(' ', '#', '-', '!', 'à', 'é', "'", '(', ')', 'ê', 'ç', '&', '.', 'è');
return str_replace($cleanDigits, '', strtolower($this->getName()));
}
/**
* @param $expansionId
* @return string
*/
public function getImgUrl($expansionId='00')
{
$uniqueId = (str_pad($this->id, 3, '0', STR_PAD_LEFT)).(str_pad($expansionId, 2, '0', STR_PAD_LEFT));
$urlThumb = '/wp-content/plugins/zombicide/web/rsc/images/equipments/'.$uniqueId.'-thumb.jpg';
// Si l'image Thumb n'existe pas, on va la créer à partir de l'original. Puis on supprime l'original.
if (!is_file(getcwd().$urlThumb)) {
$urlOriginal = '/wp-content/plugins/zombicide/web/rsc/images/equipments/'.$uniqueId.'.png';
$adminUrl = getcwd().$urlOriginal;
$src = imagecreatefrompng($adminUrl);
$dst = imagecreatetruecolor(320, 440);
imagecopyresized($dst, $src, 0, 0, 0, 0, 320, 440, 597, 822);
imagejpeg($dst, getcwd().$urlThumb);
unlink($adminUrl);
}
return $urlThumb;
}
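// Example of the naming scheme above (illustrative addition, ids hypothetical): for $this->id = 7 and
// $expansionId = 3, $uniqueId is '00703' and the method returns
// '/wp-content/plugins/zombicide/web/rsc/images/equipments/00703-thumb.jpg', generating the 320x440 JPEG
// from '00703.png' on the first call (the source PNG is assumed to exist) and deleting the PNG afterwards.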
/**
* @return array EquipmentWeaponProfile
*/
public function getEquipmentWeaponProfiles()
{
if ($this->EquipmentWeaponProfiles == null) {
$arrF = array(self::FIELD_EQUIPMENTCARDID=>$this->id);
$this->EquipmentWeaponProfiles = $this->EWProfileServices->getEquipmentWeaponProfilesWithFilters($arrF);
}
return $this->EquipmentWeaponProfiles;
}
/**
* @return array EquipmenKeyword
*/
public function getEquipmentKeywords()
{
if ($this->EquipmentKeywords == null) {
$arrFilters = array(self::FIELD_EQUIPMENTCARDID=>$this->id);
$this->EquipmentKeywords = $this->EquipmentKeywordServices->getEquipmentKeywordsWithFilters($arrFilters);
}
return $this->EquipmentKeywords;
}
/**
* @return Keyword
*/
public function getKeyword()
{
if ($this->Keyword == null) {
$this->Keyword = $this->KeywordServices->selectKeyword($this->keywordId);
}
return $this->Keyword;
}
/**
* @return int
*/
public function getExpansionId()
{ return $this->expansionId; }
/**
* @return boolean
*/
public function isRanged()
{
if ($this->ranged == null) {
if ($this->EquipmentWeaponProfiles == null) {
$this->EquipmentWeaponProfiles = $this->getEquipmentWeaponProfiles();
}
if (empty($this->EquipmentWeaponProfiles)) {
$this->ranged = false;
} else {
$isRanged = false;
foreach ($this->EquipmentWeaponProfiles as $EquipmentWeaponProfile) {
$WeaponProfile = $EquipmentWeaponProfile->getWeaponProfile(__FILE__, __LINE__);
if ($WeaponProfile->getMaxRange()>0) {
$isRanged = true;
} else {
$this->melee = true;
}
}
$this->ranged = $isRanged;
}
}
return $this->ranged;
}
/**
* @return boolean
*/
public function isMelee()
{
if ($this->melee==null) {
if ($this->EquipmentWeaponProfiles == null) {
$this->EquipmentWeaponProfiles = $this->getEquipmentWeaponProfiles();
}
if (empty($this->EquipmentWeaponProfiles)) {
$this->melee = false;
} else {
$isMelee = false;
foreach ($this->EquipmentWeaponProfiles as $EquipmentWeaponProfile) {
$WeaponProfile = $EquipmentWeaponProfile->getWeaponProfile(__FILE__, __LINE__);
if ($WeaponProfile->getMaxRange()==0) {
$isMelee = true;
} else {
$this->ranged = true;
}
}
$this->melee = $isMelee;
}
}
return $this->melee;
}
/**
* @return boolean
*/
public function isPimp()
{ return $this->hasKeyword('Pimp'); }
/**
* @return boolean
*/
public function isStarter()
{ return $this->hasKeyword('Starter'); }
/**
* @return boolean
*/
public function isDual()
{ return $this->hasKeyword('Dual'); }
/**
* @param string $keyword
* @return boolean
*/
public function hasKeyword($keyword)
{
$hasKeyword = false;
if ($this->Keywords == null) {
$this->initKeywords();
}
if (!empty($this->Keywords)) {
foreach ($this->Keywords as $Keyword) {
if ($Keyword->getName()==$keyword) {
$hasKeyword = true;
}
}
}
return $hasKeyword;
}
private function initKeywords()
{
if ($this->Keywords==null) {
$EquipmentKeywords = $this->getEquipmentKeywords();
$ownKeyWords = array();
if (!empty($EquipmentKeywords)) {
foreach ($EquipmentKeywords as $EquipmentKeyword) {
array_push($ownKeyWords, $EquipmentKeyword->getKeyword(__FILE__, __LINE__));
}
}
$this->Keywords = empty($ownKeyWords) ? array(new Keyword()) : $ownKeyWords;
}
}
/**
* @param int $expansionId
*/
public function setExpansionId($expansionId)
{ $this->expansionId = $expansionId; }
}
<file_sep>/core/services/ExpansionServices.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe ExpansionServices
* @author Hugues.
* @since 1.04.00
* @version 1.07.21
*/
class ExpansionServices extends LocalServices
{
/**
* L'objet Dao pour faire les requêtes
* @var ExpansionDaoImpl $Dao
*/
protected $Dao;
/**
* Class constructor
*/
public function __construct()
{
parent::__construct();
$this->Dao = new ExpansionDaoImpl();
}
/**
* @param array $arrFilters
*/
private function buildFilters($arrFilters)
{
$this->arrParams[self::SQL_WHERE] = array();
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayFilter($arrFilters, self::FIELD_CODE));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayWideFilter($arrFilters, self::FIELD_NAME));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayFilter($arrFilters, self::FIELD_NBMISSIONS, 0));
array_push($this->arrParams[self::SQL_WHERE], $this->addNonArrayFilter($arrFilters, self::FIELD_NBSURVIVANTS, 0));
}
/**
* @param array $arrFilters
* @param string $orderby
* @param string $order
* @return array
*/
public function getExpansionsWithFilters($arrFilters=array(), $orderby=self::FIELD_NAME, $order=self::ORDER_ASC)
{
$this->arrParams = $this->buildOrderAndLimit($orderby, $order);
$this->buildFilters($arrFilters);
return $this->Dao->selectEntriesWithFilters(__FILE__, __LINE__, $this->arrParams);
}
/**
* @param int $id
* @return Expansion
*/
public function selectExpansion($id)
{ return $this->select(__FILE__, __LINE__, $id); }
public function updateExpansion($Expansion)
{ $this->update(__FILE__, __LINE__, $Expansion); }
public function insertExpansion($Expansion)
{ return $this->insert(__FILE__, __LINE__, $Expansion); }
}
<file_sep>/core/actions/MissionActions.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* MissionActions
* @author Hugues
* @since 1.02.00
* @version 1.07.25
*/
class MissionActions extends LocalActions
{
/**
* Constructeur
*/
public function __construct($post=array())
{
parent::__construct();
$this->post = $post;
$this->WpPostServices = new WpPostServices();
$this->DurationServices = new DurationServices();
$this->ExpansionServices = new ExpansionServices();
$this->LevelServices = new LevelServices();
$this->MissionServices = new MissionServices();
$this->MissionExpansionServices = new MissionExpansionServices();
$this->MissionTileServices = new MissionTileServices();
$this->OrigineServices = new OrigineServices();
$this->PlayerServices = new PlayerServices();
$this->TileServices = new TileServices();
}
/**
* Point d'entrée des méthodes statiques.
* @param array $post
* @return string
**/
public static function dealWithStatic($post)
{
$returned = '';
$Act = new MissionActions($post);
if ($post[self::CST_AJAXACTION]==self::AJAX_GETMISSIONS) {
$returned = $Act->dealWithGetMissions();
} elseif ($post[self::CST_AJAXACTION]==self::AJAX_MISSIONVERIF) {
$returned = $Act->dealWithMissionVerif(true);
} else {
$returned = '';
}
return $returned;
}
/**
* Récupération du contenu de la page via une requête Ajax.
* @param array $post
* @return string
*/
public function dealWithGetMissions()
{
$Bean = new WpPageMissionsBean();
$Bean->setFilters($this->post);
return $this->jsonString($Bean->getListContentPage(), self::PAGE_MISSION, true);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
// Bloc de gestion de vérifications des Missions en Home Admin
/**
* @param boolean $isVerif
* @return string
*/
public function dealWithMissionVerif($isVerif=false)
{
// On récupère les articles de missions
$args = array(
self::WP_CAT => self::WP_CAT_MISSION_ID,
self::WP_TAXQUERY => array(),
self::WP_POSTSTATUS => self::WP_PUBLISH.', future, pending',
);
$this->WpPostMissions = $this->WpPostServices->getArticles($args);
$nbWpPostMissions = count($this->WpPostMissions);
// Et les Missions en base
$this->Missions = $this->MissionServices->getMissionsWithFilters();
$nbMissions = count($this->Missions);
if ($isVerif) {
$this->checkMissions();
$strBilan = $this->jsonString($this->strBilan, self::AJAX_MISSIONVERIF, true);
} elseif ($nbWpPostMissions!=$nbMissions) {
$strBilan = "Le nombre d'articles ($nbWpPostMissions) ne correspond pas au nombre de Missions en base ($nbMissions).";
$strBilan .= "<br>Une vérification est vivement conseillée.";
} else {
$strBilan = "Le nombre d'articles ($nbWpPostMissions) correspond au nombre de Missions en base.";
}
return $strBilan;
}
private function checkMissions()
{
$hasErrors = false;
$strErrors = '';
$this->strBilan = "Début de l'analyse des données relatives aux Missions.<br>";
$this->strBilan .= "Il y a ".count($this->WpPostMissions)." articles de Missions.<br>";
$this->strBilan .= "Il y a ".count($this->Missions)." entrées en base.<br>";
/////////////////////////////////////////////////////////////////////
// On va réorganiser les Missions pour les retrouver facilement
$arrMissions = array();
while (!empty($this->Missions)) {
$Mission = array_shift($this->Missions);
if (isset($arrMissions[$Mission->getCode()])) {
$strErrors .= "Le code <em>".$Mission->getCode()."</em> semble être utilisé deux fois dans la base de données.<br>";
$hasErrors = true;
}
$arrMissions[$Mission->getCode()] = $Mission;
}
/////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////
while (!empty($this->WpPostMissions)) {
// On regarde les articles créés et on vérifie les données en base, si elles existent et si elles sont cohérentes entre elles.
// On récupère le WpPost et ses données
$this->WpPost = array_shift($this->WpPostMissions);
$code = $this->WpPost->getPostMeta(self::FIELD_CODE);
if (!isset($arrMissions[$code])) {
// A priori l'article n'a pas de code associé en base. Il faut donc en créé un qui corresponde
$Mission = new Mission();
$name = $this->WpPost->getPostTitle();
$Mission->setTitle($name);
$Mission->setCode($code);
// Set la Difficulté
$Mission->setLevelId($this->getWpPostLevelId());
// Set le Nb de Joueurs
$Mission->setPlayerId($this->getWpPostPlayerId());
// Set la Durée
$Mission->setDurationId($this->getWpPostDurationId());
// Set l'Origine
$Mission->setOrigineId($this->getWpPostOrigineId());
// Set Width & Height
list($width, $height) = explode(',', $this->getWpPostDimensions());
$Mission->setWidth($width);
$Mission->setHeight($height);
// On peut insérer
$this->MissionServices->insertMission($Mission);
$this->strBilan .= "L'article <em>".$name."</em> a été créé en base.<br>";
continue;
}
$Mission = $arrMissions[$code];
unset($arrMissions[$code]);
$this->checkMission($Mission);
}
/////////////////////////////////////////////////////////////////////
// On vérifie que la totalité des Missions en base ont été utilisées. Si ce n'est pas le cas, il faut créer des articles correspondants.
if (!empty($arrMissions)) {
$this->strBilan .= "On a des données en base qui n'ont pas d'article correspondant.<br>";
while (!empty($arrMissions)) {
$Mission = array_shift($arrMissions);
$this->strBilan .= '<br>Article à créer pour une Extension : '.$Mission->getName().' ['.$Mission->toJson().'].<br>';
}
}
/////////////////////////////////////////////////////////////////////
$this->strBilan .= "Fin de l'analyse des données relatives aux Missions.<br>";
if ($hasErrors) {
$this->strBilan .= "Anomalies constatées :<br>".$strErrors;
} else {
$this->strBilan .= "Aucune anomalie constatée.";
}
}
private function checkMission($Mission)
{
$doUpdate = false;
// On initialise les données de l'article
$name = $this->WpPost->getPostTitle();
// On vérifie si la donnée en base correspond à l'article.
$strError = '';
if ($Mission->getTitle()!=$name) {
$Mission->setTitle($name);
$strError .= "Le Titre a été mis à jour.<br>";
$doUpdate = true;
}
$levelId = $this->getWpPostLevelId();
if ($Mission->getLevelId()!=$levelId) {
$Mission->setLevelId($levelId);
$strError .= "La Difficulté a été mise à jour.<br>";
$doUpdate = true;
}
$playerId = $this->getWpPostPlayerId();
if ($Mission->getPlayerId()!=$playerId) {
$Mission->setPlayerId($playerId);
$strError .= "Le Nb de joueurs a été mis à jour.<br>";
$doUpdate = true;
}
$durationId = $this->getWpPostDurationId();
if ($Mission->getDurationId()!=$durationId) {
$Mission->setDurationId($durationId);
$strError .= "La Durée a été mise à jour.<br>";
$doUpdate = true;
}
$origineId = $this->getWpPostOrigineId();
if ($Mission->getOrigineId()!=$origineId) {
$Mission->setOrigineId($origineId);
$strError .= "L'Origine a été mise à jour.<br>";
$doUpdate = true;
}
list($width, $height) = explode(',', $this->getWpPostDimensions());
if ($Mission->getWidth()!=$width || $Mission->getHeight()!=$height) {
if ($width==0) {
$this->strBilan .= "Il ne semble pas y avoir de Dalles saisies dans le champ tileIds de l'Article <em>".$name."</em> <strong>".$Mission->getCode()."</strong>.<br>";
} elseif ($width==2 && $height==2 && $Mission->getWidth()!=$Mission->getHeight()) {
$this->strBilan .= "Les Dimensions (".$Mission->getWidth().", ".$Mission->getHeight().") => ($width, $height) ne peuvent pas être renseignées automatiquement pour l'Article <em>".$name."</em>.<br>";
} else {
$strError .= "Les Dimensions (".$Mission->getWidth().", ".$Mission->getHeight().") => ($width, $height) ont été mises à jour.<br>";
$Mission->setWidth($width);
$Mission->setHeight($height);
$doUpdate = true;
}
}
// On récupère les Extensions rattachées à l'article.
$arrExpansions = $this->getWpPostExpansions();
// On récupère les MissionExpansions rattachés à la Mission.
$MissionExpansions = $this->MissionExpansionServices->getMissionExpansionsWithFilters(array(self::FIELD_MISSIONID=>$Mission->getId()));
if (count($arrExpansions)==count($MissionExpansions)) {
//$this->strBilan .= "Le nombre entre les deux univers correspond. Il faudrait vérifier que ce sont bien les mêmes...<br>";
} else {
// On a une différence. On s'embête pas, on supprime les MissionExpansions existantes, on insère les nouvelles.
while (!empty($MissionExpansions)) {
$MissionExpansion = array_shift($MissionExpansions);
$this->MissionExpansionServices->deleteMissionExpansion($MissionExpansion);
}
$MissionExpansion = new MissionExpansion();
$MissionExpansion->setMissionId($Mission->getId());
while (!empty($arrExpansions)) {
$Expansion = array_shift($arrExpansions);
$MissionExpansion->setExpansionId($Expansion->getId());
$this->MissionExpansionServices->insertMissionExpansion($MissionExpansion);
}
}
if ($doUpdate) {
// Si nécessaire, on update en base.
$this->MissionServices->updateMission($Mission);
$this->strBilan .= "Les données de la Mission <em>".$name."</em> ont été mises à jour.<br>".$strError;
}
}
private function getWpPostExpansions()
{
$Expansions = array();
$expansionNames = unserialize($this->WpPost->getPostMeta('expansionIds'));
while (!empty($expansionNames)) {
$expansionName = array_shift($expansionNames);
$SearchedExpansions = $this->ExpansionServices->getExpansionsWithFilters(array(self::FIELD_NAME=>$expansionName));
if (empty($SearchedExpansions)) {
echo "[[ERROR : $expansionName ne correspond pas à une Extension.]]\r\n";
} else {
$Expansion = array_shift($SearchedExpansions);
array_push($Expansions, $Expansion);
}
}
return $Expansions;
}
private function getWpPostLevelId()
{
$levelId = $this->WpPost->getPostMeta(self::FIELD_LEVELID);
$Levels = $this->LevelServices->getLevelsWithFilters(array(self::FIELD_NAME=>$levelId));
$Level = array_shift($Levels);
return $Level->getId();
}
private function getWpPostPlayerId()
{
$playerId = $this->WpPost->getPostMeta(self::FIELD_PLAYERID);
$Players = $this->PlayerServices->getPlayersWithFilters(array(self::FIELD_NAME=>$playerId));
$Player = array_shift($Players);
return $Player->getId();
}
private function getWpPostDurationId()
{
$durationId = $this->WpPost->getPostMeta(self::FIELD_DURATIONID);
list($min, $max) = explode('-', $durationId);
$Durations = $this->DurationServices->getDurationsWithFilters(array(self::FIELD_MINDURATION=>$min, self::FIELD_MAXDURATION=>$max));
$Duration = array_shift($Durations);
return $Duration->getId();
}
private function getWpPostOrigineId()
{
$origineId = $this->WpPost->getPostMeta(self::FIELD_ORIGINEID);
$Origines = $this->OrigineServices->getOriginesWithFilters(array(self::FIELD_NAME=>$origineId));
$Origine = array_shift($Origines);
return $Origine->getId();
}
private function getWpPostDimensions()
{
$tileCodes = $this->WpPost->getPostMeta('tileIds');
$arrTileIds = explode(', ', str_replace(' &', ',', $tileCodes));
switch (count($arrTileIds)) {
case 2 :
case 3 :
$width = count($arrTileIds);
$height = 1;
break;
case 4 :
case 6 :
$width = count($arrTileIds)/2;
$height = 2;
break;
case 5 :
$width = 1;
$height = count($arrTileIds);
break;
case 8 :
case 10 :
$width = 2;
$height = count($arrTileIds)/2;
break;
case 9 :
$width = 3;
$height = 3;
break;
case 12 :
$width = 4;
$height = 3;
break;
default :
$width = 0;
$height = 0;
break;
}
return $width.','.$height;
}
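// Worked example of the mapping above (illustrative addition, tile codes hypothetical): an article whose
// 'tileIds' meta reads '1B, 2C & 5D, 4E, 6B, 3C' yields 6 codes, hence '3,2' (width 3, height 2);
// unexpected counts such as 7 fall into the default branch and return '0,0', which checkMission()
// reports as missing tiles.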
// Fin du bloc relatif à la vérification des Missions sur la Home Admin.
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////
}
<file_sep>/core/bean/AdminPageBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe AdminPageBean
* @author Hugues
* @since 1.00.00
* @version 1.05.12
*/
class AdminPageBean extends MainPageBean
{
protected $urlFragmentPagination = 'web/pages/admin/fragments/fragment-pagination.php';
protected $tplHomeAdminBoard = 'web/pages/admin/home-admin-board.php';
/**
* Backup Cron Table
*/
const WP_DB_BACKUP_CRON = 'wp_db_backup_cron';
/**
* @param string $tag
*/
public function __construct($tag='')
{
parent::__construct();
$this->urlParams = array();
$this->analyzeUri();
$this->tableName = 'wp_11_zombicide_'.$tag;
$this->tplAdminerUrl = 'http://zombicide.jhugues.fr/wp-content/plugins/adminer/inc/adminer/loader.php';
$this->tplAdminerUrl .= '?username=dbo507551204&db=db507551204&table='.$this->tableName;
}
/**
* @return string
*/
public function analyzeUri()
{
$uri = $_SERVER['REQUEST_URI'];
$pos = strpos($uri, '?');
if ($pos!==false) {
$arrParams = explode('&', substr($uri, $pos+1, strlen($uri)));
if (!empty($arrParams)) {
foreach ($arrParams as $param) {
list($key, $value) = explode('=', $param);
$this->urlParams[$key] = $value;
}
}
$uri = substr($uri, 0, $pos);
}
$pos = strpos($uri, '#');
if ($pos!==false) {
$this->anchor = substr($uri, $pos+1, strlen($uri));
}
if (isset($_POST)) {
foreach ($_POST as $key => $value) {
$this->urlParams[$key] = $value;
}
}
return $uri;
}
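// Example (illustrative addition, values hypothetical): for a request to
// '/wp-admin/admin.php?page=zombicide&onglet=mission&id=12', $this->urlParams becomes
// array('page'=>'zombicide', 'onglet'=>'mission', 'id'=>'12') and the returned $uri is
// '/wp-admin/admin.php'; POSTed fields are then merged on top of the query-string values.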
/**
* @return string
*/
public function getContentPage()
{
$strReturned = 'Need to be an admin';
if (self::isAdmin()) {
if (!isset($this->urlParams[self::CST_ONGLET])) {
$strReturned = $this->getHomeContentPage();
} elseif ($this->urlParams[self::CST_ONGLET]==self::CST_SKILL) {
$Bean = new AdminPageSkillsBean($this->urlParams);
$strReturned = $Bean->getSpecificContentPage();
} elseif ($this->urlParams[self::CST_ONGLET]==self::CST_SURVIVOR) {
$Bean = new AdminPageSurvivorsBean($this->urlParams);
$strReturned = $Bean->getSpecificContentPage();
} elseif ($this->urlParams[self::CST_ONGLET]==self::CST_EXPANSION) {
$Bean = new AdminPageExpansionsBean($this->urlParams);
$strReturned = $Bean->getSpecificContentPage();
} elseif ($this->urlParams[self::CST_ONGLET]==self::CST_MISSION) {
$Bean = new AdminPageMissionsBean($this->urlParams);
$strReturned = $Bean->getSpecificContentPage();
} else {
$strReturned = "Need to add <b>".$this->urlParams[self::CST_ONGLET]."</b> to AdminPageBean > getContentPage().";
}
}
return $strReturned;
}
/**
* @return string
*/
public function getHomeContentPage()
{
/////////////////////////////////////////////////
// Gestion des Cartes.
// On récupère les cartes qu'on souhaite afficher sur la Home
// La carte relative aux compétences
$Bean = new AdminPageSkillsBean();
$lstCards = $Bean->getCheckCard();
// La carte relative aux extensions
$Bean = new AdminPageExpansionsBean();
$lstCards .= $Bean->getCheckCard();
// La carte relative aux Survivants
$Bean = new AdminPageSurvivorsBean();
$lstCards .= $Bean->getCheckCard();
// La carte relative aux Missions
$Bean = new AdminPageMissionsBean();
$lstCards .= $Bean->getCheckCard();
$args = array(
// La liste des Cartes affichées sur le panneau d'accueil de la Home - 1
$lstCards,
);
return $this->getRender($this->tplHomeAdminBoard, $args);
}
/**
* @param unknown $queryArg
* @param unknown $post_status
* @param unknown $curPage
* @param unknown $nbPages
* @param unknown $nbElements
* @return string
*/
protected function getPagination($queryArg, $post_status, $curPage, $nbPages, $nbElements)
{
////////////////////////////////////////////////////////////////////////////
// Lien vers la première page. Seulement si on n'est ni sur la première, ni sur la deuxième page.
if ($curPage>=3) {
$queryArg[self::CST_CURPAGE] = 1;
$strToFirst = '<a class="first-page button" href="'.$this->getQueryArg($queryArg).'"><span aria-hidden="true">«</span></a>';
} else {
$strToFirst = '<span class="tablenav-pages-navspan button disabled" aria-hidden="true">«</span>';
}
////////////////////////////////////////////////////////////////////////////
// Lien vers la page précédente. Seulement si on n'est pas sur la première.
if ($curPage>=2) {
$queryArg[self::CST_CURPAGE] = $curPage-1;
$strToPrevious = '<a class="prev-page button" href="'.$this->getQueryArg($queryArg).'"><span aria-hidden="true">‹</span></a>';
} else {
$strToPrevious = '<span class="tablenav-pages-navspan button disabled" aria-hidden="true">‹</span>';
}
////////////////////////////////////////////////////////////////////////////
// Lien vers la page suivante. Seulement si on n'est pas sur la dernière.
if ($curPage<$nbPages) {
$queryArg[self::CST_CURPAGE] = $curPage+1;
$strToNext = '<a class="next-page button" href="'.$this->getQueryArg($queryArg).'"><span aria-hidden="true">›</span></a>';
} else {
$strToNext = '<span class="tablenav-pages-navspan button disabled" aria-hidden="true">›</span>';
}
////////////////////////////////////////////////////////////////////////////
// Lien vers la dernière page. Seulement si on n'est pas sur la dernière, ni l'avant-dernière.
if ($curPage<$nbPages-1) {
$queryArg[self::CST_CURPAGE] = $nbPages;
$strToLast = '<a class="next-page button" href="'.$this->getQueryArg($queryArg).'"><span aria-hidden="true">»</span></a>';
} else {
$strToLast = '<span class="tablenav-pages-navspan button disabled" aria-hidden="true">»</span>';
}
$args = array(
// Nombre d'éléments - 1
$nbElements,
// Lien vers la première page - 2
$strToFirst,
// Lien vers la page précédente - 3
$strToPrevious,
// Page courante - 4
$curPage,
// Nombre total de pages - 5
$nbPages,
// Lien vers la page suivante - 6
$strToNext,
// Lien vers la dernière page - 7
$strToLast,
);
return $this->getRender($this->urlFragmentPagination, $args);
}
}
<file_sep>/core/bean/SurvivorBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe SurvivorBean
* @author Hugues
* @since 1.00.00
* @version 1.07.26
*/
class SurvivorBean extends LocalBean
{
protected $urlRowAdmin = 'web/pages/admin/fragments/survivor-row.php';
protected $urlRowPublic = 'web/pages/public/fragments/survivor-row.php';
protected $urlArticle = 'web/pages/public/fragments/survivor-article.php';
/**
* @param Survivor $Survivor
*/
public function __construct($Survivor=null)
{
parent::__construct();
$this->Survivor = ($Survivor==null ? new Survivor() : $Survivor);
$this->ExpansionServices = new ExpansionServices();
$this->SurvivorServices = new SurvivorServices();
}
//////////////////////////////////////////////////////////////////////////
// Différentes modes de présentation des Survivants
/**
* @return string
*/
public function getRowForAdminPage()
{
///////////////////////////////////////////////////////////////////////////
// On enrichit le template
$args = array(
// Identifiant du Survivant - 1
$this->Survivor->getId(),
// Portraits - 2
$this->getAllPortraits(),
// Url d'édition - 3
$this->Survivor->getEditUrl(self::CST_SURVIVOR),
// Nom du Survivant - 4
$this->Survivor->getName(),
// Url d'édition du WpPost - 5
$this->Survivor->getWpPostEditUrl(),
// Article publié ? - 6
$this->Survivor->getWpPostUrl(),
// Liste des profils existants - 7
$this->getListeProfils(),
// Extension de provenance - 8
$this->Survivor->getExpansion()->getName(),
// Background du Survivant - 9
($this->Survivor->getBackground()!='' ? substr($this->Survivor->getBackground(), 0, 50).'...' : 'Non renseigné'),
// Nom de l'image alternative, si défini. - 10
$this->Survivor->getAltImgName(),
// Type du Survivant - 11
(!$this->Survivor->isStandard() ? ' ultimate' : ''),
);
///////////////////////////////////////////////////////////////
// Puis on le retourne
return $this->getRender($this->urlRowAdmin, $args);
}
/**
* @return string
*/
public function getRowForPublicPage()
{
///////////////////////////////////////////////////////////////
// On enrichit le template et on le retourne.
$args = array(
// Id du Survivant - 1
$this->Survivor->getId(),
// Les portraits du Survivants - 2
$this->getAllPortraits(),
// Url du WpPost associé, s'il existe - 3
$this->Survivor->getWpPostUrl(),
// Nom du Survivant - 4
$this->Survivor->getName(),
// Si on a un profil de Zombivant, on donne la possibilité de l'afficher - 5
($this->Survivor->isZombivor()?self::CST_CHANGEPROFILE:''),
// Si on a un profil de Zombivant, on veut une case à cocher - 6
($this->Survivor->isZombivor()?self::CST_SQUAREPOINTER:self::CST_WINDOWCLOSE),
// Si on a un profil d'Ultimate, on donne la possibilité de l'afficher - 7
($this->Survivor->isUltimate()?self::CST_CHANGEPROFILE:''),
// Si on a un profil d'Ultimate, on veut une case à cocher - 8
($this->Survivor->isUltimate()?self::CST_SQUAREPOINTER:self::CST_WINDOWCLOSE),
// Extension à laquelle est rattaché le Survivant - 9
$this->Survivor->getExpansion()->getName(),
// Liste des Compétences du Survivant - 10
$this->getAllSkills(),
// Background du Survivant - 11
$this->Survivor->getBackground(),
// Classe additionnelle, pour la ligne - 12
(!$this->Survivor->isStandard() ? ' ultimate' : ''),
);
return $this->getRender($this->urlRowPublic, $args);
}
/**
* @return string
*/
public function getContentForHome()
{
//////////////////////////////////////////////////////////////////
// Liste des profils du Survivants.
$strProfiles = '';
$strSkills = '';
if ($this->Survivor->isStandard()) {
$strProfiles .= $this->getBalise(self::TAG_LI, $this->getFormRadioBouton('survivant', self::LBL_SURVIVANT), array(self::ATTR_CLASS=>self::CST_ACTIVE));
$strSkills .= $this->getBalise(self::TAG_UL, $this->Survivor->getUlSkills('', false, true), array(self::ATTR_CLASS=>'colSkills skills-survivant'));
}
if ($this->Survivor->isZombivor()) {
$strProfiles .= $this->getBalise(self::TAG_LI, $this->getFormRadioBouton('zombivant', self::LBL_ZOMBIVANT));
$strSkills .= $this->getBalise(self::TAG_UL, $this->Survivor->getUlSkills('z', false, true), array(self::ATTR_CLASS=>'colSkills skills-zombivant'));
}
if ($this->Survivor->isUltimate()) {
$strProfiles .= $this->getBalise(self::TAG_LI, $this->getFormRadioBouton('ultimate', self::LBL_ULTIMATE));
$strSkills .= $this->getBalise(self::TAG_UL, $this->Survivor->getUlSkills('u', false, true), array(self::ATTR_CLASS=>'colSkills skills-ultimate'));
}
if ($this->Survivor->isUltimatez()) {
$strProfiles .= $this->getBalise(self::TAG_LI, $this->getFormRadioBouton('ultimatez', self::LBL_ULTIMATEZOMBIVANT));
$strSkills .= $this->getBalise(self::TAG_UL, $this->Survivor->getUlSkills('uz', false, true), array(self::ATTR_CLASS=>'colSkills skills-ultimatez'));
}
//////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////
// On enrichit le template
$args = array(
// Url de l'article - 1
$this->Survivor->getWpPostUrl(),
// Url du portrait du Survivant - 2
$this->Survivor->getPortraitUrl(),
// Url vers la page Survivants - 3
'/'.self::PAGE_SURVIVOR,
// Nom du Survivant - 4
$this->Survivor->getName(),
// Les Compétences du Survivant - 5
$strSkills,
// Liste des profils du Survivant - 6
'<ul>'.$strProfiles.'</ul>',
// Plus utilisé - 7
'',
// Background du Survivant - 8
$this->Survivor->getBackground(),
// Classe additionnelle de l'article - 9
$this->Survivor->getStrClassFilters().' '.$this->Survivor->getExpansion()->getCode(),
// Le Nom de l'extension - 10
$this->Survivor->getExpansion()->getName(),
);
///////////////////////////////////////////////////////////////
// Puis on le retourne
return $this->getRender($this->urlArticle, $args);
}
protected $urlFormRadioBouton = 'web/pages/public/fragments/form-radio-bouton.php';
protected $urlCardVisit = 'web/pages/public/fragments/survivor-cardvisit.php';
protected $tplSkillBadge = '<a class="badge badge-%1$s-skill" href="%2$s">%3$s</a>';
protected $tplDisabledSkillBadge = '<span class="badge badge-%1$s-skill">%3$s</span>';
private $strPortraitSurvivant = 'portrait-survivant';
private $strPortraitZombivant = 'portrait-zombivant';
private $strPortraitUltimate = ' portrait-ultimate';
//////////////////////////////////////////////////////////////////////////
private function getFormRadioBouton($value, $libelle)
{
return '<div class="form-check badge badge-outline changeProfile" data-type="'.$value.'">'.$libelle.'</div>';
}
/**
* @return string
*/
public function getButton()
{
$label = $this->getIconFarSquare().' '.$this->Survivor->getName();
$attributes = array(
self::ATTR_TYPE => self::TAG_BUTTON,
self::ATTR_CLASS => 'btn btn-light btn-survivor hidden',
self::ATTR_DATA_EXPANSIONID => $this->Survivor->getExpansionId(),
self::ATTR_DATA_SURVIVORID => $this->Survivor->getId(),
);
return $this->getBalise(self::TAG_BUTTON, $label, $attributes);
}
/**
* @param string $color
* @return string
*/
public function getSkillBadge($color)
{
////////////////////////////////////////////////////////////////////
// On enrichi les paramètres du template et on le retourne
$args = array(
$color,
$this->Survivor->getWpPostUrl(),
$this->Survivor->getName()
);
return vsprintf(($this->Survivor->getWpPostUrl()=='#' ? $this->tplDisabledSkillBadge : $this->tplSkillBadge), $args);
}
public function getPortrait($type='')
{ return $this->getStrImgPortrait($this->Survivor->getPortraitUrl($type), 'Portrait', ''); }
/**
* @return string
*/
public function getAllPortraits($displayedFiltered=true)
{
$Survivor = $this->Survivor;
$name = $Survivor->getName();
$str = '';
if ($Survivor->isStandard()) {
$str .= $this->getStrImgPortrait($Survivor->getPortraitUrl(), 'Portrait Survivant - '.$name, ($displayedFiltered?$this->strPortraitSurvivant:''));
}
if ($Survivor->isZombivor()) {
$str .= $this->getStrImgPortrait($Survivor->getPortraitUrl('z'), 'Portrait Zombivant - '.$name, ($displayedFiltered?$this->strPortraitZombivant:''));
}
if ($Survivor->isUltimate()) {
$label = $this->strPortraitSurvivant.$this->strPortraitUltimate;
$str .= $this->getStrImgPortrait($Survivor->getPortraitUrl('u'), 'Portrait Ultimate - '.$name, ($displayedFiltered?$label:''));
}
if ($Survivor->isUltimatez()) {
$label = $this->strPortraitZombivant.$this->strPortraitUltimate;
$str .= $this->getStrImgPortrait($Survivor->getPortraitUrl('uz'), 'Portrait ZUltimate - '.$name, ($displayedFiltered?$label:''));
}
return $str;
}
private function getProfileLi($type, $survivorTypeId, $label)
{
$strProfils = '<li data-id="'.$this->Survivor->getId().'" data-type="'.$type.'" class="hasTooltip pointer"> ';
if ($this->Survivor->areDataSkillsOkay($survivorTypeId)) {
$strProfils .= $this->getIconFarCheckSquare().' '.$label.' <div class="tooltip">';
$strProfils .= $this->Survivor->getAdminUlSkills($survivorTypeId).'</div>';
} else {
$strProfils .= $this->getIconFarWindowClose().' '.$label;
}
return $strProfils.'</li>';
}
public function getListeProfils()
{
$strProfils = '<ul>';
// A-t-il un profil Standard ?
if ($this->Survivor->isStandard()) {
$strProfils .= $this->getProfileLi('survivant', self::CST_SURVIVORTYPEID_S, 'Standard');
}
// A-t-il un profil Zombivant ?
if ($this->Survivor->isZombivor()) {
$strProfils .= $this->getProfileLi('zombivant', self::CST_SURVIVORTYPEID_Z, 'Zombivant');
}
// A-t-il un profil Ultimate ?
if ($this->Survivor->isUltimate()) {
$strProfils .= $this->getProfileLi('ultimate survivant', self::CST_SURVIVORTYPEID_U, 'Ultimate');
}
// A-t-il un profil UltimateZ ?
if ($this->Survivor->isUltimatez()) {
$strProfils .= $this->getProfileLi('ultimate zombivant', self::CST_SURVIVORTYPEID_UZ, 'Ultimate Zombivant');
}
return $strProfils.'</ul>';
}
/**
* @param string $src
* @param string $alt
* @param string $addClass
* @return string
*/
private function getStrImgPortrait($src, $alt, $addClass)
{
$attributes = array(
self::ATTR_SRC => $src,
self::ATTR_ALT => $alt,
self::ATTR_CLASS => 'thumb '.$addClass,
);
return $this->getBalise(self::TAG_IMG, '', $attributes);
}
public function getSkills($type='')
{ return $this->getSkillsBySurvivorType('row', $this->Survivor->getUlSkills($type, true)); }
/**
* @return string
*/
public function getAllSkills()
{
$Survivor = $this->Survivor;
$str = $this->getSkillsBySurvivorType('skills-survivant row', $Survivor->getUlSkills('', true));
if ($Survivor->isZombivor()) {
$str .= $this->getSkillsBySurvivorType('skills-zombivant row', $Survivor->getUlSkills('z', true));
}
if ($Survivor->isUltimate()) {
$str .= $this->getSkillsBySurvivorType('skills-ultimate skills-survivant row', $Survivor->getUlSkills('u', true));
$str .= $this->getSkillsBySurvivorType('skills-ultimate skills-zombivant row', $Survivor->getUlSkills('uz', true));
}
return $this->getBalise(self::TAG_UL, $str);
}
/**
* @param string $addClass
* @param string $content
* @return string
*/
private function getSkillsBySurvivorType($addClass, $content)
{ return $this->getBalise(self::TAG_LI, $content, array(self::ATTR_CLASS=>$addClass)); }
/**
* @return string
*/
public function getCheckBoxType()
{
$strType = '';
if ($this->Survivor->isZombivor()) {
$attributes = array(
self::ATTR_DATA_ID => $this->Survivor->getId(),
self::ATTR_DATA_TYPE => 'zombivant',
self::ATTR_CLASS => 'changeProfile',
);
$strType .= $this->getBalise(self::TAG_DIV, $this->getIconFarSquarePointer().' Zombivant', $attributes);
if ($this->Survivor->isUltimate()) {
$attributes[self::ATTR_DATA_TYPE] = self::FIELD_ULTIMATE;
$strType .= ' '.$this->getBalise(self::TAG_DIV, $this->getIconFarSquarePointer().' Ultimate', $attributes);
}
}
return $strType;
}
public function getCartouche($extraAttributes=array(), $linked=false)
{
$content = $this->getStrImgPortrait($this->Survivor->getPortraitUrl(), '', '').' '.$this->Survivor->getName();
$attributes = array(
self::ATTR_CLASS => 'cartouche',
);
if (!empty($extraAttributes)) {
$attributes = array_merge($attributes, $extraAttributes);
}
if ($linked) {
$tag = self::TAG_A;
$attributes[self::ATTR_HREF] = $this->Survivor->getWpPost()->getPermalink();
} else {
$tag = self::TAG_SPAN;
}
return $this->getBalise($tag, $content, $attributes);
}
}
<file_sep>/core/bean/WpPageMissionOnlineBean.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe WpPageMissionOnlineBean
* @author Hugues
* @since 1.10.04
* @version 1.10.04
*/
class WpPageMissionOnlineBean extends WpPageBean
{
protected $urlDirMissions = '/web/rsc/missions/';
protected $urlDirLiveMissions = '/web/rsc/missions/live/';
protected $urlLoginTemplate = 'web/pages/public/wppage-mission-online-login.php';
protected $urlTemplate = 'web/pages/public/wppage-mission-online.php';
protected $urlTchatMsgTpl = 'web/pages/public/fragments/tchat-message.php';
protected $urlSectionSetup = 'web/pages/public/fragments/online-section-setup.php';
protected $xmlSuffixe = '.mission.xml';
/**
* Class Constructor
* @param WpPage $WpPage
*/
public function __construct($WpPage='')
{
parent::__construct($WpPage);
$this->MissionServices = new MissionServices();
$this->SurvivorServices = new SurvivorServices();
}
/**
* @return string
*/
public function getContentPage()
{
$this->msgError = '';
if (isset($_POST['radioChoice']) && $_POST['radioChoice']=='new') {
$Missions = $this->MissionServices->getMissionsWithFilters(array(self::FIELD_CODE=>$_POST['selectMission']));
if (empty($Missions)) {
$this->msgError = '<em>Attention</em>, le code sélectionné n\'existe pas.';
} elseif (is_file(PLUGIN_PATH.$this->urlDirMissions.$_POST['selectMission'].$this->xmlSuffixe)) {
// Generate a suitable random key and store it in zombieKey.
// Then create a live file from the Mission's reference file.
// Example of a generated key: AnJwMKqNkXba2suQ
$str = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789';
$strCode = substr(str_shuffle($str), 0, 16);
$Mission = array_shift($Missions);
copy(PLUGIN_PATH.$this->urlDirMissions.$Mission->getCode().$this->xmlSuffixe, PLUGIN_PATH.$this->urlDirLiveMissions.$strCode.$this->xmlSuffixe);
$_SESSION['zombieKey'] = $strCode;
$MissionOnline = new MissionOnline($Mission);
$MissionOnline->setUp();
$this->msgError = '<em>Attention</em>, la Mission sélectionnée existe.';
} else {
$this->msgError = '<em>Attention</em>, la Mission sélectionnée n\'existe pas.';
}
} elseif (isset($_POST['radioChoice']) && $_POST['radioChoice']=='old') {
if (is_file(PLUGIN_PATH.$this->urlDirLiveMissions.$_POST['saveCode'].$this->xmlSuffixe)) {
// On récupère la clef fournie et on la stocke dans zombieKey
$_SESSION['zombieKey'] = $_POST['saveCode'];
$this->msgError = '<em>Attention</em>, le code saisi correspond à une partie sauvegardée.';
} else {
// Prévoir une gestion d'erreur pour fichier inexistant.
$this->msgError = '<em>Attention</em>, le code saisi ne correspond pas à une partie sauvegardée.';
}
} elseif (isset($_GET['logout'])) {
unset($_SESSION['zombieKey']);
}
if (isset($_SESSION['zombieKey']) && is_file(PLUGIN_PATH.$this->urlDirLiveMissions.$_SESSION['zombieKey'].$this->xmlSuffixe)) {
return $this->getBoard();
} else {
return $this->getLogin();
}
}
public function getLogin()
{
if ($this->msgError!='') {
$strMsgError = $this->getBalise(self::TAG_DIV, $this->msgError, array(self::ATTR_CLASS=>'alert alert-danger'));
} else {
$strMsgError = '';
}
// Gérer les cas éventuels d'erreur.
$args = array(
// Le message d'erreur éventuel - 1
$strMsgError,
// Si old est checked - 2
(isset($_POST['radioChoice']) && $_POST['radioChoice']=='old') ? 'checked' : '',
// Si old n'est pas checked - 3
(!isset($_POST['radioChoice']) || $_POST['radioChoice']=='new') ? 'checked' : '',
// Si old est checked - 2
(isset($_POST['radioChoice']) && $_POST['radioChoice']=='old') ? 'fa-dot-circle-o' : 'fa-circle-o',
// Si old n'est pas checked - 3
(!isset($_POST['radioChoice']) || $_POST['radioChoice']=='new') ? 'fa-dot-circle-o' : 'fa-circle-o',
);
return $this->getRender($this->urlLoginTemplate, $args);
}
public function getBoard()
{
//////////////////////////////////////////////////////////////////
// On va afficher une Mission, à partir de son XML... Donc, déjà, il faut l'ouvrir !
$this->openFile();
// On initialise quelques variables :
$this->arrLstPortraits = array();
$this->arrLstSurvivorDetail = array();
$Missions = $this->MissionServices->getMissionsWithFilters(array(self::FIELD_CODE=>'AJ01'));
$Mission = array_shift($Missions);
$MissionBean = $Mission->getBean();
//////////////////////////////////////////////////////////////////
// On enrichi le template puis on le restitue.
$args = array(
// class pour afficher correctement la Map - 1
$this->getDimensions(),
// La liste des Dalles - 2
$this->displayTiles(),
// La liste des Zones - 3
'',
// La liste des Tokens - 4
$this->displayTokens().$this->displaySurvivors().$this->displayZombies(),
// Identifiant de la Partie - 5
// TODO : en dur pour le moment.
$_SESSION['zombieKey'],
// Liste des Objectifs - 6
$MissionBean->getMissionContentObjectives(),
// Liste des Règles Spéciales - 7
$MissionBean->getMissionContentRules(),
// Portraits des Survivants dans la Sidebar - 8
$this->getLstPortraits(),
// Fiche d'identité des Survivants dans la Sidebar - 9
$this->getLstDetails(),
// Le Tchat - 10
$this->getLstTchats(),
// Le contenu de l'onglet SetUp - 11
$this->getMissionSetup(),
);
return $this->getRender($this->urlTemplate, $args);
}
private function getMissionSetup()
{
$Spawns = $this->objXmlDocument->xPath('//spawns')[0];
$Pools = $this->objXmlDocument->xPath('//pools/pool');
$lstPools = '';
foreach ($Pools as $Pool) {
$innerDiv = $this->getBalise(self::TAG_DIV, $Pool->attributes()['current'].' / '.$Pool->attributes()['max'], array(self::ATTR_CLASS=>'badge'));
$img = $this->getBalise(self::TAG_IMG, '', array(self::ATTR_SRC=>'/wp-content/plugins/hj-zombicide/web/rsc/img/zombies/'.$Pool->attributes()['type'].'.png'));
$outerDiv = $this->getBalise(self::TAG_DIV, $img.$innerDiv, array(self::ATTR_CLASS=>'chip token zombie Standard non-draggable'));
$lstPools .= $this->getBalise(self::TAG_LI, $outerDiv);
}
$args = array(
// Spawn actuel - 1
$Spawns->attributes()['interval'],
// Etat de la réserve de Zombies - 2
$lstPools,
);
return $this->getRender($this->urlSectionSetup, $args);
}
public function getLstTchats($tsTreshold='')
{
if ($tsTreshold=='') {
$Tchats = $this->objXmlDocument->xPath('//tchat');
} else {
$this->openFile();
$Tchats = $this->objXmlDocument->xPath('//tchat[@timestamp>"'.$tsTreshold.'"]');
}
usort($Tchats, 'sort_trees');
$lstMsgs = '';
$prevTs = '';
while (!empty($Tchats)) {
$Tchat = array_shift($Tchats);
$author = $Tchat->attributes()['author'];
$ts = $Tchat->attributes()['timestamp']*1;
// On insère un Tag pour séparer les messages des différentes journées.
if ($prevTs!='' && date('d', $ts)!=date('d', $prevTs)) {
$liClass = 'clearfix';
$msgDataClass = ' message changeDate';
$msgDataContent = date('d m Y', $ts);
$msgClass = ' hidden';
$args = array(
$liClass,
$msgDataClass,
$msgDataContent,
$msgClass,
'',
$ts,
);
$lstMsgs .= $this->getRender($this->urlTchatMsgTpl, $args);
}
// Selon que l'auteur est Automat, le user courant ou un autre, le visuel change
if ($author=='Automat') {
$liClass = 'clearfix';
$msgDataClass = '';
$msgDataContent = date('H:i', $ts);
$msgClass = ' tech-message';
} elseif ($author=='me') {
$liClass = 'clearfix';
$msgDataClass = ' align-right';
$msgDataContent = $this->getBalise(self::TAG_SPAN, date('H:i', $ts), array(self::ATTR_CLASS=>'message-data-time')).' ';
$msgDataContent .= $this->getBalise(self::TAG_SPAN, $author, array(self::ATTR_CLASS=>'message-data-name'));
$msgClass = ' other-message float-right';
} else {
$liClass = '';
$msgDataClass = '';
$msgDataContent = $this->getBalise(self::TAG_SPAN, $author, array(self::ATTR_CLASS=>'message-data-name')).' ';
$msgDataContent .= $this->getBalise(self::TAG_SPAN, date('H:i', $ts), array(self::ATTR_CLASS=>'message-data-time'));
$msgClass = ' my-message';
}
$args = array(
$liClass,
$msgDataClass,
$msgDataContent,
$msgClass,
$Tchat[0],
$ts,
);
$lstMsgs .= $this->getRender($this->urlTchatMsgTpl, $args);
$prevTs = $ts;
}
return $lstMsgs;
}
private function getLstDetails()
{
$lstDetails = array();
$survivors = $this->objXmlDocument->xPath('//survivor');
while (!empty($survivors)) {
$survivor = array_shift($survivors);
$TokenBean = new TokenBean($survivor);
$lstDetails[] = $TokenBean->getTokenDetail();
}
return implode('', $lstDetails);
}
private function getLstPortraits()
{
$lstPortraits = array();
$survivors = $this->objXmlDocument->xPath('//survivor');
while (!empty($survivors)) {
$survivor = array_shift($survivors);
$TokenBean = new TokenBean($survivor);
$lstPortraits[] = $TokenBean->getTokenPortrait();
}
// On rajoute un Unkonwn, pour pouvoir ajouter un Survivant.
$args = array(
self::ATTR_ID => 'portrait-new',
self::ATTR_CLASS => 'unknown',
self::ATTR_SRC => '/wp-content/plugins/hj-zombicide/web/rsc/img/portraits/p.jpg',
self::ATTR_TITLE => 'Add a Survivor',
);
$lstPortraits[] = $this->getBalise(self::TAG_IMG, '', $args);
return implode('', $lstPortraits);
}
private function displayZombies()
{
$lstZombies = '';
// On récupère les Zombies pour les afficher
$zombies = $this->objXmlDocument->xPath('//zombie');
while (!empty($zombies)) {
$zombie = array_shift($zombies);
$TokenBean = new TokenBean($zombie);
$lstZombies .= $TokenBean->getTokenBalise();
$lstZombies .= $TokenBean->getTokenMenu();
}
return $lstZombies;
}
private function displaySurvivors()
{
$lstSurvivors = '';
// On récupère les Survivants pour les afficher
$survivors = $this->objXmlDocument->xPath('//survivor');
while (!empty($survivors)) {
$survivor = array_shift($survivors);
$TokenBean = new TokenBean($survivor);
$lstSurvivors .= $TokenBean->getTokenBalise();
$lstSurvivors .= $TokenBean->getTokenMenu();
}
return $lstSurvivors;
}
private function displayTokens()
{
$lstChips = '';
// On récupère les Tokens pour les afficher
$chips = $this->objXmlDocument->xPath('//chip');
while (!empty($chips)) {
$chip = array_shift($chips);
$TokenBean = new TokenBean($chip);
$lstChips .= $TokenBean->getTokenBalise();
$lstChips .= $TokenBean->getTokenMenu();
}
return $lstChips;
}
private function displayTiles()
{
$lstTiles = '';
// On récupère les Dalles pour les afficher
$tiles = $this->objXmlDocument->xPath('//tile');
while (!empty($tiles)) {
$tile = array_shift($tiles);
$code = $tile->attributes()[self::FIELD_CODE];
$orientation = $tile->attributes()['orientation'];
$args = array(
self::ATTR_CLASS => 'mapTile '.$orientation,
'style' => "background:url('/wp-content/plugins/hj-zombicide/web/rsc/img/tiles/".$code."-500px.png');",
);
$lstTiles .= $this->getBalise(self::TAG_DIV, '', $args);
}
return $lstTiles;
}
private function getDimensions()
{
$maps = $this->objXmlDocument->xPath('//map');
$map = array_shift($maps);
// On détermine les dimensions de la map pour pouvoir appliquer les styles css
$this->width = $map->attributes()['width'];
$this->height = $map->attributes()['height'];
return 'map'.$this->height.'x'.$this->width;
}
private function openFile()
{
$fileName = PLUGIN_PATH.$this->urlDirLiveMissions.$_SESSION['zombieKey'].".mission.xml";
$this->objXmlDocument = simplexml_load_file($fileName);
$objXmlDocument = simplexml_load_file($fileName);
$objJsonDocument = json_encode($objXmlDocument);
$arrOutput = json_decode($objJsonDocument, TRUE);
$this->map = $arrOutput['map'];
}
}
function sort_trees($t1, $t2) {
return ($t1['timestamp']*1) <=> ($t2['timestamp']*1); // usort comparators must return an int, not a bool
}
<file_sep>/core/daoimpl/SkillDaoImpl.php
<?php
if (!defined('ABSPATH')) {
die('Forbidden');
}
/**
* Classe SkillDaoImpl
* @author Hugues.
* @since 1.00.00
* @version 1.05.06
*/
class SkillDaoImpl extends LocalDaoImpl
{
/**
* Class constructor
*/
public function __construct()
{ parent::__construct('Skill'); }
/**
* @param array $rows
* @return array
*/
protected function convertToArray($rows)
{
$Items = array();
if (!empty($rows)) {
foreach ($rows as $row) {
$Items[] = Skill::convertElement($row);
}
}
return $Items;
}
/**
* @param array $arrParams
* @param array $filters
* @return array
*/
public function selectEntriesWithFiltersIn($arrParams, $filters)
{
// On s'appuie sur la requête de base.
$requete = $this->selectRequest.$this->fromRequest;
// Join the survivor_skill table when filtering on the tag level
if (isset($filters[self::FIELD_TAGLEVELID])) {
$requete .= 'INNER JOIN wp_11_zombicide_survivor_skill ss ON s.id=ss.skillId ';
// On passe ensuite aux critères de sélection.
$requete .= 'WHERE 1=1 ';
// Contrainte sur le niveau
$requete .= 'AND tagLevelId IN ('.$filters[self::FIELD_TAGLEVELID].') ';
}
// On peut aussi trier
$requete .= $this->orderBy;
// Et retourner le tableau de résultats.
return $this->convertToArray($this->selectEntriesAndLogQuery(__FILE__, __LINE__, $requete, $arrParams));
}
/**
* @param string $file
* @param int $line
* @param array $arrParams
* @return array|Skill
*/
public function select($file, $line, $arrParams)
{ return parent::localSelect($file, $line, $arrParams, new Skill()); }
}
| 1dc31aec88c9ef66bf6f2220a2be39dca033aa11 | [
"JavaScript",
"PHP"
] | 102 | PHP | HuguesGithub/hj-zombicide-v2 | aaa6b3dd8ad3d7e7c52f42e6dfc5fca64f5f1a2e | 097fa5359c7ee6dda111787e3d2792256090a339 | |
refs/heads/main | <repo_name>Bpielstick/2.5DPlatformer<file_sep>/Assets/Scripts/UIManager.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using TMPro;
public class UIManager : MonoBehaviour
{
[SerializeField] private GameObject coinCountDisplay;
[SerializeField] private GameObject livesCountDisplay;
// Start is called before the first frame update
void Start()
{
}
// Update is called once per frame
void Update()
{
}
public void UpdateUI (int newCoinCount, int newLivesCount)
{
coinCountDisplay.GetComponent<TextMeshProUGUI>().text = "Coins x" + newCoinCount;
livesCountDisplay.GetComponent<TextMeshProUGUI>().text = "Lives: " + newLivesCount;
}
}
<file_sep>/Assets/Scripts/Player.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
public class Player : MonoBehaviour
{
[SerializeField] private float maxSpeed;
[SerializeField] private float currentSpeed;
[SerializeField] private float acceleration;
[SerializeField] private float deceleration;
[SerializeField] private float maxFallSpeed;
[SerializeField] private float fallSpeed = 0;
[SerializeField] private float fallAcceleration;
[SerializeField] private bool jumpEnabled = true;
[SerializeField] private bool isJumping = false;
[SerializeField] private bool doubleJump = true;
[SerializeField] private float jumpSpeed;
[SerializeField] private int coinCount = 0;
[SerializeField] private int livesCount = 3;
[SerializeField] private Vector3 moveDirection;
[SerializeField] private GameObject UIManager;
[SerializeField] private Transform startLocation;
private UIManager uiManager;
private CharacterController characterController;
// Start is called before the first frame update
void Start()
{
characterController = GetComponent<CharacterController>();
uiManager = UIManager.GetComponent<UIManager>();
uiManager.UpdateUI(coinCount, livesCount);
}
// Update is called once per frame
void Update()
{
float XDirection = Input.GetAxis("Horizontal");
currentSpeed += XDirection * acceleration * Time.deltaTime;
if (currentSpeed > maxSpeed) { currentSpeed = maxSpeed; }
else if (currentSpeed < -maxSpeed) { currentSpeed = -maxSpeed; }
if (XDirection == 0 && currentSpeed > 0) { currentSpeed -= deceleration * Time.deltaTime; }
else if (XDirection == 0 && currentSpeed < 0) { currentSpeed += deceleration * Time.deltaTime; }
if (XDirection == 0 && Mathf.Abs(currentSpeed) < 0.1) { currentSpeed = 0; }
if (characterController.isGrounded && isJumping == false)
{
//Debug.Log("grounded");
fallSpeed = -0.1f;
jumpEnabled = true;
}
else
{
//Debug.Log("not grounded");
if (fallSpeed >= maxFallSpeed)
{
fallSpeed += fallAcceleration * Time.deltaTime;
}
}
if (Input.GetButtonDown("Jump"))
{
if (characterController.isGrounded)
{
jumpEnabled = false;
isJumping = true;
doubleJump = true;
fallSpeed = jumpSpeed;
StartCoroutine(JumpRoutine());
}
else if (doubleJump)
{
fallSpeed = jumpSpeed;
doubleJump = false;
}
}
moveDirection = new Vector3(currentSpeed, fallSpeed, 0);
if (characterController.enabled)
{
characterController.Move(moveDirection * Time.deltaTime);
}
}
private IEnumerator JumpRoutine()
{
yield return new WaitForSeconds(0.1f);
isJumping = false;
}
public void Pickup (string PickupType)
{
switch (PickupType)
{
case "Coin":
coinCount++;
break;
default:
break;
}
uiManager.UpdateUI(coinCount, livesCount);
}
public void LoseLife ()
{
livesCount--;
uiManager.UpdateUI(coinCount, livesCount);
StartCoroutine(RespawnRoutine());
}
private IEnumerator RespawnRoutine()
{
characterController.SimpleMove(new Vector3(0,0,0));
characterController.enabled = false;
yield return new WaitForSeconds(0.5f);
if (livesCount <= 0)
{
SceneManager.LoadScene("Main");
}
else
{
transform.position = startLocation.position;
characterController.enabled = true;
}
}
public void GainLife()
{
livesCount++;
uiManager.UpdateUI(coinCount, livesCount);
}
}
<file_sep>/Assets/Scripts/GroundDetection.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class GroundDetection : MonoBehaviour
{
// Start is called before the first frame update
void Start()
{
}
// Update is called once per frame
void Update()
{
/*
if (!characterController.isGrounded)
{
RaycastHit Ground;
RaycastHit InFront;
RaycastHit Behind;
Debug.DrawRay(transform.position, transform.TransformDirection(Vector3.down) * 1.3f, Color.yellow);
if (Physics.Raycast(transform.position, transform.TransformDirection(Vector3.down), out Ground, 1.3f))
{
Debug.Log("raycast hit");
jumpEnabled = true;
}
else
{
Debug.DrawRay(transform.position, new Vector3(.5f, -1.3f, 0) * 1, Color.yellow);
if (Physics.Raycast(transform.position, new Vector3(-.5f, -1.3f, 0), out Ground, 1))
{
jumpEnabled = true;
Debug.DrawRay(transform.position, transform.TransformDirection(Vector3.right) * 1f, Color.red);
if (Physics.Raycast(transform.position, transform.TransformDirection(Vector3.left), out Behind, 1))
{
if (Ground.Equals(Behind))
{
jumpEnabled = false;
}
}
}
Debug.DrawRay(transform.position, new Vector3(-.5f, -1.3f, 0) * 1, Color.yellow);
if (Physics.Raycast(transform.position, new Vector3(.5f, -1.3f, 0), out Ground, 1))
{
jumpEnabled = true;
Debug.DrawRay(transform.position, transform.TransformDirection(Vector3.left) * 1f, Color.red);
if (Physics.Raycast(transform.position, transform.TransformDirection(Vector3.forward) * 1f, out InFront, 1))
{
if (Ground.Equals(InFront))
{
jumpEnabled = false;
}
}
}
}
}
*/
}
}
| b1bdb636883ef1cbf083f4f11a5c98bbff584be3 | [
"C#"
] | 3 | C# | Bpielstick/2.5DPlatformer | c0bda8aec683cdbc2a80964c1c2ff97e48cd1968 | 9c51f6fefeb360eb688ac7961ef20c133ba314f6 | |
refs/heads/master | <file_sep>+++
title = "C Lecture - 4"
description = "Exercise 48 ~ 51"
+++
Author: <NAME>
All content comes from Zed's [Lecture Repository](https://github.com/zedshaw/learn-c-the-hard-way-lectures.git) and [Libraries Repository](https://github.com/zedshaw/liblcthw). All credit goes to Zed.
### Exercise 48a A Simple Network Server:
Project Description
The Plan
Start your first long running project:
statserve
The Purpose
You'll get the project started and get a minimum first hack going.
The Requirements
1. Create a simple network server that accepts a connection on port 7899 from
*netclient* or the *nc* command, and echoes back anything you type.
2. You'll need to learn how to bind a port, listen on the socket, and answer it.
Use your research skills to study how this is done and attempt to implement it
yourself.
The Requirements
3. The more important part of this project is laying out the project directory
from the *c-skeleton*, and making sure you can build everything and get it
working.
4. Don't worry about things like daemons or anything else. Your server just has
to run from the command line and keep running.
The Clues
I will now give you some clues:
* USE liblcthw!
* Remember you did a client already, you just need to make a server.
* Do NOT use select! Use fork() for the server.
* Keep it *simple*. Don't worry about anything other than accepting a connection and closing.
* Stay small, build slowly.
Important References
* Research online for "echo server in C".
* Read man (2) pages for *accept*, *bind*, *listen*, *connect*, *select*, *socket*, and *shutdown*.
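Putting those clues and man pages together, the whole exercise boils down to one chain of calls: `socket`, `bind`, `listen`, then `accept` in a loop with a `fork` per client. Here is a rough sketch of that shape only, not the graded solution (that comes in Exercise 48b): error handling is cut down to bare checks, child processes are never reaped, and the buffer size is arbitrary.
```c
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <netdb.h>
#include <sys/types.h>
#include <sys/socket.h>

// Sketch: bind/listen on a port, then fork one child per client
// that echoes bytes back until the client closes the connection.
int main(int argc, char *argv[])
{
    if (argc != 2) {
        fprintf(stderr, "USAGE: echoserver port\n");
        return 1;
    }

    struct addrinfo hints = { .ai_family = AF_UNSPEC,
        .ai_socktype = SOCK_STREAM, .ai_flags = AI_PASSIVE };
    struct addrinfo *info = NULL;
    if (getaddrinfo(NULL, argv[1], &hints, &info) != 0) return 1;

    int server_fd = socket(info->ai_family, info->ai_socktype,
            info->ai_protocol);
    int yes = 1;
    setsockopt(server_fd, SOL_SOCKET, SO_REUSEADDR, &yes, sizeof(yes));

    if (bind(server_fd, info->ai_addr, info->ai_addrlen) != 0) return 1;
    if (listen(server_fd, 10) != 0) return 1;
    freeaddrinfo(info);

    while (1) {
        int client_fd = accept(server_fd, NULL, NULL);
        if (client_fd < 0) continue;

        if (fork() == 0) {
            // child: echo until the client hangs up
            close(server_fd);
            char buf[1024];
            ssize_t n;
            while ((n = recv(client_fd, buf, sizeof(buf), 0)) > 0) {
                send(client_fd, buf, n, 0);
            }
            close(client_fd);
            exit(0);
        }

        close(client_fd); // parent goes back to accepting
    }

    return 0;
}
```
Run it with the port from the requirement and poke at it with `nc localhost 7899`: type a line and you should get the same line back.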
Encouragement
This will be *HARD*! Try your best, and take it piece by piece. You can do it, but remember, if you give up, the next video (48b) will show you the code for my solution and how to solve it. You can peek there, then come back when you're stuck.
### Exercise 48b A Simple Network Server:
.\ex48b\c-skeleton
.\ex48b\c-skeleton\src\dbg.h
```c
#ifndef __dbg_h__
#define __dbg_h__
#include <stdio.h>
#include <errno.h>
#include <string.h>
#ifdef NDEBUG
#define debug(M, ...)
#else
#define debug(M, ...) fprintf(stderr, "DEBUG %s:%d: " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#endif
#define clean_errno() (errno == 0 ? "None" : strerror(errno))
#define log_err(M, ...) fprintf(stderr,\
"[ERROR] (%s:%d: errno: %s) " M "\n", __FILE__, __LINE__,\
clean_errno(), ##__VA_ARGS__)
#define log_warn(M, ...) fprintf(stderr,\
"[WARN] (%s:%d: errno: %s) " M "\n",\
__FILE__, __LINE__, clean_errno(), ##__VA_ARGS__)
#define log_info(M, ...) fprintf(stderr, "[INFO] (%s:%d) " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#define check(A, M, ...) if(!(A)) {\
log_err(M, ##__VA_ARGS__); errno=0; goto error; }
#define sentinel(M, ...) { log_err(M, ##__VA_ARGS__);\
errno=0; goto error; }
#define check_mem(A) check((A), "Out of memory.")
#define check_debug(A, M, ...) if(!(A)) { debug(M, ##__VA_ARGS__);\
errno=0; goto error; }
#endif
```
.\ex48b\c-skeleton\src\libex29.c
```c
#include <stdio.h>
#include <ctype.h>
#include "dbg.h"
int print_a_message(const char *msg)
{
printf("A STRING: %s\n", msg);
return 0;
}
int uppercase(const char *msg)
{
int i = 0;
// BUG: \0 termination problems
for(i = 0; msg[i] != '\0'; i++) {
printf("%c", toupper(msg[i]));
}
printf("\n");
return 0;
}
int lowercase(const char *msg)
{
int i = 0;
// BUG: \0 termination problems
for(i = 0; msg[i] != '\0'; i++) {
printf("%c", tolower(msg[i]));
}
printf("\n");
return 0;
}
int fail_on_purpose(const char *msg)
{
return 1;
}
```
.\ex48b\c-skeleton\tests\libex29_tests.c
```c
#include "minunit.h"
#include <dlfcn.h>
typedef int (*lib_function) (const char *data);
char *lib_file = "build/libYOUR_LIBRARY.so";
void *lib = NULL;
int check_function(const char *func_to_run, const char *data,
int expected)
{
lib_function func = dlsym(lib, func_to_run);
check(func != NULL,
"Did not find %s function in the library %s: %s", func_to_run,
lib_file, dlerror());
int rc = func(data);
check(rc == expected, "Function %s return %d for data: %s",
func_to_run, rc, data);
return 1;
error:
return 0;
}
char *test_dlopen()
{
lib = dlopen(lib_file, RTLD_NOW);
mu_assert(lib != NULL, "Failed to open the library to test.");
return NULL;
}
char *test_functions()
{
mu_assert(check_function("print_a_message", "Hello", 0),
"print_a_message failed.");
mu_assert(check_function("uppercase", "Hello", 0),
"uppercase failed.");
mu_assert(check_function("lowercase", "Hello", 0),
"lowercase failed.");
return NULL;
}
char *test_failures()
{
mu_assert(check_function("fail_on_purpose", "Hello", 1),
"fail_on_purpose should fail.");
return NULL;
}
char *test_dlclose()
{
int rc = dlclose(lib);
mu_assert(rc == 0, "Failed to close lib.");
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_dlopen);
mu_run_test(test_functions);
mu_run_test(test_failures);
mu_run_test(test_dlclose);
return NULL;
}
RUN_TESTS(all_tests);
```
.\ex48b\statserve
.\ex48b\statserve\bin\statserve.c
```c
#include <stdio.h>
#include <lcthw/dbg.h>
#include "statserve.h"
#include "net.h"
int main(int argc, char *argv[])
{
check(argc == 3, "USAGE: statserve host port");
const char *host = argv[1];
const char *port = argv[2];
check(echo_server(host, port), "Failed to run the echo server.");
return 0;
error:
return 1;
}
```
.\ex48b\statserve\src\dbg.h
```c
#ifndef __dbg_h__
#define __dbg_h__
#include <stdio.h>
#include <errno.h>
#include <string.h>
#ifdef NDEBUG
#define debug(M, ...)
#else
#define debug(M, ...) fprintf(stderr, "DEBUG %s:%d: " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#endif
#define clean_errno() (errno == 0 ? "None" : strerror(errno))
#define log_err(M, ...) fprintf(stderr,\
"[ERROR] (%s:%d: errno: %s) " M "\n", __FILE__, __LINE__,\
clean_errno(), ##__VA_ARGS__)
#define log_warn(M, ...) fprintf(stderr,\
"[WARN] (%s:%d: errno: %s) " M "\n",\
__FILE__, __LINE__, clean_errno(), ##__VA_ARGS__)
#define log_info(M, ...) fprintf(stderr, "[INFO] (%s:%d) " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#define check(A, M, ...) if(!(A)) {\
log_err(M, ##__VA_ARGS__); errno=0; goto error; }
#define sentinel(M, ...) { log_err(M, ##__VA_ARGS__);\
errno=0; goto error; }
#define check_mem(A) check((A), "Out of memory.")
#define check_debug(A, M, ...) if(!(A)) { debug(M, ##__VA_ARGS__);\
errno=0; goto error; }
#endif
```
.\ex48b\statserve\src\net.c
```c
#include <stdlib.h>
#include <sys/select.h>
#include <stdio.h>
#include <lcthw/ringbuffer.h>
#include <lcthw/dbg.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <sys/uio.h>
#include <arpa/inet.h>
#include <netdb.h>
#include <unistd.h>
#include <fcntl.h>
#include "net.h"
struct tagbstring NL = bsStatic("\n");
struct tagbstring CRLF = bsStatic("\r\n");
int nonblock(int fd)
{
int flags = fcntl(fd, F_GETFL, 0);
check(flags >= 0, "Invalid flags on nonblock.");
int rc = fcntl(fd, F_SETFL, flags | O_NONBLOCK);
check(rc == 0, "Can't set nonblocking.");
return 0;
error:
return -1;
}
int client_connect(char *host, char *port)
{
int rc = 0;
struct addrinfo *addr = NULL;
rc = getaddrinfo(host, port, NULL, &addr);
check(rc == 0, "Failed to lookup %s:%s", host, port);
int sock = socket(AF_INET, SOCK_STREAM, 0);
check(sock >= 0, "Cannot create a socket.");
rc = connect(sock, addr->ai_addr, addr->ai_addrlen);
check(rc == 0, "Connect failed.");
rc = nonblock(sock);
check(rc == 0, "Can't set nonblocking.");
freeaddrinfo(addr);
return sock;
error:
freeaddrinfo(addr);
return -1;
}
int read_some(RingBuffer * buffer, int fd, int is_socket)
{
int rc = 0;
if (RingBuffer_available_data(buffer) == 0) {
buffer->start = buffer->end = 0;
}
if (is_socket) {
rc = recv(fd, RingBuffer_starts_at(buffer),
RingBuffer_available_space(buffer), 0);
} else {
rc = read(fd, RingBuffer_starts_at(buffer),
RingBuffer_available_space(buffer));
}
check(rc >= 0, "Failed to read from fd: %d", fd);
RingBuffer_commit_write(buffer, rc);
return rc;
error:
return -1;
}
int write_some(RingBuffer * buffer, int fd, int is_socket)
{
int rc = 0;
bstring data = RingBuffer_get_all(buffer);
check(data != NULL, "Failed to get from the buffer.");
check(bfindreplace(data, &NL, &CRLF, 0) == BSTR_OK,
"Failed to replace NL.");
if (is_socket) {
rc = send(fd, bdata(data), blength(data), 0);
} else {
rc = write(fd, bdata(data), blength(data));
}
check(rc == blength(data), "Failed to write everything to fd: %d.",
fd);
bdestroy(data);
return rc;
error:
return -1;
}
int attempt_listen(struct addrinfo *info)
{
int sockfd = -1; // default fail
int rc = -1;
int yes = 1;
check(info != NULL, "Invalid addrinfo.");
// create a socket with the addrinfo
sockfd = socket(info->ai_family, info->ai_socktype,
info->ai_protocol);
check_debug(sockfd != -1, "Failed to bind to address. Trying more.");
// set the SO_REUSEADDR option on the socket
rc = setsockopt(sockfd, SOL_SOCKET, SO_REUSEADDR, &yes, sizeof(int));
check_debug(rc == 0, "Failed to set SO_REUSADDR.");
// attempt to bind to it
rc = bind(sockfd, info->ai_addr, info->ai_addrlen);
check_debug(rc == 0, "Failed to find socket.");
// finally listen with a backlog
rc = listen(sockfd, BACKLOG);
check_debug(rc == 0, "Failed to listen to socket.");
return sockfd;
error:
return -1;
}
int server_listen(const char *host, const char *port)
{
int rc = 0;
int sockfd = -1; // default fail value
struct addrinfo *info = NULL;
struct addrinfo *next_p = NULL;
struct addrinfo addr = {
.ai_family = AF_UNSPEC,
.ai_socktype = SOCK_STREAM,
.ai_flags = AI_PASSIVE
};
check(host != NULL, "Invalid host.");
check(port != NULL, "Invalid port.");
// get the address info for host and port
rc = getaddrinfo(NULL, port, &addr, &info);
check(rc == 0, "Failed to get address info for connect.");
// cycle through the available list to find one
for(next_p = info; next_p != NULL; next_p = next_p->ai_next)
{
// attempt to listen to each one
sockfd = attempt_listen(next_p);
if(sockfd != -1) break;
}
// either we found one and were able to listen or nothing.
check(sockfd != -1, "All possible addresses failed.");
error: //fallthrough
if(info) freeaddrinfo(info);
// this gets set by the above to either -1 or valid
return sockfd;
}
```
.\ex48b\statserve\src\net.h
```c
#ifndef _net_h
#define _net_h
#include <lcthw/ringbuffer.h>
#define BACKLOG 10
int nonblock(int fd);
int client_connect(char *host, char *port);
int read_some(RingBuffer * buffer, int fd, int is_socket);
int write_some(RingBuffer * buffer, int fd, int is_socket);
int server_listen(const char *host, const char *port);
#endif
```
.\ex48b\statserve\src\statserve.c
```c
#include <stdio.h>
#include <ctype.h>
#include <lcthw/dbg.h>
#include <unistd.h>
#include <stdlib.h>
#include <signal.h>
#include <sys/wait.h>
#include "net.h"
#include <netdb.h>
const int RB_SIZE = 1024 * 10;
void handle_sigchild(int sig) {
sig = 0; // ignore it
while(waitpid(-1, NULL, WNOHANG) > 0) {
}
}
void client_handler(int client_fd)
{
int rc = 0;
// need a ringbuffer for the input
RingBuffer *sock_rb = RingBuffer_create(RB_SIZE);
// read_some in a loop
while(read_some(sock_rb, client_fd, 1) != -1) {
// write_it back off the ringbuffer
if(write_some(sock_rb, client_fd, 1) == -1) {
debug("Client closed.");
break;
}
}
// close the socket
rc = close(client_fd);
check(rc != -1, "Failed to close the socket.");
error: // fallthrough
if(sock_rb) RingBuffer_destroy(sock_rb);
exit(0); // just exit the child process
}
int echo_server(const char *host, const char *port)
{
int rc = 0;
struct sockaddr_in client_addr;
socklen_t sin_size = sizeof(client_addr);
int server_socket = 0;
int client_fd = 0;
struct sigaction sa = {
.sa_handler = handle_sigchild,
.sa_flags = SA_RESTART | SA_NOCLDSTOP
};
check(host != NULL, "Invalid host.");
check(port != NULL, "Invalid port.");
// create a sigaction that handles SIGCHLD
sigemptyset(&sa.sa_mask);
rc = sigaction(SIGCHLD, &sa, 0);
check(rc != -1, "Failed to setup signal handler for child processes.");
// listen on the given port and host
server_socket = server_listen(host, port);
check(server_socket >= 0, "bind to %s:%s failed.", host, port);
while(1) {
// accept the connection
client_fd = accept(server_socket, (struct sockaddr *)&client_addr, &sin_size);
check(client_fd >= 0, "Failed to accept connection.");
debug("Client connected.");
rc = fork();
if(rc == 0) {
// child process
close(server_socket); // don't need this
// handle the client
client_handler(client_fd);
} else {
// server process
close(client_fd); // don't need this
}
}
error: // fallthrough
return -1;
}
```
.\ex48b\statserve\src\statserve.h
```c
#ifndef _statserve_h
#define _statserve_h
int echo_server(const char *host, const char *port);
#endif
```
.\ex48b\statserve\tests\statserve_tests.c
```c
#include "minunit.h"
#include <dlfcn.h>
#include "statserve.h"
char *test_dummy()
{
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_dummy);
return NULL;
}
RUN_TESTS(all_tests);
```
Solution
The Plan
Show you how I solved the *statserve* project.
The Purpose
Watch me solve the first project quickly, then review the code.
The Setup
First I need to install liblcthw since I'll be using that.
Then I make the project skeleton and get something, anything going.
The Server
Then I just get it accepting a connection.
The Echo
Then I decided to just make it echo back what I type.
The Final Code
### Exercise 49a A Statistics Server
Project Description
The Plan
Make the *statserve* project do something using a simple protocol.
The Purpose
Learn the first steps in creating a server that answers a protocol.
The Requirements
Create this protocol:
* `create`: Create a new statistic.
* `mean`: Get the current mean of a statistic.
* `sample`: Add a new sample to a statistic.
* `dump`: Get all of the elements of a statistic (sum, sumsq, n, min, and max).
The Requirements
1. You'll need to allow people to name these statistics, which means using one of the map style data structures to map names to ``Stats`` structs (a rough sketch follows this list).
2. You'll need to add the ``CRUD`` standard operations for each name. CRUD stands for create, read, update, delete. Currently, the list of commands above has create for creating; mean and dump for reading; and sample for updating. You need a delete command now.
3. Make the protocol *strict*! Abort any client that makes any mistakes in the protocol.
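For requirement 1, liblcthw already has the pieces: a ``Hashmap`` with its default (bstring) compare and hash can map names to ``Stats``. Here's a rough sketch of the idea, not the actual solution code; the function and variable names are made up:
```c
#include <lcthw/hashmap.h>
#include <lcthw/stats.h>
#include <lcthw/bstrlib.h>
#include <lcthw/dbg.h>
// hypothetical: one map from bstring names to Stats
static Hashmap *stats_by_name = NULL;
int store_stat(bstring name, double first_sample)
{
    if(stats_by_name == NULL) {
        // NULL, NULL picks liblcthw's default (bstring) compare and hash
        stats_by_name = Hashmap_create(NULL, NULL);
        check_mem(stats_by_name);
    }
    Stats *st = Stats_create();
    check_mem(st);
    Stats_sample(st, first_sample);
    // a real server would copy the name (bstrcpy) before storing it
    check(Hashmap_set(stats_by_name, name, st) == 0,
            "Failed to store %s.", bdata(name));
    return 0;
error:
    return -1;
}
```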
Strict Protocol
Once again, in case you missed it, be ruthless!
Abort all deviant clients.
Pause!
I'm going to give you clues to solve this, so if you want to try on your own pause now!
The Clues
* Create the data structures first for holding the information for each of these commands.
* Then write a protocol parser to handle it and fill in the data.
* Then pass that data to a function that knows how to do that command.
* You can just store the stats in a Hashmap, BSTree, or TSTree for now.
* KEEP IT SIMPLE!
Important References
* You'll want to refer to the bstring documentation as much as possible to know what functions to use.
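For example, two bstring calls do most of the heavy lifting for a protocol like this: bsplits() to tokenize a line and biseq() to compare tokens. A minimal sketch of recognizing one verb (the function name is made up):
```c
#include <lcthw/bstrlib.h>
#include <lcthw/dbg.h>
struct tagbstring SPACE = bsStatic(" ");
struct tagbstring CREATE = bsStatic("create");
// hypothetical: does this line look like "create <name> <number>"?
int looks_like_create(bstring line)
{
    int rc = 0;
    struct bstrList *tokens = bsplits(line, &SPACE);
    check(tokens != NULL, "Failed to split the line.");
    // tokens->qty is the piece count, tokens->entry[0] is the verb
    rc = tokens->qty == 3 && biseq(tokens->entry[0], &CREATE);
    bstrListDestroy(tokens);
    return rc;
error:
    return 0;
}
```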
Encouragement
* Remember that this is *supposed* to be hard.
* You are *supposed* to struggle with this.
* This could take you a while, but keep up the struggle, do it bit by bit, and test little pieces as you go.
* Automate your tests!
### Exercise 49b A Statistics Server
.\ex49b\statserve
.\ex49b\statserve\bin\statserve.c
```c
#include <stdio.h>
#include <lcthw/dbg.h>
#include "statserve.h"
#include "net.h"
int main(int argc, char *argv[])
{
check(argc == 3, "USAGE: statserve host port");
const char *host = argv[1];
const char *port = argv[2];
check(echo_server(host, port), "Failed to run the echo server.");
return 0;
error:
return 1;
}
```
.\ex49b\statserve\src\dbg.h
```c
#ifndef __dbg_h__
#define __dbg_h__
#include <stdio.h>
#include <errno.h>
#include <string.h>
#ifdef NDEBUG
#define debug(M, ...)
#else
#define debug(M, ...) fprintf(stderr, "DEBUG %s:%d: " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#endif
#define clean_errno() (errno == 0 ? "None" : strerror(errno))
#define log_err(M, ...) fprintf(stderr,\
"[ERROR] (%s:%d: errno: %s) " M "\n", __FILE__, __LINE__,\
clean_errno(), ##__VA_ARGS__)
#define log_warn(M, ...) fprintf(stderr,\
"[WARN] (%s:%d: errno: %s) " M "\n",\
__FILE__, __LINE__, clean_errno(), ##__VA_ARGS__)
#define log_info(M, ...) fprintf(stderr, "[INFO] (%s:%d) " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#define check(A, M, ...) if(!(A)) {\
log_err(M, ##__VA_ARGS__); errno=0; goto error; }
#define sentinel(M, ...) { log_err(M, ##__VA_ARGS__);\
errno=0; goto error; }
#define check_mem(A) check((A), "Out of memory.")
#define check_debug(A, M, ...) if(!(A)) { debug(M, ##__VA_ARGS__);\
errno=0; goto error; }
#endif
```
.\ex49b\statserve\src\net.c
```c
#include <stdlib.h>
#include <sys/select.h>
#include <stdio.h>
#include <lcthw/ringbuffer.h>
#include <lcthw/dbg.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <sys/uio.h>
#include <arpa/inet.h>
#include <netdb.h>
#include <unistd.h>
#include <fcntl.h>
#include "net.h"
struct tagbstring NL = bsStatic("\n");
struct tagbstring CRLF = bsStatic("\r\n");
int nonblock(int fd)
{
int flags = fcntl(fd, F_GETFL, 0);
check(flags >= 0, "Invalid flags on nonblock.");
int rc = fcntl(fd, F_SETFL, flags | O_NONBLOCK);
check(rc == 0, "Can't set nonblocking.");
return 0;
error:
return -1;
}
int client_connect(char *host, char *port)
{
int rc = 0;
struct addrinfo *addr = NULL;
rc = getaddrinfo(host, port, NULL, &addr);
check(rc == 0, "Failed to lookup %s:%s", host, port);
int sock = socket(AF_INET, SOCK_STREAM, 0);
check(sock >= 0, "Cannot create a socket.");
rc = connect(sock, addr->ai_addr, addr->ai_addrlen);
check(rc == 0, "Connect failed.");
rc = nonblock(sock);
check(rc == 0, "Can't set nonblocking.");
freeaddrinfo(addr);
return sock;
error:
freeaddrinfo(addr);
return -1;
}
int read_some(RingBuffer * buffer, int fd, int is_socket)
{
int rc = 0;
if (RingBuffer_available_data(buffer) == 0) {
buffer->start = buffer->end = 0;
}
if (is_socket) {
rc = recv(fd, RingBuffer_starts_at(buffer),
RingBuffer_available_space(buffer), 0);
} else {
rc = read(fd, RingBuffer_starts_at(buffer),
RingBuffer_available_space(buffer));
}
check(rc >= 0, "Failed to read from fd: %d", fd);
RingBuffer_commit_write(buffer, rc);
return rc;
error:
return -1;
}
int write_some(RingBuffer * buffer, int fd, int is_socket)
{
int rc = 0;
bstring data = RingBuffer_get_all(buffer);
check(data != NULL, "Failed to get from the buffer.");
check(bfindreplace(data, &NL, &CRLF, 0) == BSTR_OK,
"Failed to replace NL.");
if (is_socket) {
rc = send(fd, bdata(data), blength(data), 0);
} else {
rc = write(fd, bdata(data), blength(data));
}
check(rc == blength(data), "Failed to write everything to fd: %d.",
fd);
bdestroy(data);
return rc;
error:
return -1;
}
int attempt_listen(struct addrinfo *info)
{
int sockfd = -1; // default fail
int rc = -1;
int yes = 1;
check(info != NULL, "Invalid addrinfo.");
// create a socket with the addrinfo
sockfd = socket(info->ai_family, info->ai_socktype,
info->ai_protocol);
check_debug(sockfd != -1, "Failed to bind to address. Trying more.");
// set the SO_REUSEADDR option on the socket
rc = setsockopt(sockfd, SOL_SOCKET, SO_REUSEADDR, &yes, sizeof(int));
check_debug(rc == 0, "Failed to set SO_REUSADDR.");
// attempt to bind to it
rc = bind(sockfd, info->ai_addr, info->ai_addrlen);
check_debug(rc == 0, "Failed to find socket.");
// finally listen with a backlog
rc = listen(sockfd, BACKLOG);
check_debug(rc == 0, "Failed to listen to socket.");
return sockfd;
error:
return -1;
}
int server_listen(const char *host, const char *port)
{
int rc = 0;
int sockfd = -1; // default fail value
struct addrinfo *info = NULL;
struct addrinfo *next_p = NULL;
struct addrinfo addr = {
.ai_family = AF_UNSPEC,
.ai_socktype = SOCK_STREAM,
.ai_flags = AI_PASSIVE
};
check(host != NULL, "Invalid host.");
check(port != NULL, "Invalid port.");
// get the address info for host and port
rc = getaddrinfo(NULL, port, &addr, &info);
check(rc == 0, "Failed to get address info for connect.");
// cycle through the available list to find one
for(next_p = info; next_p != NULL; next_p = next_p->ai_next)
{
// attempt to listen to each one
sockfd = attempt_listen(next_p);
if(sockfd != -1) break;
}
// either we found one and were able to listen or nothing.
check(sockfd != -1, "All possible addresses failed.");
error: //fallthrough
if(info) freeaddrinfo(info);
// this gets set by the above to either -1 or valid
return sockfd;
}
bstring read_line(RingBuffer *input, const char line_ending)
{
int i = 0;
bstring result = NULL;
// not super efficient
// read a character at a time from the ring buffer
for(i = 0; i < RingBuffer_available_data(input); i++) {
// if the buffer has line ending
if(input->buffer[i] == line_ending) {
// get that much from the ring buffer
result = RingBuffer_gets(input, i);
check(result, "Failed to get line from RingBuffer");
// make sure that we got the right amount
check(RingBuffer_available_data(input) >= 1,
"Not enough data in the RingBuffer after reading line.");
// and commit it
RingBuffer_commit_read(input, 1);
break;
}
}
// notice this will fail in the cases where we get a set of data
// on the wire that does not have a line ending yet
return result;
error:
return NULL;
}
```
.\ex49b\statserve\src\net.h
```c
#ifndef _net_h
#define _net_h
#include <lcthw/ringbuffer.h>
#define BACKLOG 10
int nonblock(int fd);
int client_connect(char *host, char *port);
int read_some(RingBuffer * buffer, int fd, int is_socket);
int write_some(RingBuffer * buffer, int fd, int is_socket);
int server_listen(const char *host, const char *port);
bstring read_line(RingBuffer *input, const char line_ending);
#endif
```
.\ex49b\statserve\src\statserve.c
```c
#include <stdio.h>
#include <ctype.h>
#include <lcthw/dbg.h>
#include <lcthw/hashmap.h>
#include <lcthw/stats.h>
#include <unistd.h>
#include <stdlib.h>
#include <signal.h>
#include <sys/wait.h>
#include "net.h"
#include <netdb.h>
struct tagbstring LINE_SPLIT = bsStatic(" ");
struct tagbstring CREATE = bsStatic("create");
struct tagbstring STDDEV = bsStatic("stddev");
struct tagbstring MEAN = bsStatic("mean");
struct tagbstring SAMPLE = bsStatic("sample");
struct tagbstring DUMP = bsStatic("dump");
struct tagbstring DELETE = bsStatic("delete");
struct tagbstring OK = bsStatic("OK\n");
struct tagbstring ERR = bsStatic("ERR\n");
struct tagbstring DNE = bsStatic("DNE\n");
struct tagbstring EXISTS = bsStatic("EXISTS\n");
const char LINE_ENDING = '\n';
const int RB_SIZE = 1024 * 10;
Hashmap *DATA = NULL;
struct Command;
typedef int (*handler_cb)(struct Command *cmd, RingBuffer *send_rb);
typedef struct Command {
bstring command;
bstring name;
bstring number;
handler_cb handler;
} Command;
typedef struct Record {
bstring name;
Stats *stat;
} Record;
void handle_sigchild(int sig) {
sig = 0; // ignore it
while(waitpid(-1, NULL, WNOHANG) > 0) {
}
}
void send_reply(RingBuffer *send_rb, bstring reply)
{
RingBuffer_puts(send_rb, reply);
}
int handle_create(Command *cmd, RingBuffer *send_rb)
{
int rc = 0;
// if the name is in the DATA map then return exists
if(Hashmap_get(DATA, cmd->name)) {
send_reply(send_rb, &EXISTS);
} else {
// allocate a record
debug("create: %s %s", bdata(cmd->name), bdata(cmd->number));
Record *info = calloc(sizeof(Record), 1);
check_mem(info);
// set its stat element
info->stat = Stats_create();
check_mem(info->stat);
// set its name element
info->name = bstrcpy(cmd->name);
check_mem(info->name);
// do a first sample
Stats_sample(info->stat, atof(bdata(cmd->number)));
// add it to the hashmap
rc = Hashmap_set(DATA, info->name, info);
check(rc == 0, "Failed to add data to map.");
// send an OK
send_reply(send_rb, &OK);
}
return 0;
error:
return -1;
}
int handle_sample(Command *cmd, RingBuffer *send_rb)
{
// get the info from the hashmap
Record *info = Hashmap_get(DATA, cmd->name);
if(info == NULL) {
// if it doesn't exist then DNE
send_reply(send_rb, &DNE);
} else {
// else run sample on it, return the mean
Stats_sample(info->stat, atof(bdata(cmd->number)));
bstring reply = bformat("%f\n", Stats_mean(info->stat));
send_reply(send_rb, reply);
bdestroy(reply);
}
return 0;
}
int handle_delete(Command *cmd, RingBuffer *send_rb)
{
log_info("delete: %s", bdata(cmd->name));
Record *info = Hashmap_get(DATA, cmd->name);
if(info == NULL) {
send_reply(send_rb, &DNE);
} else {
Hashmap_delete(DATA, cmd->name);
free(info->stat);
bdestroy(info->name);
free(info);
send_reply(send_rb, &OK);
}
return 0;
}
int handle_mean(Command *cmd, RingBuffer *send_rb)
{
log_info("mean: %s", bdata(cmd->name));
Record *info = Hashmap_get(DATA, cmd->name);
if(info == NULL) {
send_reply(send_rb, &DNE);
} else {
bstring reply = bformat("%f\n", Stats_mean(info->stat));
send_reply(send_rb, reply);
bdestroy(reply);
}
return 0;
}
int handle_stddev(Command *cmd, RingBuffer *send_rb)
{
log_info("stddev: %s", bdata(cmd->name));
Record *info = Hashmap_get(DATA, cmd->name);
if(info == NULL) {
send_reply(send_rb, &DNE);
} else {
bstring reply = bformat("%f\n", Stats_stddev(info->stat));
send_reply(send_rb, reply);
bdestroy(reply);
}
return 0;
}
int handle_dump(Command *cmd, RingBuffer *send_rb)
{
log_info("dump: %s", bdata(cmd->name));
Record *info = Hashmap_get(DATA, cmd->name);
if(info == NULL) {
send_reply(send_rb, &DNE);
} else {
bstring reply = bformat("%f %f %f %f %ld %f %f\n",
Stats_mean(info->stat),
Stats_stddev(info->stat),
info->stat->sum,
info->stat->sumsq,
info->stat->n,
info->stat->min,
info->stat->max);
send_reply(send_rb, reply);
bdestroy(reply);
}
return 0;
}
int parse_command(struct bstrList *splits, Command *cmd)
{
// get the command
cmd->command = splits->entry[0];
if(biseq(cmd->command, &CREATE)) {
check(splits->qty == 3, "Failed to parse create: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->number = splits->entry[2];
cmd->handler = handle_create;
} else if(biseq(cmd->command, &MEAN)) {
check(splits->qty == 2, "Failed to parse mean: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->handler = handle_mean;
} else if(biseq(cmd->command, &SAMPLE)) {
check(splits->qty == 3, "Failed to parse sample: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->number = splits->entry[2];
cmd->handler = handle_sample;
} else if(biseq(cmd->command, &DUMP)) {
check(splits->qty == 2, "Failed to parse dump: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->handler = handle_dump;
} else if(biseq(cmd->command, &DELETE)) {
check(splits->qty == 2, "Failed to parse delete: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->handler = handle_delete;
} else if(biseq(cmd->command, &STDDEV)) {
check(splits->qty == 2, "Failed to parse stddev: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->handler = handle_stddev;
} else {
sentinel("Failed to parse the command.");
}
return 0;
error:
return -1;
}
int parse_line(bstring data, RingBuffer *send_rb)
{
int rc = -1;
Command cmd = {.command = NULL};
// split data on line boundaries
struct bstrList *splits = bsplits(data, &LINE_SPLIT);
check(splits != NULL, "Bad data.");
// parse it into a command
rc = parse_command(splits, &cmd);
check(rc == 0, "Failed to parse command.");
// call the command handler for that command
rc = cmd.handler(&cmd, send_rb);
error: // fallthrough
if(splits) bstrListDestroy(splits);
return rc;
}
void client_handler(int client_fd)
{
int rc = 0;
RingBuffer *recv_rb = RingBuffer_create(RB_SIZE);
RingBuffer *send_rb = RingBuffer_create(RB_SIZE);
check_mem(recv_rb);
check_mem(send_rb);
// keep reading into the recv buffer and sending on send
while(read_some(recv_rb, client_fd, 1) != -1) {
// read a line from the recv_rb
bstring data = read_line(recv_rb, LINE_ENDING);
check(data != NULL, "Client closed.");
// parse it, close on any protocol errors
rc = parse_line(data, send_rb);
bdestroy(data); // cleanup here
check(rc == 0, "Failed to parse user. Closing.");
// and as long as there's something to send, send it
if(RingBuffer_available_data(send_rb)) {
write_some(send_rb, client_fd, 1);
}
}
// close the socket
rc = close(client_fd);
check(rc != -1, "Failed to close the socket.");
error: // fallthrough
if(recv_rb) RingBuffer_destroy(recv_rb);
if(send_rb) RingBuffer_destroy(send_rb);
exit(0); // just exit the child process
}
int setup_data_store()
{
// a more advanced design simply wouldn't use this
DATA = Hashmap_create(NULL, NULL);
check_mem(DATA);
return 0;
error:
return -1;
}
int echo_server(const char *host, const char *port)
{
int rc = 0;
struct sockaddr_in client_addr;
socklen_t sin_size = sizeof(client_addr);
int server_socket = 0;
int client_fd = 0;
rc = setup_data_store();
check(rc == 0, "Failed to setup the data store.");
struct sigaction sa = {
.sa_handler = handle_sigchild,
.sa_flags = SA_RESTART | SA_NOCLDSTOP
};
check(host != NULL, "Invalid host.");
check(port != NULL, "Invalid port.");
// create a sigaction that handles SIGCHLD
sigemptyset(&sa.sa_mask);
rc = sigaction(SIGCHLD, &sa, 0);
check(rc != -1, "Failed to setup signal handler for child processes.");
// listen on the given port and host
server_socket = server_listen(host, port);
check(server_socket >= 0, "bind to %s:%s failed.", host, port);
while(1) {
// accept the connection
client_fd = accept(server_socket, (struct sockaddr *)&client_addr, &sin_size);
check(client_fd >= 0, "Failed to accept connection.");
debug("Client connected.");
rc = fork();
if(rc == 0) {
// child process
close(server_socket); // don't need this
// handle the client
client_handler(client_fd);
} else {
// server process
close(client_fd); // don't need this
}
}
error: // fallthrough
return -1;
}
```
.\ex49b\statserve\src\statserve.h
```c
#ifndef _statserve_h
#define _statserve_h
#include <lcthw/bstrlib.h>
#include <lcthw/ringbuffer.h>
struct tagbstring OK;
int setup_data_store();
int parse_line(bstring data, RingBuffer *send_rb);
int echo_server(const char *host, const char *port);
#endif
```
.\ex49b\statserve\tests\statserve_tests.c
```c
#include "minunit.h"
#include <dlfcn.h>
#include "statserve.h"
#include <lcthw/bstrlib.h>
#include <lcthw/ringbuffer.h>
#include <assert.h>
typedef struct LineTest {
char *line;
bstring result;
char *description;
} LineTest;
int attempt_line(LineTest test)
{
int rc = -1;
bstring result = NULL;
bstring line = bfromcstr(test.line);
RingBuffer *send_rb = RingBuffer_create(1024);
rc = parse_line(line, send_rb);
check(rc == 0, "Failed to parse line.");
result = RingBuffer_get_all(send_rb);
check(result != NULL, "Ring buffer empty.");
check(biseq(result, test.result), "Got the wrong output: %s expected %s",
bdata(result), bdata(test.result));
bdestroy(line);
RingBuffer_destroy(send_rb);
return 1; // using 1 for tests
error:
log_err("Failed to process test %s: got %s", test.line, bdata(result));
if(line) bdestroy(line);
if(send_rb) RingBuffer_destroy(send_rb);
return 0;
}
int run_test_lines(LineTest *tests, int count)
{
int i = 0;
for(i = 0; i < count; i++) {
check(attempt_line(tests[i]), "Failed to run %s", tests[i].description);
}
return 1;
error:
return 0;
}
char *test_create()
{
LineTest tests[] = {
{.line = "create /zed 100", .result = &OK, .description = "create zed failed"},
{.line = "create /joe 100", .result = &OK, .description = "create joe failed"},
};
mu_assert(run_test_lines(tests, 2), "Failed to run create tests.");
return NULL;
}
char *test_sample()
{
struct tagbstring sample1 = bsStatic("100.000000\n");
LineTest tests[] = {
{.line = "sample /zed 100", .result = &sample1, .description = "sample zed failed."}
};
mu_assert(run_test_lines(tests, 1), "Failed to run sample tests.");
return NULL;
}
char *all_tests()
{
mu_suite_start();
int rc = setup_data_store();
mu_assert(rc == 0, "Failed to setup the data store.");
mu_run_test(test_create);
mu_run_test(test_sample);
return NULL;
}
RUN_TESTS(all_tests);
```
Solution
The Plan
I'll show you how I implemented the protocol in the smallest code possible.
I won't implement all of the CRUD operations, so you can go look at the
git repo for this project to see a full implementation.
The Setup
First I set up the data, then the protocol parser, then the handlers.
The Protocol
create Create a new statistic.
mean Get the current mean of a statistic.
sample Add a new sample to a statistic.
dump Get all of the elements of a statistic (sum, sumsq, n, min, and max).
Final Code
The Command Structure
```c
typedef struct Command {
    bstring command;
    bstring name;
    bstring number;
    handler_cb handler;
} Command;
```
The Storage Record
```c
typedef struct Record {
    bstring name;
    Stats *stat;
} Record;
```
The Design
* Accept a connection
* Parse the line into the Command
* Run a handler function to process it (a small driver sketch follows this list)
* Temporarily store into a Hashmap
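Seen from the tests, that whole pipeline is just setup_data_store() plus parse_line() with a RingBuffer to catch the reply. Here's a minimal stand-alone sketch of driving it directly; the literal line, the buffer size, and the function name are arbitrary:
```c
#include <lcthw/bstrlib.h>
#include <lcthw/ringbuffer.h>
#include <lcthw/dbg.h>
#include "statserve.h"
// hypothetical driver, mirroring what the unit tests do
int demo_one_line()
{
    bstring reply = NULL;
    bstring line = bfromcstr("create /zed 100");
    RingBuffer *send_rb = RingBuffer_create(1024);
    check_mem(line);
    check_mem(send_rb);
    check(setup_data_store() == 0, "Failed to setup the data store.");
    // parse_line() picks the handler, which writes its reply into send_rb
    check(parse_line(line, send_rb) == 0, "Protocol error.");
    reply = RingBuffer_get_all(send_rb);
    check(reply != NULL, "No reply.");
    debug("Server said: %s", bdata(reply)); // "OK" for a fresh create
    bdestroy(reply);
    bdestroy(line);
    RingBuffer_destroy(send_rb);
    return 0;
error:
    if(reply) bdestroy(reply);
    if(line) bdestroy(line);
    if(send_rb) RingBuffer_destroy(send_rb);
    return -1;
}
```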
Final Thoughts
The last thing I would do is add better tests and round out the protocol with CRUD operations.
### Exercise 50a Routing the Statistics
Project Description
The Plan
You are now given vague instructions and have to "solve" them as best you can.
The Purpose
To give you freedom to be creative, and also taste a real project with vague
specifications.
Many times all you get is a single sentence in a bug tracker. Oh well.
The Requirements
Allow people to work with statistics at arbitrary URLs in the server.
You get to define what that means, but think "web application".
Pause!
Try to solve it on your own then continue.
The Clues
Answer these questions:
1. What happens when I have a statistic "under" another, as in /age/northamerica/ under /age/? (A path-splitting sketch follows these questions.)
2. Could you do the summary statistics we talked about? A mean of means and mean of standard deviations that are rolled up the tree?
3. What data structures do you need? Starting with data is key here too. Data data data.
4. Are your tests good enough? Before you start you might want to get good tests that use the protocol.
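For the first question, one way to think about "under" is to split the URL on / and walk its prefixes from deepest to shortest. A rough sketch of that walk using bsplits() and bjoin(); the function name is made up:
```c
#include <lcthw/bstrlib.h>
#include <lcthw/dbg.h>
struct tagbstring SLASH = bsStatic("/");
// hypothetical: print every prefix of a URL, deepest first,
// e.g. /age/northamerica then /age
int show_prefixes(bstring url)
{
    struct bstrList *parts = bsplits(url, &SLASH);
    check(parts != NULL, "Bad url.");
    int qty = parts->qty;
    for(; parts->qty > 1; parts->qty--) {
        // shrinking qty makes bjoin rebuild a shorter prefix each time
        bstring prefix = bjoin(parts, &SLASH);
        debug("prefix: %s", bdata(prefix));
        bdestroy(prefix);
    }
    parts->qty = qty; // restore before freeing
    bstrListDestroy(parts);
    return 0;
error:
    return -1;
}
```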
Important References
* Definitely look at the statistics code you built in liblcthw if you do the summary statistics.
Encouragement
This is hard, as I've said all along, however it is all doable. It's simply a matter of breaking the problems down and tackling each little piece.
### Exercise 50b Routing the Statistics
Solution
The Plan
Show you how I solved the problem of routing the names of statistics as URLs.
The Setup
1. First thing I did was make sure my tests were really good.
2. Then I designed the data structures I'd need.
3. Then I did the work to make them functions.
4. The protocol shouldn't need to change.
"URLs"
I'll define paths as simply names separated by /.
Real URLs are way more complex than that.
Data Structure
I just added:
```c
struct bstrList *path;
```
To the Command struct to hold paths.
URL Meaning
Kind of weird, but:
The deepest part of the URL is the "parent"; that is the main stat.
The "children" are the next segments up, and they hold mean-of-means stats.
For example, /logins/zed is the main stat, and /logins only receives /logins/zed's mean rolled up into it as samples.
New Processing
1. Change to a loop over all paths with a "scan path" function.
2. Add optional path parameter to handlers.
3. Parse the path in *parse\_command* to set path in Commands.
4. In sample and create, change root processing vs. child processing.
5. Move *send\_reply* over to *net.c* instead.
Test First Path Parsing
I'll write a small test for just the *scan\_paths* part first.
Then wire that in and use the existing tests to confirm the old code
works.
The Code
.\ex50b\statserve
.\ex50b\statserve\bin\statserve.c
```c
#include <stdio.h>
#include <lcthw/dbg.h>
#include "statserve.h"
#include "net.h"
int main(int argc, char *argv[])
{
check(argc == 3, "USAGE: statserve host port");
const char *host = argv[1];
const char *port = argv[2];
check(echo_server(host, port), "Failed to run the echo server.");
return 0;
error:
return 1;
}
```
.\ex50b\statserve\src\dbg.h
```c
#ifndef __dbg_h__
#define __dbg_h__
#include <stdio.h>
#include <errno.h>
#include <string.h>
#ifdef NDEBUG
#define debug(M, ...)
#else
#define debug(M, ...) fprintf(stderr, "DEBUG %s:%d: " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#endif
#define clean_errno() (errno == 0 ? "None" : strerror(errno))
#define log_err(M, ...) fprintf(stderr,\
"[ERROR] (%s:%d: errno: %s) " M "\n", __FILE__, __LINE__,\
clean_errno(), ##__VA_ARGS__)
#define log_warn(M, ...) fprintf(stderr,\
"[WARN] (%s:%d: errno: %s) " M "\n",\
__FILE__, __LINE__, clean_errno(), ##__VA_ARGS__)
#define log_info(M, ...) fprintf(stderr, "[INFO] (%s:%d) " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#define check(A, M, ...) if(!(A)) {\
log_err(M, ##__VA_ARGS__); errno=0; goto error; }
#define sentinel(M, ...) { log_err(M, ##__VA_ARGS__);\
errno=0; goto error; }
#define check_mem(A) check((A), "Out of memory.")
#define check_debug(A, M, ...) if(!(A)) { debug(M, ##__VA_ARGS__);\
errno=0; goto error; }
#endif
```
.\ex50b\statserve\src\net.c
```c
#include <stdlib.h>
#include <sys/select.h>
#include <stdio.h>
#include <lcthw/ringbuffer.h>
#include <lcthw/dbg.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <sys/uio.h>
#include <arpa/inet.h>
#include <netdb.h>
#include <unistd.h>
#include <fcntl.h>
#include "net.h"
struct tagbstring NL = bsStatic("\n");
struct tagbstring CRLF = bsStatic("\r\n");
int nonblock(int fd)
{
int flags = fcntl(fd, F_GETFL, 0);
check(flags >= 0, "Invalid flags on nonblock.");
int rc = fcntl(fd, F_SETFL, flags | O_NONBLOCK);
check(rc == 0, "Can't set nonblocking.");
return 0;
error:
return -1;
}
int client_connect(char *host, char *port)
{
int rc = 0;
struct addrinfo *addr = NULL;
rc = getaddrinfo(host, port, NULL, &addr);
check(rc == 0, "Failed to lookup %s:%s", host, port);
int sock = socket(AF_INET, SOCK_STREAM, 0);
check(sock >= 0, "Cannot create a socket.");
rc = connect(sock, addr->ai_addr, addr->ai_addrlen);
check(rc == 0, "Connect failed.");
rc = nonblock(sock);
check(rc == 0, "Can't set nonblocking.");
freeaddrinfo(addr);
return sock;
error:
freeaddrinfo(addr);
return -1;
}
int read_some(RingBuffer * buffer, int fd, int is_socket)
{
int rc = 0;
if (RingBuffer_available_data(buffer) == 0) {
buffer->start = buffer->end = 0;
}
if (is_socket) {
rc = recv(fd, RingBuffer_starts_at(buffer),
RingBuffer_available_space(buffer), 0);
} else {
rc = read(fd, RingBuffer_starts_at(buffer),
RingBuffer_available_space(buffer));
}
check(rc >= 0, "Failed to read from fd: %d", fd);
RingBuffer_commit_write(buffer, rc);
return rc;
error:
return -1;
}
int write_some(RingBuffer * buffer, int fd, int is_socket)
{
int rc = 0;
bstring data = RingBuffer_get_all(buffer);
check(data != NULL, "Failed to get from the buffer.");
check(bfindreplace(data, &NL, &CRLF, 0) == BSTR_OK,
"Failed to replace NL.");
if (is_socket) {
rc = send(fd, bdata(data), blength(data), 0);
} else {
rc = write(fd, bdata(data), blength(data));
}
check(rc == blength(data), "Failed to write everything to fd: %d.",
fd);
bdestroy(data);
return rc;
error:
return -1;
}
int attempt_listen(struct addrinfo *info)
{
int sockfd = -1; // default fail
int rc = -1;
int yes = 1;
check(info != NULL, "Invalid addrinfo.");
// create a socket with the addrinfo
sockfd = socket(info->ai_family, info->ai_socktype,
info->ai_protocol);
check_debug(sockfd != -1, "Failed to bind to address. Trying more.");
// set the SO_REUSEADDR option on the socket
rc = setsockopt(sockfd, SOL_SOCKET, SO_REUSEADDR, &yes, sizeof(int));
check_debug(rc == 0, "Failed to set SO_REUSADDR.");
// attempt to bind to it
rc = bind(sockfd, info->ai_addr, info->ai_addrlen);
check_debug(rc == 0, "Failed to find socket.");
// finally listen with a backlog
rc = listen(sockfd, BACKLOG);
check_debug(rc == 0, "Failed to listen to socket.");
return sockfd;
error:
return -1;
}
int server_listen(const char *host, const char *port)
{
int rc = 0;
int sockfd = -1; // default fail value
struct addrinfo *info = NULL;
struct addrinfo *next_p = NULL;
struct addrinfo addr = {
.ai_family = AF_UNSPEC,
.ai_socktype = SOCK_STREAM,
.ai_flags = AI_PASSIVE
};
check(host != NULL, "Invalid host.");
check(port != NULL, "Invalid port.");
// get the address info for host and port
rc = getaddrinfo(NULL, port, &addr, &info);
check(rc == 0, "Failed to get address info for connect.");
// cycle through the available list to find one
for(next_p = info; next_p != NULL; next_p = next_p->ai_next)
{
// attempt to listen to each one
sockfd = attempt_listen(next_p);
if(sockfd != -1) break;
}
// either we found one and were able to listen or nothing.
check(sockfd != -1, "All possible addresses failed.");
error: //fallthrough
if(info) freeaddrinfo(info);
// this gets set by the above to either -1 or valid
return sockfd;
}
bstring read_line(RingBuffer *input, const char line_ending)
{
int i = 0;
bstring result = NULL;
// not super efficient
// read a character at a time from the ring buffer
for(i = 0; i < RingBuffer_available_data(input); i++) {
// if the buffer has line ending
if(input->buffer[i] == line_ending) {
// get that much from the ring buffer
result = RingBuffer_gets(input, i);
check(result, "Failed to get line from RingBuffer");
// make sure that we got the right amount
check(RingBuffer_available_data(input) >= 1,
"Not enough data in the RingBuffer after reading line.");
// and commit it
RingBuffer_commit_read(input, 1);
break;
}
}
// notice this will fail in the cases where we get a set of data
// on the wire that does not have a line ending yet
return result;
error:
return NULL;
}
void send_reply(RingBuffer *send_rb, bstring reply)
{
RingBuffer_puts(send_rb, reply);
}
```
.\ex50b\statserve\src\net.h
```c
#ifndef _net_h
#define _net_h
#include <lcthw/ringbuffer.h>
#define BACKLOG 10
int nonblock(int fd);
int client_connect(char *host, char *port);
int read_some(RingBuffer * buffer, int fd, int is_socket);
int write_some(RingBuffer * buffer, int fd, int is_socket);
int server_listen(const char *host, const char *port);
bstring read_line(RingBuffer *input, const char line_ending);
void send_reply(RingBuffer *send_rb, bstring reply);
#endif
```
.\ex50b\statserve\src\statserve.c
```c
#include <stdio.h>
#include <ctype.h>
#include <lcthw/dbg.h>
#include <lcthw/hashmap.h>
#include <unistd.h>
#include <stdlib.h>
#include <signal.h>
#include <sys/wait.h>
#include "net.h"
#include <netdb.h>
#include "statserve.h"
struct tagbstring LINE_SPLIT = bsStatic(" ");
struct tagbstring CREATE = bsStatic("create");
struct tagbstring STDDEV = bsStatic("stddev");
struct tagbstring MEAN = bsStatic("mean");
struct tagbstring SAMPLE = bsStatic("sample");
struct tagbstring DUMP = bsStatic("dump");
struct tagbstring DELETE = bsStatic("delete");
struct tagbstring OK = bsStatic("OK\n");
struct tagbstring ERR = bsStatic("ERR\n");
struct tagbstring DNE = bsStatic("DNE\n");
struct tagbstring EXISTS = bsStatic("EXISTS\n");
struct tagbstring SLASH = bsStatic("/");
const char LINE_ENDING = '\n';
const int RB_SIZE = 1024 * 10;
Hashmap *DATA = NULL;
void handle_sigchild(int sig) {
sig = 0; // ignore it
while(waitpid(-1, NULL, WNOHANG) > 0) {
}
}
int handle_create(Command *cmd, RingBuffer *send_rb, bstring path)
{
int rc = 0;
int is_root = biseq(path, cmd->name);
log_info("create: %s %s %s", bdata(cmd->name), bdata(path), bdata(cmd->number));
Record *info = Hashmap_get(DATA, path);
if(info != NULL && is_root) {
// report if root exists, just skip children
send_reply(send_rb, &EXISTS);
} else if(info != NULL) {
debug("Child %s exists, skipping it.", bdata(path));
return 0;
} else {
// new child so make it
debug("create: %s %s", bdata(path), bdata(cmd->number));
Record *info = calloc(sizeof(Record), 1);
check_mem(info);
// set its stat element
info->stat = Stats_create();
check_mem(info->stat);
// set its name element
info->name = bstrcpy(path);
check_mem(info->name);
// do a first sample
Stats_sample(info->stat, atof(bdata(cmd->number)));
// add it to the hashmap
rc = Hashmap_set(DATA, info->name, info);
check(rc == 0, "Failed to add data to map.");
// only send the OK for the root part
if(is_root) {
send_reply(send_rb, &OK);
}
}
return 0;
error:
return -1;
}
int handle_sample(Command *cmd, RingBuffer *send_rb, bstring path)
{
// get the info from the hashmap
Record *info = Hashmap_get(DATA, path);
int is_root = biseq(path, cmd->name);
log_info("sample %s %s %s", bdata(cmd->name), bdata(path), bdata(cmd->number));
bstring child_path = NULL;
if(info == NULL) {
// if it doesn't exist then DNE
send_reply(send_rb, &DNE);
return 0;
} else {
if(is_root) {
// just sample the root like normal
Stats_sample(info->stat, atof(bdata(cmd->number)));
} else {
// need to do some hackery to get the child path
// for rolling up mean-of-means on it
// increase the qty on path up one
cmd->path->qty++;
// get the "child path" (previous path?)
child_path = bjoin(cmd->path, &SLASH);
// get that info from the DATA
Record *child_info = Hashmap_get(DATA, child_path);
bdestroy(child_path);
// if it exists then sample on it
if(child_info) {
// info is /logins, child_info is /logins/zed
// we want /logins/zed's mean to be a new sample on /logins
Stats_sample(info->stat, Stats_mean(child_info->stat));
}
// drop the path back to where it was
cmd->path->qty--;
}
}
// do the reply for the mean last
bstring reply = bformat("%f\n", Stats_mean(info->stat));
send_reply(send_rb, reply);
bdestroy(reply);
return 0;
}
int handle_delete(Command *cmd, RingBuffer *send_rb, bstring path)
{
log_info("delete: %s %s", bdata(cmd->name), bdata(path));
Record *info = Hashmap_get(DATA, path);
int is_root = biseq(path, cmd->name);
// BUG: should just decide that this isn't scanned
// but run once, for now just only run on root
if(info == NULL) {
send_reply(send_rb, &DNE);
} else if(is_root) {
Hashmap_delete(DATA, path);
free(info->stat);
bdestroy(info->name);
free(info);
send_reply(send_rb, &OK);
}
return 0;
}
int handle_mean(Command *cmd, RingBuffer *send_rb, bstring path)
{
log_info("mean: %s %s %s", bdata(cmd->name), bdata(path), bdata(path));
Record *info = Hashmap_get(DATA, path);
if(info == NULL) {
send_reply(send_rb, &DNE);
} else {
bstring reply = bformat("%f\n", Stats_mean(info->stat));
send_reply(send_rb, reply);
bdestroy(reply);
}
return 0;
}
int handle_stddev(Command *cmd, RingBuffer *send_rb, bstring path)
{
log_info("stddev: %s %s %s", bdata(cmd->name), bdata(path), bdata(path));
Record *info = Hashmap_get(DATA, path);
if(info == NULL) {
send_reply(send_rb, &DNE);
} else {
bstring reply = bformat("%f\n", Stats_stddev(info->stat));
send_reply(send_rb, reply);
bdestroy(reply);
}
return 0;
}
int handle_dump(Command *cmd, RingBuffer *send_rb, bstring path)
{
log_info("dump: %s, %s, %s", bdata(cmd->name), bdata(path), bdata(path));
Record *info = Hashmap_get(DATA, path);
if(info == NULL) {
send_reply(send_rb, &DNE);
} else {
bstring reply = bformat("%f %f %f %f %ld %f %f\n",
Stats_mean(info->stat),
Stats_stddev(info->stat),
info->stat->sum,
info->stat->sumsq,
info->stat->n,
info->stat->min,
info->stat->max);
send_reply(send_rb, reply);
bdestroy(reply);
}
return 0;
}
int parse_command(struct bstrList *splits, Command *cmd)
{
// get the command
cmd->command = splits->entry[0];
if(biseq(cmd->command, &CREATE)) {
check(splits->qty == 3, "Failed to parse create: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->number = splits->entry[2];
cmd->handler = handle_create;
} else if(biseq(cmd->command, &MEAN)) {
check(splits->qty == 2, "Failed to parse mean: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->handler = handle_mean;
} else if(biseq(cmd->command, &SAMPLE)) {
check(splits->qty == 3, "Failed to parse sample: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->number = splits->entry[2];
cmd->handler = handle_sample;
} else if(biseq(cmd->command, &DUMP)) {
check(splits->qty == 2, "Failed to parse dump: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->handler = handle_dump;
} else if(biseq(cmd->command, &DELETE)) {
check(splits->qty == 2, "Failed to parse delete: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->handler = handle_delete;
} else if(biseq(cmd->command, &STDDEV)) {
check(splits->qty == 2, "Failed to parse stddev: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->handler = handle_stddev;
} else {
sentinel("Failed to parse the command.");
}
return 0;
error:
return -1;
}
int scan_paths(Command *cmd, RingBuffer *send_rb)
{
check(cmd->path != NULL, "Path was not set in command.");
int rc = 0;
// save the original path length
size_t qty = cmd->path->qty;
// starting at the longest path, shorten it and call
// for each one:
for(; cmd->path->qty > 1; cmd->path->qty--) {
// remake the path with / again
bstring path = bjoin(cmd->path, &SLASH);
// call the handler with the path
rc = cmd->handler(cmd, send_rb, path);
// if the handler returns != 0 then abort and return that
bdestroy(path);
if(rc != 0) break;
}
// restore path length
cmd->path->qty = qty;
return rc;
error:
return -1;
}
struct bstrList *parse_name(bstring name)
{
return bsplits(name, &SLASH);
}
int parse_line(bstring data, RingBuffer *send_rb)
{
int rc = -1;
Command cmd = {.command = NULL};
// split data on line boundaries
struct bstrList *splits = bsplits(data, &LINE_SPLIT);
check(splits != NULL, "Bad data.");
// parse it into a command
rc = parse_command(splits, &cmd);
check(rc == 0, "Failed to parse command.");
// parse the name into the path we need for scan_paths
cmd.path = parse_name(cmd.name);
check(cmd.path != NULL, "Invalid path.");
// scan the path and call the handlers
rc = scan_paths(&cmd, send_rb);
check(rc == 0, "Failure running command against path: %s", bdata(cmd.name));
bstrListDestroy(cmd.path);
bstrListDestroy(splits);
return 0;
error: // fallthrough
if(cmd.path) bstrListDestroy(cmd.path);
if(splits) bstrListDestroy(splits);
return -1;
}
void client_handler(int client_fd)
{
int rc = 0;
RingBuffer *recv_rb = RingBuffer_create(RB_SIZE);
RingBuffer *send_rb = RingBuffer_create(RB_SIZE);
check_mem(recv_rb);
check_mem(send_rb);
// keep reading into the recv buffer and sending on send
while(read_some(recv_rb, client_fd, 1) != -1) {
// read a line from the recv_rb
bstring data = read_line(recv_rb, LINE_ENDING);
check(data != NULL, "Client closed.");
// parse it, close on any protocol errors
rc = parse_line(data, send_rb);
bdestroy(data); // cleanup here
check(rc == 0, "Failed to parse user. Closing.");
// and as long as there's something to send, send it
if(RingBuffer_available_data(send_rb)) {
write_some(send_rb, client_fd, 1);
}
}
// close the socket
rc = close(client_fd);
check(rc != -1, "Failed to close the socket.");
error: // fallthrough
if(recv_rb) RingBuffer_destroy(recv_rb);
if(send_rb) RingBuffer_destroy(send_rb);
exit(0); // just exit the child process
}
int setup_data_store()
{
// a more advanced design simply wouldn't use this
DATA = Hashmap_create(NULL, NULL);
check_mem(DATA);
return 0;
error:
return -1;
}
int echo_server(const char *host, const char *port)
{
int rc = 0;
struct sockaddr_in client_addr;
socklen_t sin_size = sizeof(client_addr);
int server_socket = 0;
int client_fd = 0;
rc = setup_data_store();
check(rc == 0, "Failed to setup the data store.");
struct sigaction sa = {
.sa_handler = handle_sigchild,
.sa_flags = SA_RESTART | SA_NOCLDSTOP
};
check(host != NULL, "Invalid host.");
check(port != NULL, "Invalid port.");
// create a sigaction that handles SIGCHLD
sigemptyset(&sa.sa_mask);
rc = sigaction(SIGCHLD, &sa, 0);
check(rc != -1, "Failed to setup signal handler for child processes.");
// listen on the given port and host
server_socket = server_listen(host, port);
check(server_socket >= 0, "bind to %s:%s failed.", host, port);
while(1) {
// accept the connection
client_fd = accept(server_socket, (struct sockaddr *)&client_addr, &sin_size);
check(client_fd >= 0, "Failed to accept connection.");
debug("Client connected.");
rc = fork();
if(rc == 0) {
// child process
close(server_socket); // don't need this
// handle the client
client_handler(client_fd);
} else {
// server process
close(client_fd); // don't need this
}
}
error: // fallthrough
return -1;
}
```
.\ex50b\statserve\src\statserve.h
```c
#ifndef _statserve_h
#define _statserve_h
#include <lcthw/bstrlib.h>
#include <lcthw/ringbuffer.h>
#include <lcthw/stats.h>
struct Command;
typedef int (*handler_cb)(struct Command *cmd, RingBuffer *send_rb, bstring path);
typedef struct Command {
bstring command;
bstring name;
struct bstrList *path;
bstring number;
handler_cb handler;
} Command;
typedef struct Record {
bstring name;
Stats *stat;
} Record;
struct tagbstring OK;
int setup_data_store();
struct bstrList *parse_name(bstring name);
int scan_paths(Command *cmd, RingBuffer *send_rb);
int parse_line(bstring data, RingBuffer *send_rb);
int echo_server(const char *host, const char *port);
#endif
```
.\ex50b\statserve\tests\statserve_tests.c
```c
#include "minunit.h"
#include <dlfcn.h>
#include "statserve.h"
#include <lcthw/bstrlib.h>
#include <lcthw/ringbuffer.h>
#include <assert.h>
typedef struct LineTest {
char *line;
bstring result;
char *description;
} LineTest;
int attempt_line(LineTest test)
{
int rc = -1;
bstring result = NULL;
bstring line = bfromcstr(test.line);
RingBuffer *send_rb = RingBuffer_create(1024);
rc = parse_line(line, send_rb);
check(rc == 0, "Failed to parse line.");
result = RingBuffer_get_all(send_rb);
check(result != NULL, "Ring buffer empty.");
check(biseq(result, test.result), "Got the wrong output: %s expected %s",
bdata(result), bdata(test.result));
bdestroy(line);
RingBuffer_destroy(send_rb);
return 1; // using 1 for tests
error:
log_err("Failed to process test %s: got %s", test.line, bdata(result));
if(line) bdestroy(line);
if(send_rb) RingBuffer_destroy(send_rb);
return 0;
}
int run_test_lines(LineTest *tests, int count)
{
int i = 0;
for(i = 0; i < count; i++) {
check(attempt_line(tests[i]), "Failed to run %s", tests[i].description);
}
return 1;
error:
return 0;
}
int fake_command(Command *cmd, RingBuffer *send_rb, bstring path)
{
check(cmd != NULL, "Bad cmd.");
check(cmd->path != NULL, "Bad path.");
check(send_rb != NULL, "Bad send_rb.");
check(path != NULL, "Bad path given.");
return 0;
error:
return -1;
}
char *test_path_parsing()
{
struct bstrList *result = NULL;
struct tagbstring slash = bsStatic("/");
struct tagbstring logins_zed = bsStatic("/logins/zed");
struct tagbstring command_name = bsStatic("dump");
RingBuffer *send_rb = RingBuffer_create(1024);
struct bstrList *path = bsplits(&logins_zed, &slash);
int rc = 0;
Command fake = {
.command = &command_name,
.name = &logins_zed,
.number = NULL,
.handler = fake_command,
.path = path
};
result = parse_name(&logins_zed);
mu_assert(result != NULL, "Failed to parse /logins/zed");
rc = scan_paths(&fake, send_rb);
mu_assert(rc != -1, "scan_paths failed.");
return NULL;
}
char *test_create()
{
LineTest tests[] = {
{.line = "create /zed 100", .result = &OK, .description = "create zed failed"},
{.line = "create /joe 100", .result = &OK, .description = "create joe failed"},
};
mu_assert(run_test_lines(tests, 2), "Failed to run create tests.");
return NULL;
}
char *test_sample()
{
struct tagbstring sample1 = bsStatic("100.000000\n");
LineTest tests[] = {
{.line = "sample /zed 100", .result = &sample1, .description = "sample zed failed."}
};
mu_assert(run_test_lines(tests, 1), "Failed to run sample tests.");
return NULL;
}
char *all_tests()
{
mu_suite_start();
int rc = setup_data_store();
mu_assert(rc == 0, "Failed to setup the data store.");
mu_run_test(test_create);
mu_run_test(test_sample);
mu_run_test(test_path_parsing);
return NULL;
}
RUN_TESTS(all_tests);
```
Final Review
### Exercise 51a Storing the Statistics
Project Description
The Plan
Learn to store the statistics to the hard disk.
There are many issues with this.
The Purpose
To teach you about various problems related to securely storing files.
The Requirements
For this exercise, you'll add two commands for storing to and loading statistics
from a hard drive:
store
If there's a URL, store it to a hard drive.
load
If there are two URLs, load the statistic from the hard drive based on the first URL, and then put it into the second URL that's in memory.
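For example, a client session against the finished server might look like this (command on the left, reply on the right, matching the Exercise 51b tests later in this document):
```
create /zed 100       OK
store /zed            OK
load /zed /sam        OK
```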
The Requirements
1. If URLs have ``/`` characters in them, then that conflicts with the filesystem's use of slashes. How will you solve this?
2. If URLs have ``/`` characters in them, then someone can use your server to overwrite files on a hard drive by giving paths to them. How will you solve this?
3. If you choose to use deeply nested directories, then traversing directories to find files will be very slow. What will you do here?
The Requirements
4. If you choose to use one directory and hash URLs (oops, I gave a hint), then directories with too many files in them are slow. How will you solve this?
5. What happens when someone loads a statistic from a hard drive into a URL that already exists?
6. How will someone running ``statserve`` know where the storage should be?
The Clues
There are no clues. You can do this.
### Exercise 51b Storing the Statistics
Solution
The Plan
Show you how I solved the problem of storing the statistics to disk.
Security Requirements
* Use *realpath* to make sure that paths are in one place.
* Use _BAD_ encryption to mangle the stored names.
* No other security beyond that. Just a demo of the path issue.
XTEA Encryption
* For an extra challenge I decided to "hash" names with XTEA.
* https://en.wikipedia.org/wiki/XTEA for the code.
* Normally I wouldn't do this, but wanted to show you XTEA.
* Because XTEA is cool and fun, although broken.
* DON'T USE XTEA FOR ENCRYPTION.
Improvements
* Let commands set cmd->path = NULL to indicate non-recursive.
* Change *echo_server* to *run_server* finally.
* Allow a 3rd storage path argument on the command line.
* Allow an additional argument to Command.
Weirdness
* Forking means I can't share data between clients without storage.
* Storing doesn't happen automatically, only explicitly.
* Loading acts as a copy command.
* XTEA isn't the best algorithm at all. Just for fun.
How I Did It
1. Create the LOAD and STORE handlers.
2. Add Command.arg and set those in *parse\_command*.
3. Move path parsing up to allow non-recursive handling with path = NULL.
4. Write a *sanitize\_location* and *encrypt\_armor\_name* function, test them.
5. Write *handle\_store* first to allow testing *handle\_load*.
6. Use *open* (man 2 open) with O_EXLOCK to get exclusive locks on files.
7. Using *close* (man 2 close) should release the EXLOCK, but I'm not entirely clear on this.
The Code
.\ex51b\statserve
.\ex51b\statserve\bin\statserve.c
```c
#include <stdio.h>
#include <lcthw/dbg.h>
#include "statserve.h"
#include "net.h"
int main(int argc, char *argv[])
{
check(argc == 4, "USAGE: statserve host port store_path");
const char *host = argv[1];
const char *port = argv[2];
const char *store_path = argv[3];
check(run_server(host, port, store_path), "Failed to run the echo server.");
return 0;
error:
return 1;
}
```
.\ex51b\statserve\src\dbg.h
```c
#ifndef __dbg_h__
#define __dbg_h__
#include <stdio.h>
#include <errno.h>
#include <string.h>
#ifdef NDEBUG
#define debug(M, ...)
#else
#define debug(M, ...) fprintf(stderr, "DEBUG %s:%d: " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#endif
#define clean_errno() (errno == 0 ? "None" : strerror(errno))
#define log_err(M, ...) fprintf(stderr,\
"[ERROR] (%s:%d: errno: %s) " M "\n", __FILE__, __LINE__,\
clean_errno(), ##__VA_ARGS__)
#define log_warn(M, ...) fprintf(stderr,\
"[WARN] (%s:%d: errno: %s) " M "\n",\
__FILE__, __LINE__, clean_errno(), ##__VA_ARGS__)
#define log_info(M, ...) fprintf(stderr, "[INFO] (%s:%d) " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#define check(A, M, ...) if(!(A)) {\
log_err(M, ##__VA_ARGS__); errno=0; goto error; }
#define sentinel(M, ...) { log_err(M, ##__VA_ARGS__);\
errno=0; goto error; }
#define check_mem(A) check((A), "Out of memory.")
#define check_debug(A, M, ...) if(!(A)) { debug(M, ##__VA_ARGS__);\
errno=0; goto error; }
#endif
```
.\ex51b\statserve\src\net.c
```c
#include <stdlib.h>
#include <sys/select.h>
#include <stdio.h>
#include <lcthw/ringbuffer.h>
#include <lcthw/dbg.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <sys/uio.h>
#include <arpa/inet.h>
#include <netdb.h>
#include <unistd.h>
#include <fcntl.h>
#include "net.h"
struct tagbstring NL = bsStatic("\n");
struct tagbstring CRLF = bsStatic("\r\n");
int nonblock(int fd)
{
int flags = fcntl(fd, F_GETFL, 0);
check(flags >= 0, "Invalid flags on nonblock.");
int rc = fcntl(fd, F_SETFL, flags | O_NONBLOCK);
check(rc == 0, "Can't set nonblocking.");
return 0;
error:
return -1;
}
int client_connect(char *host, char *port)
{
int rc = 0;
struct addrinfo *addr = NULL;
rc = getaddrinfo(host, port, NULL, &addr);
check(rc == 0, "Failed to lookup %s:%s", host, port);
int sock = socket(AF_INET, SOCK_STREAM, 0);
check(sock >= 0, "Cannot create a socket.");
rc = connect(sock, addr->ai_addr, addr->ai_addrlen);
check(rc == 0, "Connect failed.");
rc = nonblock(sock);
check(rc == 0, "Can't set nonblocking.");
freeaddrinfo(addr);
return sock;
error:
freeaddrinfo(addr);
return -1;
}
int read_some(RingBuffer * buffer, int fd, int is_socket)
{
int rc = 0;
if (RingBuffer_available_data(buffer) == 0) {
buffer->start = buffer->end = 0;
}
if (is_socket) {
rc = recv(fd, RingBuffer_starts_at(buffer),
RingBuffer_available_space(buffer), 0);
} else {
rc = read(fd, RingBuffer_starts_at(buffer),
RingBuffer_available_space(buffer));
}
check(rc >= 0, "Failed to read from fd: %d", fd);
RingBuffer_commit_write(buffer, rc);
return rc;
error:
return -1;
}
int write_some(RingBuffer * buffer, int fd, int is_socket)
{
int rc = 0;
bstring data = RingBuffer_get_all(buffer);
check(data != NULL, "Failed to get from the buffer.");
check(bfindreplace(data, &NL, &CRLF, 0) == BSTR_OK,
"Failed to replace NL.");
if (is_socket) {
rc = send(fd, bdata(data), blength(data), 0);
} else {
rc = write(fd, bdata(data), blength(data));
}
check(rc == blength(data), "Failed to write everything to fd: %d.",
fd);
bdestroy(data);
return rc;
error:
return -1;
}
int attempt_listen(struct addrinfo *info)
{
int sockfd = -1; // default fail
int rc = -1;
int yes = 1;
check(info != NULL, "Invalid addrinfo.");
// create a socket with the addrinfo
sockfd = socket(info->ai_family, info->ai_socktype,
info->ai_protocol);
check_debug(sockfd != -1, "Failed to bind to address. Trying more.");
// set the SO_REUSEADDR option on the socket
rc = setsockopt(sockfd, SOL_SOCKET, SO_REUSEADDR, &yes, sizeof(int));
check_debug(rc == 0, "Failed to set SO_REUSADDR.");
// attempt to bind to it
rc = bind(sockfd, info->ai_addr, info->ai_addrlen);
check_debug(rc == 0, "Failed to find socket.");
// finally listen with a backlog
rc = listen(sockfd, BACKLOG);
check_debug(rc == 0, "Failed to listen to socket.");
return sockfd;
error:
return -1;
}
int server_listen(const char *host, const char *port)
{
int rc = 0;
int sockfd = -1; // default fail value
struct addrinfo *info = NULL;
struct addrinfo *next_p = NULL;
struct addrinfo addr = {
.ai_family = AF_UNSPEC,
.ai_socktype = SOCK_STREAM,
.ai_flags = AI_PASSIVE
};
check(host != NULL, "Invalid host.");
check(port != NULL, "Invalid port.");
// get the address info for host and port
rc = getaddrinfo(NULL, port, &addr, &info);
check(rc == 0, "Failed to get address info for connect.");
// cycle through the available list to find one
for(next_p = info; next_p != NULL; next_p = next_p->ai_next)
{
// attempt to listen to each one
sockfd = attempt_listen(next_p);
if(sockfd != -1) break;
}
// either we found one and were able to listen or nothing.
check(sockfd != -1, "All possible addresses failed.");
error: //fallthrough
if(info) freeaddrinfo(info);
// this gets set by the above to either -1 or valid
return sockfd;
}
bstring read_line(RingBuffer *input, const char line_ending)
{
int i = 0;
bstring result = NULL;
// not super efficient
// read a character at a time from the ring buffer
for(i = 0; i < RingBuffer_available_data(input); i++) {
// if the buffer has line ending
if(input->buffer[i] == line_ending) {
// get that much from the ring buffer
result = RingBuffer_gets(input, i);
check(result, "Failed to get line from RingBuffer");
// make sure that we got the right amount
check(RingBuffer_available_data(input) >= 1,
"Not enough data in the RingBuffer after reading line.");
// and commit it
RingBuffer_commit_read(input, 1);
break;
}
}
// notice this will fail in the cases where we get a set of data
// on the wire that does not have a line ending yet
return result;
error:
return NULL;
}
void send_reply(RingBuffer *send_rb, bstring reply)
{
RingBuffer_puts(send_rb, reply);
}
```
.\ex51b\statserve\src\net.h
```c
#ifndef _net_h
#define _net_h
#include <lcthw/ringbuffer.h>
#define BACKLOG 10
int nonblock(int fd);
int client_connect(char *host, char *port);
int read_some(RingBuffer * buffer, int fd, int is_socket);
int write_some(RingBuffer * buffer, int fd, int is_socket);
int server_listen(const char *host, const char *port);
bstring read_line(RingBuffer *input, const char line_ending);
void send_reply(RingBuffer *send_rb, bstring reply);
#endif
```
.\ex51b\statserve\src\statserve.c
```c
#include <stdio.h>
#include <ctype.h>
#include <lcthw/dbg.h>
#include <lcthw/hashmap.h>
#include <unistd.h>
#include <stdlib.h>
#include <signal.h>
#include <sys/wait.h>
#include "net.h"
#include <netdb.h>
#include <fcntl.h>
#include "statserve.h"
struct tagbstring LINE_SPLIT = bsStatic(" ");
struct tagbstring CREATE = bsStatic("create");
struct tagbstring STDDEV = bsStatic("stddev");
struct tagbstring MEAN = bsStatic("mean");
struct tagbstring SAMPLE = bsStatic("sample");
struct tagbstring DUMP = bsStatic("dump");
struct tagbstring DELETE = bsStatic("delete");
struct tagbstring STORE = bsStatic("store");
struct tagbstring LOAD = bsStatic("load");
struct tagbstring OK = bsStatic("OK\n");
struct tagbstring ERR = bsStatic("ERR\n");
struct tagbstring DNE = bsStatic("DNE\n");
struct tagbstring EXISTS = bsStatic("EXISTS\n");
struct tagbstring SLASH = bsStatic("/");
const char LINE_ENDING = '\n';
const int RB_SIZE = 1024 * 10;
Hashmap *DATA = NULL;
bstring STORE_PATH = NULL;
void handle_sigchild(int sig) {
sig = 0; // ignore it
while(waitpid(-1, NULL, WNOHANG) > 0) {
}
}
// BUG: this is stupid, use md5
void encipher(unsigned int num_rounds, uint32_t v[2], uint32_t const key[4]) {
unsigned int i;
uint32_t v0=v[0], v1=v[1], sum=0, delta=0x9E3779B9;
for (i=0; i < num_rounds; i++) {
v0 += (((v1 << 4) ^ (v1 >> 5)) + v1) ^ (sum + key[sum & 3]);
sum += delta;
v1 += (((v0 << 4) ^ (v0 >> 5)) + v0) ^ (sum + key[(sum>>11) & 3]);
}
v[0]=v0; v[1]=v1;
}
/// TOTALLY RANDOM! LOL BUG: not secure
const uint32_t STORE_KEY[4] = {18748274, 228374, 193034845, 85726348};
struct tagbstring FAUX64 = bsStatic("ABCDEFGHIJKLMNOPQRST<KEY>");
// BUG: this all dies
bstring encrypt_armor_name(bstring name)
{
// copy the name to encrypt
bstring encname = bstrcpy(name);
size_t i = 0;
// point the encrypt pointer at it
// BUG: this cast is weird, why?
uint32_t *v = (uint32_t *)bdata(encname);
// extend the encname so that it can hold everything
// BUG: use a correct padding algorithm
while(blength(encname) % (sizeof(uint32_t) * 2) > 0) {
bconchar(encname, ' ');
}
// run encipher on this
// BUG: get rid of encipher
for(i = 0; i < (size_t)blength(encname) / (sizeof(uint32_t) * 2); i+=2) {
encipher(1, v+i, STORE_KEY);
}
// do a lame "base 64" kind of thing on it
// BUG: this is NOT the best way, it's a quick hack to get it working
// replace with real BASE64 later
for(i = 0; i < (size_t)blength(encname); i++) {
int at = encname->data[i] % blength(&FAUX64);
encname->data[i] = FAUX64.data[at];
}
// that's our final hack encrypted name
return encname;
}
bstring sanitize_location(bstring base, bstring path)
{
bstring attempt = NULL;
bstring encpath = NULL;
// encrypt armor the name
// BUG: ditch encryption, it was dumb
encpath = encrypt_armor_name(path);
check(encpath != NULL, "Failed to encrypt path name: %s", bdata(path));
// combine it with the base, this means that we've armored the
// path so we can just append it
attempt = bformat("%s/%s", bdata(base), bdata(encpath));
bdestroy(encpath);
return attempt;
error:
if(encpath) bdestroy(encpath);
if(attempt) bdestroy(attempt);
return NULL;
}
int handle_create(Command *cmd, RingBuffer *send_rb, bstring path)
{
int rc = 0;
int is_root = biseq(path, cmd->name);
log_info("create: %s %s %s", bdata(cmd->name), bdata(path), bdata(cmd->number));
Record *info = Hashmap_get(DATA, path);
if(info != NULL && is_root) {
// report if root exists, just skip children
send_reply(send_rb, &EXISTS);
} else if(info != NULL) {
debug("Child %s exists, skipping it.", bdata(path));
return 0;
} else {
// new child so make it
debug("create: %s %s", bdata(path), bdata(cmd->number));
Record *info = calloc(1, sizeof(Record));
check_mem(info);
// set its stat element
info->stat = Stats_create();
check_mem(info->stat);
// set its name element
info->name = bstrcpy(path);
check_mem(info->name);
// do a first sample
Stats_sample(info->stat, atof(bdata(cmd->number)));
// add it to the hashmap
rc = Hashmap_set(DATA, info->name, info);
check(rc == 0, "Failed to add data to map.");
// only send the reply for the root part
if(is_root) {
send_reply(send_rb, &OK);
}
}
return 0;
error:
return -1;
}
int handle_sample(Command *cmd, RingBuffer *send_rb, bstring path)
{
// get the info from the hashmap
Record *info = Hashmap_get(DATA, path);
int is_root = biseq(path, cmd->name);
log_info("sample %s %s %s", bdata(cmd->name), bdata(path), bdata(cmd->number));
bstring child_path = NULL;
if(info == NULL) {
// if it doesn't exist then DNE
send_reply(send_rb, &DNE);
return 0;
} else {
if(is_root) {
// just sample the root like normal
Stats_sample(info->stat, atof(bdata(cmd->number)));
} else {
// need to do some hackery to get the child path
// for rolling up mean-of-means on it
// increase the qty on path up one
cmd->path->qty++;
// get the "child path" (previous path?)
child_path = bjoin(cmd->path, &SLASH);
// get that info from the DATA
Record *child_info = Hashmap_get(DATA, child_path);
bdestroy(child_path);
// if it exists then sample on it
if(child_info) {
// info is /logins, child_info is /logins/zed
// we want /logins/zed's mean to be a new sample on /logins
Stats_sample(info->stat, Stats_mean(child_info->stat));
}
// drop the path back to where it was
cmd->path->qty--;
}
}
// do the reply for the mean last
bstring reply = bformat("%f\n", Stats_mean(info->stat));
send_reply(send_rb, reply);
bdestroy(reply);
return 0;
}
int handle_delete(Command *cmd, RingBuffer *send_rb, bstring path)
{
log_info("delete: %s", bdata(cmd->name));
Record *info = Hashmap_get(DATA, cmd->name);
check(path == NULL && cmd->path == NULL, "Should be a recursive command.");
// BUG: should just decide that this isn't scanned
// but run once, for now just only run on root
if(info == NULL) {
send_reply(send_rb, &DNE);
} else {
Hashmap_delete(DATA, cmd->name);
free(info->stat);
bdestroy(info->name);
free(info);
send_reply(send_rb, &OK);
}
return 0;
error:
return -1;
}
int handle_mean(Command *cmd, RingBuffer *send_rb, bstring path)
{
log_info("mean: %s %s %s", bdata(cmd->name), bdata(path), bdata(path));
Record *info = Hashmap_get(DATA, path);
if(info == NULL) {
send_reply(send_rb, &DNE);
} else {
bstring reply = bformat("%f\n", Stats_mean(info->stat));
send_reply(send_rb, reply);
bdestroy(reply);
}
return 0;
}
int handle_stddev(Command *cmd, RingBuffer *send_rb, bstring path)
{
log_info("stddev: %s %s %s", bdata(cmd->name), bdata(path), bdata(path));
Record *info = Hashmap_get(DATA, path);
if(info == NULL) {
send_reply(send_rb, &DNE);
} else {
bstring reply = bformat("%f\n", Stats_stddev(info->stat));
send_reply(send_rb, reply);
bdestroy(reply);
}
return 0;
}
int handle_dump(Command *cmd, RingBuffer *send_rb, bstring path)
{
log_info("dump: %s, %s, %s", bdata(cmd->name), bdata(path), bdata(path));
Record *info = Hashmap_get(DATA, path);
if(info == NULL) {
send_reply(send_rb, &DNE);
} else {
bstring reply = bformat("%f %f %f %f %ld %f %f\n",
Stats_mean(info->stat),
Stats_stddev(info->stat),
info->stat->sum,
info->stat->sumsq,
info->stat->n,
info->stat->min,
info->stat->max);
send_reply(send_rb, reply);
bdestroy(reply);
}
return 0;
}
int handle_store(Command *cmd, RingBuffer *send_rb, bstring path)
{
Record *info = Hashmap_get(DATA, cmd->name);
bstring location = NULL;
bstring from = cmd->name;
int rc = 0;
int fd = -1;
check(cmd != NULL, "Invalid command.");
debug("store %s", bdata(cmd->name));
check(path == NULL && cmd->path == NULL, "Store is non-recursive.");
if(info == NULL) {
send_reply(send_rb, &DNE);
} else {
// it exists so we sanitize the name
location = sanitize_location(STORE_PATH, from);
check(location, "Failed to sanitize the location.");
// open the file we need with EXLOCK
fd = open(bdata(location), O_WRONLY | O_CREAT | O_EXLOCK, S_IRWXU);
check(fd >= 0, "Cannot open file for writing: %s", bdata(location));
// write the Stats part of info to it
rc = write(fd, info->stat, sizeof(Stats));
check(rc == sizeof(Stats), "Failed to write to %s", bdata(location));
// close, which should release the lock
close(fd);
// then send OK
send_reply(send_rb, &OK);
}
return 0;
error:
if(fd >= 0) close(fd);
return -1;
}
int handle_load(Command *cmd, RingBuffer *send_rb, bstring path)
{
bstring to = cmd->arg;
bstring from = cmd->name;
bstring location = NULL;
Record *info = Hashmap_get(DATA, to);
int fd = -1;
check(path == NULL && cmd->path == NULL, "Load is non-recursive.");
if(info != NULL) {
// don't do it if the target to exists
send_reply(send_rb, &EXISTS);
} else {
location = sanitize_location(STORE_PATH, from);
check(location, "Failed to sanitize location.");
// make a new record for the to target
// TODO: make regular CRUD methods for Record
info = calloc(1, sizeof(Record));
check_mem(info);
info->stat = calloc(1, sizeof(Stats));
check_mem(info->stat);
// open the file to read from readonly and locked
fd = open(bdata(location), O_RDONLY | O_EXLOCK);
check(fd >= 0, "Error opening file: %s", bdata(location));
// read into the stats record
int rc = read(fd, info->stat, sizeof(Stats));
check(rc == sizeof(Stats), "Failed to read record at %s", bdata(location));
// close so we release the lock quick
close(fd);
// make a copy of to as the name for the info
info->name = bstrcpy(to);
check_mem(info->name);
// put it in the hashmap
rc = Hashmap_set(DATA, info->name, info);
check(rc == 0, "Failed to ass to data map: %s", bdata(info->name));
// and send the reply
send_reply(send_rb, &OK);
}
return 0;
error:
if(fd >= 0) close(fd);
return -1;
}
int parse_command(struct bstrList *splits, Command *cmd)
{
check(splits != NULL, "Invalid split line.");
// get the command
cmd->command = splits->entry[0];
if(biseq(cmd->command, &CREATE)) {
check(splits->qty == 3, "Failed to parse create: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->number = splits->entry[2];
cmd->handler = handle_create;
cmd->path = parse_name(cmd->name);
} else if(biseq(cmd->command, &MEAN)) {
check(splits->qty == 2, "Failed to parse mean: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->handler = handle_mean;
cmd->path = parse_name(cmd->name);
} else if(biseq(cmd->command, &SAMPLE)) {
check(splits->qty == 3, "Failed to parse sample: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->number = splits->entry[2];
cmd->handler = handle_sample;
cmd->path = parse_name(cmd->name);
} else if(biseq(cmd->command, &DUMP)) {
check(splits->qty == 2, "Failed to parse dump: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->handler = handle_dump;
cmd->path = parse_name(cmd->name);
} else if(biseq(cmd->command, &DELETE)) {
check(splits->qty == 2, "Failed to parse delete: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->handler = handle_delete;
cmd->path = NULL;
} else if(biseq(cmd->command, &STDDEV)) {
check(splits->qty == 2, "Failed to parse stddev: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->handler = handle_stddev;
cmd->path = parse_name(cmd->name);
} else if(biseq(cmd->command, &STORE)) {
// store URL
check(splits->qty == 2, "Failed to parse store: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->handler = handle_store;
cmd->path = NULL;
} else if(biseq(cmd->command, &LOAD)) {
// load FROM TO
check(splits->qty == 3, "Failed to parse load: %d", splits->qty);
cmd->name = splits->entry[1];
cmd->arg = splits->entry[2];
cmd->handler = handle_load;
cmd->path = NULL;
} else {
sentinel("Failed to parse the command.");
}
return 0;
error:
return -1;
}
int scan_paths(Command *cmd, RingBuffer *send_rb)
{
check(cmd->path != NULL, "Path was not set in command.");
int rc = 0;
// save the original path length
size_t qty = cmd->path->qty;
// starting at the longest path, shorten it and call
// for each one:
for(; cmd->path->qty > 1; cmd->path->qty--) {
// remake the path with / again
bstring path = bjoin(cmd->path, &SLASH);
// call the handler with the path
rc = cmd->handler(cmd, send_rb, path);
// if the handler returns != 0 then abort and return that
bdestroy(path);
if(rc != 0) break;
}
// restore path length
cmd->path->qty = qty;
return rc;
error:
return -1;
}
struct bstrList *parse_name(bstring name)
{
return bsplits(name, &SLASH);
}
int parse_line(bstring data, RingBuffer *send_rb)
{
int rc = -1;
Command cmd = {.command = NULL};
// split data on line boundaries
struct bstrList *splits = bsplits(data, &LINE_SPLIT);
check(splits != NULL, "Bad data.");
// parse it into a command
rc = parse_command(splits, &cmd);
check(rc == 0, "Failed to parse command.");
// scan the path and call the handlers
if(cmd.path) {
check(cmd.path->qty > 1, "Didn't give a valid URL.");
rc = scan_paths(&cmd, send_rb);
check(rc == 0, "Failure running recursive command against path: %s", bdata(cmd.name));
bstrListDestroy(cmd.path);
} else {
rc = cmd.handler(&cmd, send_rb, NULL);
check(rc == 0, "Failed running command against path: %s", bdata(cmd.name));
}
bstrListDestroy(splits);
return 0;
error: // fallthrough
if(cmd.path) bstrListDestroy(cmd.path);
if(splits) bstrListDestroy(splits);
return -1;
}
void client_handler(int client_fd)
{
int rc = 0;
RingBuffer *recv_rb = RingBuffer_create(RB_SIZE);
RingBuffer *send_rb = RingBuffer_create(RB_SIZE);
check_mem(recv_rb);
check_mem(send_rb);
// keep reading into the recv buffer and sending on send
while(read_some(recv_rb, client_fd, 1) != -1) {
// read a line from the recv_rb
bstring data = read_line(recv_rb, LINE_ENDING);
check(data != NULL, "Client closed.");
// parse it, close on any protocol errors
rc = parse_line(data, send_rb);
bdestroy(data); // cleanup here
check(rc == 0, "Failed to parse user. Closing.");
// and as long as there's something to send, send it
if(RingBuffer_available_data(send_rb)) {
write_some(send_rb, client_fd, 1);
}
}
// close the socket
rc = close(client_fd);
check(rc != -1, "Failed to close the socket.");
error: // fallthrough
if(recv_rb) RingBuffer_destroy(recv_rb);
if(send_rb) RingBuffer_destroy(send_rb);
exit(0); // just exit the child process
}
int setup_data_store(const char *store_path)
{
// a more advanced design simply wouldn't use this
DATA = Hashmap_create(NULL, NULL);
check_mem(DATA);
char *path = realpath(store_path, NULL);
check(path != NULL, "Failed to get the real path for storage: %s", store_path);
STORE_PATH = bfromcstr(path);
free(path);
return 0;
error:
return -1;
}
int run_server(const char *host, const char *port, const char *store_path)
{
int rc = 0;
struct sockaddr_in client_addr;
socklen_t sin_size = sizeof(client_addr);
int server_socket = 0;
int client_fd = 0;
rc = setup_data_store(store_path);
check(rc == 0, "Failed to setup the data store.");
struct sigaction sa = {
.sa_handler = handle_sigchild,
.sa_flags = SA_RESTART | SA_NOCLDSTOP
};
check(host != NULL, "Invalid host.");
check(port != NULL, "Invalid port.");
// create a sigaction that handles SIGCHLD
sigemptyset(&sa.sa_mask);
rc = sigaction(SIGCHLD, &sa, 0);
check(rc != -1, "Failed to setup signal handler for child processes.");
// listen on the given port and host
server_socket = server_listen(host, port);
check(server_socket >= 0, "bind to %s:%s failed.", host, port);
while(1) {
// accept the connection
client_fd = accept(server_socket, (struct sockaddr *)&client_addr, &sin_size);
check(client_fd >= 0, "Failed to accept connection.");
debug("Client connected.");
rc = fork();
if(rc == 0) {
// child process
close(server_socket); // don't need this
// handle the client
client_handler(client_fd);
} else {
// server process
close(client_fd); // don't need this
}
}
error: // fallthrough
return -1;
}
```
.\ex51b\statserve\src\statserve.h
```c
#ifndef _statserve_h
#define _statserve_h
#include <lcthw/bstrlib.h>
#include <lcthw/ringbuffer.h>
#include <lcthw/stats.h>
struct Command;
typedef int (*handler_cb)(struct Command *cmd, RingBuffer *send_rb, bstring path);
typedef struct Command {
bstring command;
bstring name;
struct bstrList *path;
bstring number;
bstring arg;
handler_cb handler;
} Command;
typedef struct Record {
bstring name;
Stats *stat;
} Record;
struct tagbstring OK;
int setup_data_store(const char *store_path);
struct bstrList *parse_name(bstring name);
int scan_paths(Command *cmd, RingBuffer *send_rb);
int parse_line(bstring data, RingBuffer *send_rb);
int run_server(const char *host, const char *port, const char *store_path);
bstring sanitize_location(bstring base, bstring path);
bstring encrypt_armor_name(bstring name);
#endif
```
.\ex51b\statserve\tests\statserve_tests.c
```c
#include "minunit.h"
#include <dlfcn.h>
#include "statserve.h"
#include <lcthw/bstrlib.h>
#include <lcthw/ringbuffer.h>
#include <assert.h>
typedef struct LineTest {
char *line;
bstring result;
char *description;
} LineTest;
int attempt_line(LineTest test)
{
int rc = -1;
bstring result = NULL;
bstring line = bfromcstr(test.line);
RingBuffer *send_rb = RingBuffer_create(1024);
rc = parse_line(line, send_rb);
check(rc == 0, "Failed to parse line.");
result = RingBuffer_get_all(send_rb);
check(result != NULL, "Ring buffer empty.");
check(biseq(result, test.result), "Got the wrong output: %s expected %s",
bdata(result), bdata(test.result));
bdestroy(line);
RingBuffer_destroy(send_rb);
return 1; // using 1 for tests
error:
log_err("Failed to process test %s: got %s", test.line, bdata(result));
if(line) bdestroy(line);
if(send_rb) RingBuffer_destroy(send_rb);
return 0;
}
int run_test_lines(LineTest *tests, int count)
{
int i = 0;
for(i = 0; i < count; i++) {
check(attempt_line(tests[i]), "Failed to run %s", tests[i].description);
}
return 1;
error:
return 0;
}
int fake_command(Command *cmd, RingBuffer *send_rb, bstring path)
{
check(cmd != NULL, "Bad cmd.");
check(cmd->path != NULL, "Bad path.");
check(send_rb != NULL, "Bad send_rb.");
check(path != NULL, "Bad path given.");
return 0;
error:
return -1;
}
char *test_path_parsing()
{
struct bstrList *result = NULL;
struct tagbstring slash = bsStatic("/");
struct tagbstring logins_zed = bsStatic("/logins/zed");
struct tagbstring command_name = bsStatic("dump");
RingBuffer *send_rb = RingBuffer_create(1024);
struct bstrList *path = bsplits(&logins_zed, &slash);
int rc = 0;
Command fake = {
.command = &command_name,
.name = &logins_zed,
.number = NULL,
.handler = fake_command,
.path = path
};
result = parse_name(&logins_zed);
mu_assert(result != NULL, "Failed to parse /logins/zed");
rc = scan_paths(&fake, send_rb);
mu_assert(rc != -1, "scan_paths failed.");
return NULL;
}
char *test_create()
{
LineTest tests[] = {
{.line = "create /zed 100", .result = &OK, .description = "create zed failed"},
{.line = "create /joe 100", .result = &OK, .description = "create joe failed"},
};
mu_assert(run_test_lines(tests, 2), "Failed to run create tests.");
return NULL;
}
char *test_sample()
{
struct tagbstring sample1 = bsStatic("100.000000\n");
LineTest tests[] = {
{.line = "sample /zed 100", .result = &sample1, .description = "sample zed failed."}
};
mu_assert(run_test_lines(tests, 1), "Failed to run sample tests.");
return NULL;
}
char *test_store_load()
{
LineTest tests[] = {
{.line = "delete /zed", .result = &OK, .description = "delete zed failed"},
{.line = "create /zed 100", .result = &OK, .description = "create zed failed"},
{.line = "store /zed", .result = &OK, .description = "store zed failed"},
{.line = "load /zed /sam", .result = &OK, .description = "load zed failed"},
{.line = "delete /sam", .result = &OK, .description = "load zed failed"},
};
mu_assert(run_test_lines(tests, 3), "Failed to run sample tests.");
return NULL;
}
char *test_encrypt_armor_name()
{
struct tagbstring test1 = bsStatic("/logins");
struct tagbstring expect1 = bsStatic("vtmTmzNI");
struct tagbstring test2 = bsStatic("../../../../../../../../etc/passwd");
struct tagbstring expect2 = bsStatic("pVOBpFjHEIhB7cuT3BGUvyZGn3lvyj226mgggggg");
bstring result = encrypt_armor_name(&test1);
debug("Got encrypted name %s", bdata(result));
mu_assert(biseq(result, &expect1), "Failed to encrypt test2.");
bdestroy(result);
result = encrypt_armor_name(&test2);
debug("Got encrypted name %s", bdata(result));
mu_assert(biseq(result, &expect2), "Failed to encrypt test2.");
bdestroy(result);
return NULL;
}
char *test_path_sanitize_armor()
{
struct tagbstring base = bsStatic("/tmp");
struct tagbstring test1 = bsStatic("/somepath/here/there");
bstring encname = encrypt_armor_name(&test1);
bstring expect = bformat("%s/%s", bdata(&base), bdata(encname));
struct tagbstring test2 = bsStatic("../../../../../../../../etc/passwd");
bstring result = sanitize_location(&base, &test1);
mu_assert(result != NULL, "Failed to sanitize path.");
mu_assert(biseq(result, expect), "failed to sanitize test1");
// this should be pulled up into a tester function
// BUG: just get rid of this and use md5
encname = encrypt_armor_name(&test2);
expect = bformat("%s/%s", bdata(&base), bdata(encname));
result = sanitize_location(&base, &test2);
mu_assert(result != NULL, "Failed to sanitize path.");
mu_assert(biseq(result, expect), "failed to sanitize test1");
return NULL;
}
char *all_tests()
{
mu_suite_start();
int rc = setup_data_store("/tmp");
mu_assert(rc == 0, "Failed to setup the data store.");
mu_run_test(test_path_parsing);
mu_run_test(test_encrypt_armor_name);
mu_run_test(test_path_sanitize_armor);
mu_run_test(test_create);
mu_run_test(test_sample);
mu_run_test(test_store_load);
return NULL;
}
RUN_TESTS(all_tests);
```
Final Review
<file_sep>+++
title = "AWS: EC2 - 1"
description = "Reset EC2 password"
draft="true"
+++
## EC2
### EC2 with Linux
### EC2 with Windows
<file_sep>+++
title = "AWS: EKS - 1"
description = "Create a cluster"
weight=11
+++
## EKS - Part 1
Amazon Elastic Kubernetes Service (Amazon EKS) is a managed service that makes it easy for you to run Kubernetes on AWS without needing to stand up or maintain your own Kubernetes control plane. Kubernetes is an open-source system for automating the deployment, scaling, and management of containerized applications.
* EKS runs Kubernetes control plane instances across multiple Availability Zones to ensure high availability.
* EKS automatically detects and replaces unhealthy control plane instances.
* EKS provides automated version upgrades and patching for them.
* EKS is also integrated with many AWS services to provide scalability and security.
### eksctl
* Install the latest AWS CLI: `pip install awscli --upgrade --user`
* Install eksctl (Mac)
```
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"
#Install the Weaveworks Homebrew tap.
brew tap weaveworks/tap
# Install or upgrade eksctl.
brew install weaveworks/tap/eksctl
brew upgrade eksctl && brew link --overwrite eksctl
eksctl version
```
* Install eksctl (Linux)
```
# The latest version is 0.16
curl --silent --location "https://github.com/weaveworks/eksctl/releases/latest/download/eksctl_$(uname -s)_amd64.tar.gz" | tar xz -C /tmp
## For EKS with workloads 0.17.0-rc.0 is required
# curl --silent --location "https://github.com/weaveworks/eksctl/releases/download/0.17.0-rc.0/eksctl_$(uname -s)_amd64.tar.gz" | tar xz -C /tmp
# Move eksctl onto your PATH.
sudo mv /tmp/eksctl /usr/local/bin
eksctl version
```
### EKS with Fargate
#### Fargate
AWS Fargate is a serverless compute engine for containers that works with both Amazon Elastic Container Service (ECS) and Amazon Elastic Kubernetes Service (EKS). Fargate makes it easy for you to focus on building your applications. Fargate removes the need to provision and manage servers, lets you specify and pay for resources per application, and improves security through application isolation by design.
* Deploy and manage applications, not infrastructure
* Right-sized resources with flexible pricing options
* Secure isolation by design
* Rich observability of applications
##### Pod Configuration
vCPU value | Memory value
------|------
.25 vCPU| 0.5 GB, 1 GB, 2 GB
.5 vCPU| 1 GB, 2 GB, 3 GB, 4 GB
1 vCPU| 2 GB, 3 GB, 4 GB, 5 GB, 6 GB, 7 GB, 8 GB
2 vCPU | Between 4 GB and 16 GB in 1-GB increments
4 vCPU | Between 8 GB and 30 GB in 1-GB increments
#### Create Cluster
```
CLUSTER_NAME="pg-prd"
REGION_CODE="ap-southeast-1"
eksctl create cluster \
--name ${CLUSTER_NAME} \
--region ${REGION_CODE} \
--fargate
```
### EKS with EC2
```
CLUSTER_NAME="pg-prd"
REGION_CODE="ap-southeast-2"
NODE_GRP_NAME="standard-workers"
KEY_NAME="nonprod-kp"
NODE_TYPE="t3.medium"
eksctl create cluster \
--name ${CLUSTER_NAME} \
--region ${REGION_CODE} \
--nodegroup-name ${NODE_GRP_NAME} \
--node-type ${NODE_TYPE} \
--nodes 1 \
--nodes-min 1 \
--nodes-max 3 \
--ssh-access \
--ssh-public-key "${KEY_NAME}" \
--managed
```
#### Delete Cluster
* Get all services: `kubectl get svc --all-namespaces`
* Delete all services that have an EXTERNAL-IP value: `kubectl delete svc <service-name>`
* Delete the cluster: `eksctl delete cluster --name <cluster-name>`
<file_sep>+++
date = "2016-08-11T11:59:31+11:00"
title = "PHP Web Framework"
description="Introduction of PHP Web Frameworks: Zend Framework, Laravel "
draft = false
+++
> *Here we are going to explore some PHP web frameworks.*
## PHP development environment setup
### Install PHP 5.6.x
* Please find the instruction from [home page](/#php)
* Composer
### Linux
* Use `curl -s https://getcomposer.org/installer | php --` to install composer on Linux
* Use `composer -v` to verify.
### Windows
* Download [Composer](https://getcomposer.org/download/) and install it on your PC
* Use `composer -v` to verify the composer is ready.
## Zend Framework
> Zend Framework 2.x is a collection of 60+ packages for professional PHP development. It can be used to develop web applications and services using PHP 5.6+, and provides 100% object-oriented code using a broad spectrum of language features.
### Create Zend Framework 2 project
* Clone Zend Framework skeleton project as new project.
* Install zend framework with composer
```bash
cd /path/to/newproject
git clone git://github.com/zendframework/ZendSkeletonApplication.git
cd ZendSkeletonApplication
php composer.phar self-update
php composer.phar install
```
### Start app with php built-in server
**Linux**
```bash
php -S 0.0.0.0:8080 -t public/ public/index.php
```
**Windows**
```bash
php -S 0.0.0.0:8080 -t public public/index.php
```
### Use apache server
* Apache configuration
```apache
<VirtualHost *:80>
ServerName zf2-tutorial.localhost
DocumentRoot /path/to/newproject/ZendSkeletonApplication/public
SetEnv APPLICATION_ENV "development"
<Directory /path/to/newproject/ZendSkeletonApplication/public>
DirectoryIndex index.php
AllowOverride All
Require all granted
</Directory>
</VirtualHost>
```
* Zf2-MVC-Starter project
* Please find the project introduction [here](/projects/zf2-mvc-starter/).
## Laravel
> Laravel is a free, open-source PHP web framework, created by <NAME> and intended for the development of web applications following the model–view–controller (MVC) architectural pattern. Some of the features of Laravel are a modular packaging system with a dedicated dependency manager, different ways for accessing relational databases, utilities that aid in application deployment and maintenance, and its orientation toward syntactic sugar.
### Install laravel global
* Use `composer global require laravel/installer`
* Enable the `mbstring` extension
* update `php.ini` config
* Open `php.ini` with notepad
* Change `;extension=php_mbstring.dll` to `extension=php_mbstring.dll`
### Create new project from scratch
#### Migrate database and seed dummy data
* Create data model
* Following is the sample code
```php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class Article extends Model
{
protected $fillable = ['title', 'content'];
public function getTitleAttribute($value) {
return strtoupper($value);
}
}
```
* Use artisan to create the table
```bash
php artisan migrate -VVV
## Use the following command to seed dummy or initial data
php artisan db:seed
```
### Troubleshooting
**Fix the error of Specified key was too long**
```php
namespace App\Providers;
use Illuminate\Support\ServiceProvider;
use Illuminate\Support\Facades\Schema;
class AppServiceProvider extends ServiceProvider
{
/**
* Bootstrap any application services.
*
* @return void
*/
public function boot()
{
//
Schema::defaultStringLength(191);
}
/**
* Register any application services.
*
* @return void
*/
public function register()
{
//
}
}
```
### Lavarel MVC Starter project
* Please find the project introduction [here](/projects/lara-mvc-starter/).
## Use Laravel Rest Starter project
## PrestaShop
> PrestaShop is a free, open source e-commerce solution. The software is published under the Open Software License (OSL). It is written in the PHP programming language with support for the MySQL database management system.
### Download the zip file from [download page](https://www.prestashop.com/en/developers-versions)
### Install Prestashop
There is an instruction page inside the zip file that you can follow to complete the installation. I don't have EasyPHP, WAMP, XAMPP, or any similar AMP (Apache+MySQL+PHP) package installed on my PC, but I do have PHP, Apache, and MySQL installed. EasyPHP and WAMP are just bundles of PHP development tools that include PHP, Apache, and MySQL. I don't want to install too much duplicate software on my PC, so I prefer to install PrestaShop with what I already have. Which strategy you choose is up to you.
### Install Prestashop with AMP package
* Follow the instruction page within zip file.
### Install Prestashop without AMP package
- Unzip the file to `path\to\prestashop_workspace`. Your folder structure will look like this.
```ini
path\to\prestashop_workspace
\---prestashop
+---admin
+---cache
+---classes
+---config
+---controllers
+---css
+---docs
+---download
+---img
+---install
+---js
+---localization
+---log
+---mails
+---modules
+---override
+---pdf
+---themes
+---tools
+---translations
+---upload
\---webservice
```
* Start your MySQL or check the status of MySQL
* Use your MySQL client tool to connect to your MySQL server.
* Launch installer page with php server
* Start a command prompt
```bash
cd /path/to/prestashop_workspace
php -S 0.0.0.0:1234 -t prestashop
```
* Open the link `http://localhost:1234/install/index.php` in a browser; then you can start the installation process.
* Choose language and click `Next`, and then select the checkbox "I agree bah lah bah lah .... " and click `Next`
* If there is a `GD Library is not installed` error prompt, you just need to enable the library in `php.ini`
* DO NOT close your browser.
* Stop the php server by `Ctrl + C` in the command prompt.
* Use notepad to open the file `php.ini` under the `\path\to\php`
* Uncomment the config `;extension=php_gd2.dll` => `extension=php_gd2.dll`
* Start the php server again
* Click the `Refresh this settings`, and click `Next`
* Fill in the login user and password. If your MySQL port is not 3306, please append your port to the server address input field. Click `Test your database connection`.
* If you get the error `prestashop database not found`, you need to create the database on the MySQL server.
* I simply create a new database with one SQL command
```sql
CREATE DATABASE prestashop CHAR SET utf8 COLLATE 'utf8_unicode_ci';
```
* Test the connection again. You will get the green light.
* Click `Next` and you can start to set up your store information, such as store name, admin account, etc. Then click `Next`.
* Set up your sample store and click `Next`. Then the installer will help you finish the initialization.
* After the store setup, you can access the website by clicking `Front site`, but you cannot access the back office, also known as the admin panel.
* Don't panic. It is easy to fix. Stop the PHP server with `Ctrl+C`, delete the folder `install` under the root, and then start the server again. Open the `prestashop` folder and you will find something interesting: the original `admin` folder has been renamed to `adminXXXX`, where X is a number. It is PrestaShop's special trick to secure your admin folder. Now you need to use this new name as the path to access the back office. Your new back office link will be `http://localhost:1234/adminXXXX`.
* Open the new link in browser and type in your admin id and password. Now you can start managing your Prestashop site. Enjoy it.
### Forgot admin's password
* If you forgot the admin's password, or you have to reset it and cannot get the old one from the previous administrator, there is a simple way to update the admin's password directly in the database.
* Tailor the SQL below, and then you should be able to log in with the new password.
```sql
UPDATE ps_employee SET passwd = MD5('<_COOKIE_KEY_><PASSWORD>')
WHERE ps_employee.id_employee = <ID_EMPLOYEE>;
```
### Troubleshooting
#### InnoDB error
* Error : InnoDB is not supported by your MySQL server
> You get this error because PrestaShop 1.5 does not work properly with MySQL 5.4 and later.
* Update the files `DbPDO.php`, `MySQL.php`, and `DbMySQLi.php` as follows.
```php
// $sql = 'SHOW VARIABLES WHERE Variable_name = \'have_innodb\'';
$sql = "SELECT SUPPORT FROM INFORMATION_SCHEMA.ENGINES WHERE ENGINE LIKE 'INNO%'";
...
// if (!$row || strtolower($row['Value']) != 'yes')
if (!$row || strtolower($row['Value']) === 'no')
```
* Restart the server and the problem will be fixed.
#### CORS
* Enable the headers module (`mod_headers`) in `httpd.conf`
* Add header settings
```apache
Header always set Access-Control-Max-Age "1000"
# "X-Requested-With, Content-Type, Origin, Authorization, Accept, Client-Security-Token, Accept-Encoding"
Header always set Access-Control-Allow-Headers "USE ABOVE COMMENT AS DEFAULT VALUE"
Header always set Access-Control-Allow-Methods "POST, GET, OPTIONS, DELETE, PUT"
```
#### Too big header file
* Add this to the `http {}` block of the nginx.conf file, normally located at `/etc/nginx/nginx.conf`:
proxy_buffer_size 128k;
proxy_buffers 4 256k;
proxy_busy_buffers_size 256k;
* Then add this to your PHP location block. It is located in your vhost file; look for the block that begins with `location ~ .php$ {`:
fastcgi_buffer_size 128k;
fastcgi_buffers 4 256k;
fastcgi_busy_buffers_size 256k;
<file_sep>+++
title = "Azure: App - 1"
weight = 3
description="Introduction of Azure App Service"
+++
## Azure App Service
Azure App Service is an HTTP-based service for hosting web applications, REST APIs, and mobile back ends. You can develop in your favorite language, be it .NET, .NET Core, Java, Ruby, Node.js, PHP, or Python. Applications run and scale with ease on both Windows and Linux-based environments.
* Built-in auto scale support
* Continuous integration/deployment support
* Deployment slots
### Limitations
* App Service on Linux is not supported on Shared pricing tier.
* You can't mix Windows and Linux apps in the same App Service plan.
* The Azure portal shows only features that currently work for Linux apps.
### Create a Web App
#### Create a web app via docker container
* Set the default subscription
```
az account set --subscription XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
```
* Create a resource group
```
az group create --name myResourceGroup --location <your-region>
```
* Create a service plan
> By default, the command below uses an inexpensive B1 pricing tier that is free for the first month. You can control the tier with the --sku parameter.
```
az appservice plan create --name myAppServicePlan \
--resource-group myResourceGroup --is-linux
```
* Create the web app with the container image
```sh
az webapp create --resource-group myResourceGroup \
--plan myAppServicePlan --name myApp \
--deployment-container-image-name XXXXX/my-web-app:latest
```
#### Add Let's Encrypt SSL for a custom domain or sub-domain
> ___The approach below is only for experiment. It is not scalable for commercial purpose.___
* Add customized sub-domain to your DNS
```sh
sub-domain.domain.com. 1800 IN A 172.16.58.3
```
* Generate the SSL cert via Certbot. The following example is done on a Linux machine whose IP is `10.10.10.10`
```sh
sudo certbot --nginx -d sub-domain.domain.com
```
* Generate the pfx file from pem file
> __Remember the password of the SSL cert__
```sh
cd
sudo openssl pkcs12 -export -inkey /etc/letsencrypt/live/sub-domain.domain.com/privkey.pem -in /etc/letsencrypt/live/sub-domain.domain.com/fullchain.pem -out sub-domain.domain.com.pfx
Enter Export Password:
Verifying - Enter Export Password:
```
* Download the certificate file
```sh
scp -i ~/.ssh/XXX_rsa <user>@10.10.10.10:~/sub-domain.domain.com.pfx ~/
```
* Follow the Azure App Service instruction to add customized domain
* Navigate to `Custom Domain` page: App Service > Settings > custom domains
* __Remove the previous A Record of this sub-domain from your DNS__
* Add following records in your DNS
```
sub-domain.domain.com. 1800 IN CNAME sub-domain.azurewebsites.net.
asuid.sub-domain.domain.com. 1800 IN TXT XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
```
* Validate the custom domain
* Add the custom domain
* Add the SSL to custom domain
* Under the SSL section there is an alarm sign which shows the domain has no SSL certificate
* Click `Add binding`
* Upload the pfx certificate file generated from previous step
* Enter the password of the certificate
* Select default option for `Private Certificate Thumbprint` (Only one option available)
* Select `SNI SSL` for TLS Type (Only one option available)
* Check the SSL with SSL online checker
<file_sep>+++
title = " Generic Predicate & Expression"
description = "Use generic predicate and expression for data query"
+++
### Generic Predicate and Expression for query
#### Prerequisites
> *You have experience developing .NET applications that include Entity Framework.*
> *You have experience using SQL to query a database.*
#### Problem
Entity Framework is now a core component in .NET applications that need to communicate with a database.
Business services get a lot of benefit from Entity Framework's ORM feature, and we can create a repository layer on top of the ORM to reduce simple but tedious database operations such as delete, insert, and query-all. However, when we need to run complicated queries to support a business service, it still takes a lot of effort to get the result, because Entity Framework uses LINQ as its query language, which is a bit more complicated and cumbersome than SQL, the native database language. Luckily, Entity Framework gives us another generic way to stay DRY: predicates and expressions can help us create generic queries and remove a lot of redundant code.
#### Solution
### Analysis
Basically, the idea is close to dynamic statements in ADO.NET. Generic programming is the key here, as we utilize predicates and expressions to dynamically rebuild the query filter.
If we look into the queries, we will find that 80% of them can be abstracted into the following syntax. Now it is easy to see how a generic predicate and expression can support this syntax.
```sql
SELECT * FROM TABLE_A
WHERE
<FIELD_1> [=|<|>|>=|<=|LIKE] <VALUE_1> ----- Expression
[ AND | OR ] ------------ Predicate
<FIELD_2> [=|<|>|>=|<=|LIKE] <VALUE_2> ----- Expression
```
As the next step, we can look into the Expression: FIELD_1 is actually a property of the entity object, and VALUE_1 is the filter value entered by the client. How to use the filter to narrow down the query result is part of the business logic, which is handled by the developer.
```ini
<FIELD_1> ==> Entity property
[=|<|>|>=|<=|LIKE] ==> Operator
<VALUE_1> ==> Filter value
```
### Design
According to the above analysis, we can design the classes to support this feature.
```ini
+--------------+
| Filter |
+--------------+
| Property | --- Map to the column (table) or property (entity)
| Op | --- Operator , e.g. Equals, Contains, GreaterThan, tec.
| Val | --- Value entered by client
+--------------+
+----------------------+
| ExpressionBuilder |
+----------------------+
|GetExpression( filer) | --------- Create expression by input filter
+ ---------------------+
+------------------+
| PredicateBuilder |
+------------------+
| And(exp1, exp2) | --- Use AND to combine two expressions
| Or(exp1, exp2) | --- Use OR to combine two expressions
+------------------+
```
### Implementation
The following is the implementation of the generic expression and predicate. Keep in mind that we are using LINQ to simulate the dynamic statement, so a few tricks are needed to get the same behavior as the SQL queries.
**Filter**
The Op property should be one of the operator names defined in the Op class below.
```cs
public class Filter
{
public string Property { get; set; }
public string Op { get; set; }
public object Val { get; set; }
}
```
**Op**
This class can be replaced by an enum if you want.
```cs
public static class Op
{
public const string Equals = "Equals";
public const string GreaterThan = "GreaterThan";
public const string LessThan = "LessThan";
public const string GreaterThanOrEqual = "GreaterThanOrEqual";
public const string LessThanOrEqual = "LessThanOrEqual";
public const string Contains = "Contains";
public const string StartsWith = "StartsWith";
public const string EndsWith = "EndsWith";
}
```
**ExpressionBuilder**
This class builds an Expression tree from Filter objects.
```cs
using System;
using System.Linq;
using System.Linq.Expressions;
using System.Collections.Generic;
using System.Reflection;
public class ExpressionBuilder
{
private static MethodInfo containsMethod = typeof(string).GetMethod("Contains");
private static MethodInfo startsWithMethod =
typeof(string).GetMethod("StartsWith", new Type[] { typeof(string) });
private static MethodInfo endsWithMethod =
typeof(string).GetMethod("EndsWith", new Type[] { typeof(string) });
public static Expression<Func<T, bool>> GetExpression<T>(IList<Filter> filters)
{
if (filters.Count == 0)
return null;
ParameterExpression param = Expression.Parameter(typeof(T), "t");
Expression exp = null;
if (filters.Count == 1)
exp = GetExpression<T>(param, filters[0]);
else if (filters.Count == 2)
exp = GetExpression<T>(param, filters[0], filters[1]);
else
{
while (filters.Count > 0)
{
var f1 = filters[0];
var f2 = filters[1];
if (exp == null)
exp = GetExpression<T>(param, filters[0], filters[1]);
else
exp = Expression.AndAlso(
exp,
GetExpression<T>(param, filters[0], filters[1]));
filters.Remove(f1);
filters.Remove(f2);
if (filters.Count == 1)
{
exp = Expression.AndAlso(
exp,
GetExpression<T>(param, filters[0]));
filters.RemoveAt(0);
}
}
}
return Expression.Lambda<Func<T, bool>>(exp, param);
}
private static ConstantExpression ConvetValueType(
MemberExpression member,
object value)
{
if (member.Type == typeof(int))
{
value = int.Parse(value.ToString());
}
else if (member.Type == typeof(decimal))
{
value = decimal.Parse(value.ToString());
}
else if (member.Type == typeof(float))
{
value = float.Parse(value.ToString());
}
else if (member.Type == typeof(double))
{
value = double.Parse(value.ToString());
}
return Expression.Constant(value);
}
private static Expression GetExpression<T>(
ParameterExpression param,
Filter filter)
{
MemberExpression member = Expression.Property(
param, filter.Property);
switch (filter.Op)
{
case Op.Equals:
return Expression.Equal(
member,
Expression.Constant(filter.Val, member.Type));
case Op.GreaterThan:
return Expression.GreaterThan(
member,
ConvetValueType(member, filter.Val));
case Op.GreaterThanOrEqual:
return Expression.GreaterThanOrEqual(
member,
ConvetValueType(member, filter.Val));
case Op.LessThan:
return Expression.LessThan(
member,
ConvetValueType(member, filter.Val));
case Op.LessThanOrEqual:
return Expression.LessThanOrEqual(
member,
ConvetValueType(member, filter.Val));
case Op.Contains:
return Expression.Call(
member,
containsMethod,
Expression.Constant(filter.Val, member.Type));
case Op.StartsWith:
return Expression.Call(
member,
startsWithMethod, Expression.Constant(
filter.Val,
member.Type));
case Op.EndsWith:
return Expression.Call(
member,
endsWithMethod,
Expression.Constant(filter.Val, member.Type));
}
return null;
}
private static BinaryExpression GetExpression<T>(
ParameterExpression param, Filter filter1, Filter filter2)
{
Expression bin1 = GetExpression<T>(param, filter1);
Expression bin2 = GetExpression<T>(param, filter2);
return Expression.AndAlso(bin1, bin2);
}
}
```
**PredicateBuilder**
This class manages all expressions to support dynamic statement query.
```cs
using System;
using System.Linq;
using System.Linq.Expressions;
using System.Collections.Generic;
public static class PredicateBuilder
{
public static Expression<Func<T, bool>> True<T>() { return f => true; }
public static Expression<Func<T, bool>> False<T>() { return f => false; }
public static Expression<Func<T, bool>> Or<T>(
this Expression<Func<T, bool>> expr1, Expression<Func<T, bool>> expr2)
{
var secondBody = expr2.Body.Replace(
expr2.Parameters[0], expr1.Parameters[0]);
return Expression.Lambda<Func<T, bool>>(
Expression.OrElse(expr1.Body, secondBody),
expr1.Parameters);
}
public static Expression<Func<T, bool>> And<T>(
this Expression<Func<T, bool>> expr1,
Expression<Func<T, bool>> expr2)
{
var secondBody = expr2.Body.Replace(
expr2.Parameters[0], expr1.Parameters[0]);
return Expression.Lambda<Func<T, bool>> (
Expression.AndAlso(expr1.Body, secondBody),
expr1.Parameters);
}
public static Expression Replace(
this Expression expression,
Expression searchEx,
Expression replaceEx)
{
return new ReplaceVisitor(searchEx, replaceEx).Visit(expression);
}
internal class ReplaceVisitor : ExpressionVisitor
{
private readonly Expression from, to;
public ReplaceVisitor(Expression from, Expression to)
{
this.from = from;
this.to = to;
}
public override Expression Visit(Expression node)
{
return node == from ? to : base.Visit(node);
}
}
}
```<file_sep>+++
title = "AWS: VPC - 4"
description = "VPC - Simple Demo "
weight=6
+++
## VPC Part 4
### Simple demo
* Diagram of customized VPC - MyDemoVPC with Internet Gateway and VPN connection
{{<mermaid>}}
graph LR
InternetGW(Internet Gateway)
VirtualGW(Virtual Gateway)
INTER(Internet - Public)
InternetGW --- INTER
VirtualGW --- SERVER
subgraph MyDemoVPC
EC2_A(EC2 Instance A)
EC2_B(EC2 Instance B)
EC2_E(EC2 Instance E)
EC2_F(EC2 Instance F)
EC2_C[(Database Master)]
EC2_D[(Database Slave)]
MainRouteTable(10.0.0.0/16)
PrvSubnet(10.0.2.0/24)
PubSubnet(10.0.1.0/24)
VPNSubnet(10.0.3.0/24)
MainRouteTable --- InternetGW
MainRouteTable --- NetworkACL
NetworkACL --- PubSecGrp
NetworkACL --- PrivSecGrp
PrivSecGrp --- PrvSubnet
VPNSubnet --- VirtualGW
VPNSubnet --- MainRouteTable
PubSecGrp --- PubSubnet
subgraph Implied_Router
MainRouteTable(10.0.0.0/16)
end
subgraph Private_Subnet
PrvSubnet
EC2_C
EC2_D
end
subgraph Public_Subnet
PubSubnet
EC2_A
EC2_B
end
subgraph VPN_Subnet
VPNSubnet
EC2_E
EC2_F
end
end
InternetGW
subgraph Internet
INTER
end
subgraph OnPremise
SERVER
end
{{</mermaid>}}
* Customized Route tables of Subnet Public_Subnet
Destination | Target
----- | -------
10.0.0.0/16 | local
fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/56 | local
0.0.0.0/0 | InternetGW
::0/0 | InternetGW
* Main Route tables of Subnet Private_Subnet
Destination | Target
------------|---------
10.0.0.0/16 | local
fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/56 | local
* Route table of Subnet VPN_Subnet
Destination | Target
------------|---------
10.0.0.0/16 | local
fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/56 | local
0.0.0.0/0 | VirtualGW
<file_sep>+++
title="Goroutine & Channel - 2"
description="Goroutine & Channels - Part 2"
weight = 7
+++
### Channel directionality
* A channel type may be annotated to specify that it may only send or only receive
```go
var send_only chan<- int // send-only: code can only send to it (the channel only receives data)
var recv_only <-chan int // receive-only: code can only receive from it (the channel only sends data)
```
* Receive-only channels ( <-chan T ) cannot be closed, because closing a channel is intended as a way for a sender to signal that no more values will be sent to the channel, so it has no meaning for receive-only channels.
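A minimal sketch (not from the original text): the sending side holds the send-only view and is therefore the one that closes the channel, while the receiving side only ranges over its receive-only view.
```go
package main

import "fmt"

// produce owns the send-only view, so it is the one that closes the channel.
func produce(out chan<- int) {
	for i := 0; i < 3; i++ {
		out <- i
	}
	close(out)
}

// consume gets a receive-only view; it cannot close ch, only drain it.
func consume(in <-chan int) {
	for v := range in {
		fmt.Println(v)
	}
}

func main() {
	ch := make(chan int)
	go produce(ch)
	consume(ch)
}
```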
#### Pipe & Filter pattern
* A goroutine can act as a channel processor: it processes what it receives from an input channel and sends the result to an output channel
```go
func main() {
sendChan := make(chan int)
receiveChan := make(chan string)
go processChannel(sendChan, receiveChan)
}
func processChannel(in <-chan int, out chan<- string) {
for inValue := range in {
result := strconv.Itoa(inValue) // processing inValue
// ...
out <- result
}
}
```
* Prime number generator sample
```go
func generate() chan int {
ch := make(chan int)
go func() {
for i := 2; ; i++ {
ch <- i
}
}()
return ch
}
// Filter out input values divisible by prime, send rest to returned channel
func filter(in chan int, prime int) chan int {
out := make(chan int)
go func() {
for {
if i := <-in; i%prime != 0 {
out <- i
}
}
}()
return out
}
func sieve() chan int {
out := make(chan int)
go func() {
ch := generate()
for {
prime := <-ch
ch = filter(ch, prime)
out <- prime
}
}()
return out
}
func main() {
primes := sieve()
for {
fmt.Println(<-primes)
}
}
```
##### Goroutine with select
* The select chooses which of the multiple communications listed by its cases can proceed. The default clause is optional; fall through behavior, like in the normal switch, is not permitted.
* If all of them are blocked, it waits until one can proceed.
* If multiple can proceed, it chooses one at random.
* When none of the channel operations can proceed and the default clause is present, then this is executed: the default is always runnable (that is: ready to execute).
* Using a send operation in a select statement with a default case guarantees that the send will be non-blocking (a minimal sketch follows after Sample 9)! If there are no cases, the select blocks execution forever.
* Sample 9
```go
func main() {
runtime.GOMAXPROCS(2) // in goroutine_select2.go
ch1 := make(chan int)
ch2 := make(chan int)
go pump1(ch1)
go pump2(ch2)
go suck(ch1, ch2)
time.Sleep(1e9)
}
func pump1(ch chan int) {
for i := 0; ; i++ {
ch <- i * 2
}
}
func pump2(ch chan int) {
for i := 0; ; i++ {
ch <- i + 5
}
}
func suck(ch1 chan int, ch2 chan int) {
for {
select {
case v := <-ch1:
fmt.Printf("Received on channel 1: %d\n", v)
case v := <-ch2:
fmt.Printf("Received on channel 2: %d\n", v)
}
}
}
```
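A minimal sketch (not in the original text) of the non-blocking send mentioned above: when the buffered channel is full, the default case is taken instead of blocking.
```go
package main

import "fmt"

func main() {
	ch := make(chan int, 1)
	ch <- 1 // fill the buffer

	select {
	case ch <- 2:
		fmt.Println("sent 2")
	default:
		// taken immediately because the buffer is full; the send never blocks
		fmt.Println("channel full, value dropped")
	}
}
```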
##### Timeout & Ticker
* Ticker: a struct time.Ticker which is an object that repeatedly sends a time value on a contained channel C at a specified time interval
```go
type Ticker struct {
C <-chan Time // the channel on which the ticks are delivered.
// contains filtered or unexported fields
// ...
}
```
* A ticker is stopped with Stop(), use this in a defer statement.
```go
ticker := time.NewTicker(updateInterval)
defer ticker.Stop()
// ...
select {
case u:= <- ch1:
// ...
case v:= <- ch2:
// ...
case <- ticker.C:
logState(status) // call some logging function logState
default: // no value ready to be received
// ...
}
```
* Handy to use when you have to limit the rate of processing per unit time. The time.Tick() function with signature func Tick(d Duration) <-chan Time is useful when you only need access to the return channel and don't need to shut it down.
* Sample below is good use case for time.Tick function
```go
rate_per_sec := 10
var dur time.Duration = 1e9 / time.Duration(rate_per_sec)
chRate := time.Tick(dur) // a tick every 1/10th of a second
for req := range requests {
<- chRate // rate limit our Service.Method RPC calls
go client.Call("Service.Method", req, ...) // client.Call is RPC call
}
```
* A Timer type looks exactly the same as a Ticker type (it is constructed with NewTimer(d)), but it sends the time only once, after a Duration d.
* The function time.After(d) is equivalent to NewTimer(d).C: after Duration d the current time is sent once on the returned channel; it resembles Tick(), but After() sends the time only once.
* Sample of timer
```go
func main() {
tick := time.Tick(1e8)
boom := time.After(5e8)
for {
select {
case <-tick:
fmt.Println("tick.")
case <-boom:
fmt.Println("BOOM!")
return
default:
fmt.Println(" .")
time.Sleep(5e7)
}
}
}
```
### Generator
#### Lazy generator
* A generator is a function that returns the next value in a sequence each time the function is called.
* It is a producer that only returns the next value, not the entire sequence; this is called lazy evaluation: only compute what you need at the moment, saving valuable resources (memory and CPU). It is a technique for evaluating expressions on demand.
##### Basic lazy generator
```go
var resume chan int
func integers() chan int {
yield := make(chan int)
count := 0
go func() {
for {
yield <- count
count++
}
}()
return yield
}
func generateInteger() int {
return <-resume
}
func main() {
resume = integers()
fmt.Println(generateInteger()) //=> 0
fmt.Println(generateInteger()) //=> 1
fmt.Println(generateInteger()) //=> 2
}
```
##### Generic Lazy Generator
* By making clever use of the empty interface, closures and higher order functions we can implement a generic builder BuildLazyEvaluator for the lazy evaluation function (this is best placed inside a utility package). The builder takes a function that has to be evaluated and an initial state as arguments and returns a function without arguments returning the desired value. The passed evaluation function has to calculate the next return value as well as the next state based on the state argument. Inside the builder a channel and a goroutine with an endless loop are created. The return values are passed to the channel from which they are fetched by the returned function for later usage. Each time a value is fetched the next one will be calculated.
```go
type Any interface{}
type EvalFunc func(Any) (Any, Any)
func main() {
evenFunc := func(state Any) (Any, Any) {
		oldState := state.(int)
		newState := oldState + 2
		return oldState, newState
}
even := BuildLazyIntEvaluator(evenFunc, 0)
for i := 0; i < 10; i++ {
fmt.Printf("%vth even: %v\n", i, even())
}
}
func BuildLazyEvaluator(evalFunc EvalFunc, initState Any) func() Any {
retValChan := make(chan Any)
loopFunc := func() {
var actState Any = initState
var retVal Any
for {
retVal, actState = evalFunc(actState)
retValChan <- retVal
}
}
retFunc := func() Any {
return <-retValChan
}
go loopFunc()
return retFunc
}
func BuildLazyIntEvaluator(evalFunc EvalFunc, initState Any) func() int {
evalFn := BuildLazyEvaluator(evalFunc, initState)
return func() int {
return evalFn().(int)
}
}
```
### Future
> A related idea is that of futures: sometimes you know you need to compute a value before you need to actually use the value. In this case, you can potentially start computing the value on another processor and have it ready when you need it.
* Futures are easy to implement via closures and goroutines; the idea is similar to generators, except that a future only needs to return one value.
* A matrix package using futures might look like the code below
```go
// futures used internally
type futureMatrix chan Matrix;
// API remains the same
func Inverse (a Matrix) Matrix {
return <-InverseAsync(promise(a))
}
func Product (a Matrix, b Matrix) Matrix {
return <-ProductAsync(promise(a), promise(b))
}
// expose async version of the API
func InverseAsync (a futureMatrix) futureMatrix {
c := make (futureMatrix)
go func () { c <- inverse(<-a) } ()
return c
}
func ProductAsync (a futureMatrix, b futureMatrix) futureMatrix {
	c := make (futureMatrix)
	go func () { c <- product(<-a, <-b) } ()
	return c
}
// actual implementation is the same as before
func product (a Matrix, b Matrix) Matrix {
....
}
func inverse (a Matrix) Matrix {
....
}
// utility fxn: create a futureMatrix from a given matrix
func promise (a Matrix) futureMatrix {
future := make (futureMatrix, 1)
future <- a;
return future;
}
```
* Use the matrix package
```go
func InverseProduct (a Matrix, b Matrix) Matrix {
a_inv := Inverse(a)
b_inv := Inverse(b)
return Product(a_inv, b_inv)
}
// async way
func InverseProduct (a Matrix, b Matrix) Matrix {
a_inv_future := InverseAsync(a);
b_inv_future := InverseAsync(b);
a_inv := <-a_inv_future;
b_inv := <-b_inv_future;
return Product(a_inv, b_inv);
}
```
#### Multiplexing
* Client-server applications are the kind of applications where goroutines and channels shine.
* Server side simulator sample
```go
type Request struct {
a, b int
replyChan chan int // reply channel inside the Request
}
type binOp func(a, b int) int
func run(op binOp, req *Request) {
req.replyChan <- op(req.a, req.b)
}
func server(op binOp, service chan *Request, quitChan chan bool) {
for {
select {
case req := <-service:
go run(op, req)
case <-quitChan:
return
}
}
}
func startServer(op binOp) (service chan *Request, quitChan chan bool) {
service = make(chan *Request)
quitChan = make(chan bool)
go server(op, service, quitChan)
return service, quitChan
}
func main() {
adder, quitChan := startServer(func(a, b int) int { return a + b })
const N = 100
var reqs [N]Request
for i := 0; i < N; i++ {
req := &reqs[i]
req.a = i
req.b = i + N
req.replyChan = make(chan int)
adder <- req
}
// checks:
for i := N - 1; i >= 0; i-- { // doesn’t matter what order
if <-reqs[i].replyChan != N+2*i {
fmt.Println("fail at", i)
} else {
fmt.Println("Request ", i, "is ok!")
}
}
quitChan <- true
fmt.Println("done")
}
```
### Parallel For-Loop
* In summary, we need a channel for synchronization purposes (used as a semaphore) when implementing a parallel for-loop, but we do not need to pass the data to the goroutines through channels when passing it via arguments and the stack works perfectly well.
```go
out := make(chan float64)
for _, xi := range data {
	xch := make(chan float64)
	go func() {
		xi := <-xch
		out <- doSomething(xi)
	}()
	xch <- xi
}
```
* Another sample of parallel-loop with semaphore
```go
// assumes the semaphore type (chan Empty) with Signal/Wait helpers from the semaphore pattern notes
func VectorScalarAdd(v []float64, s float64) {
	sem := make(semaphore, len(v))
	for i := range v {
		go func(i int) {
			v[i] += s
			sem.Signal()
		}(i)
	}
	sem.Wait(len(v))
}
```
#### Concurrent access to object with channel.
* To safeguard concurrent modifications of an object instead of using locking with a sync Mutex we can also use a backend goroutine for the sequential execution of anonymous functions.
* In the following program we have a type Person which now contains a field chF, a channel of anonymous functions. This is initialized in the constructor-method NewPerson, which also starts a method backend() as a goroutine. This method executes, in an infinite loop, all the functions placed on chF, effectively serializing them and thus providing safe concurrent access. The methods that change and retrieve the salary create an anonymous function which does that and put this function on chF, and backend() will sequentially execute them.
```go
type Person struct {
Name string
salary float64
chF chan func()
}
func NewPerson(name string, salary float64) *Person {
p := &Person{name, salary, make(chan func())}
go p.backend()
return p
}
func (p *Person) backend() {
for f := range p.chF {
f()
}
}
// Set salary.
func (p *Person) SetSalary(sal float64) {
p.chF <- func() { p.salary = sal }
}
// Retrieve salary.
func (p *Person) Salary() float64 {
fChan := make(chan float64)
p.chF <- func() { fChan <- p.salary }
return <-fChan
}
func (p *Person) String() string {
return "Person - name is: " + p.Name + " - salary is: " + strconv.FormatFloat(p.Salary(), 'f', 2, 64)
}
func main() {
bs := NewPerson("<NAME>", 2500.5)
fmt.Println(bs)
bs.SetSalary(4000.25)
fmt.Println("Salary changed:")
fmt.Println(bs)
}
```
<file_sep>+++
date = "2015-07-12T14:59:31+11:00"
title = "Macbook Notes - Intel X64"
description = "Environment setup"
+++
### Prerequisites
* Mac OS 10.12+
* Intel X64 CPU
### Ownership issue
* If you have Homebrew or other software installed by someone else, you need to change ownership
```
sudo chown -R $(whoami) /usr/local/brew
sudo chown -R $(whoami) /usr/local/etc
sudo chown -R $(whoami) /usr/local/share
sudo chown -R $(whoami) /usr/local/lib
```
### Install Homebrew
```bash
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"
## Brew commands
brew update
brew upgrade
brew list
```
{{% notice info %}}
The `cask` is no longer a separate brew command since 2021. The command has changed as shown below
{{% /notice %}}
```
# Before 2021
brew cask install XXXX
# After 2021
brew install --cask XXXX
```
### Install Zsh Prezto
- Clone Zsh Prezto
```sh
git clone --recursive https://github.com/sorin-ionescu/prezto.git "${ZDOTDIR:-$HOME}/.zprezto"
```
- Remove default `zshrc`
```sh
rm -f ~/.zshrc
```
- Initialize our Prezto configuration files.
```sh
setopt EXTENDED_GLOB
for rcfile in "${ZDOTDIR:-$HOME}"/.zprezto/runcoms/^README.md(.N); do
ln -s "$rcfile" "${ZDOTDIR:-$HOME}/.${rcfile:t}"
done
```
- Setup Prezto Style
- Open up ~/.zpreztorc and find where it says:
- change “sorin” to “steeef.”
```sh
zstyle ':prezto:module:prompt' theme 'steeef'
```
- Add Some Prezto Modules
```sh
'environment' \
'terminal' \
'editor' \
'history' \
'directory' \
'spectrum' \
'utility' \
'completion' \
'prompt' \
'git' \
'completion' \
'syntax-highlighting' \
'history-substring-search'
```
### Install NVM
```sh
brew install nvm
```
* Add following setting to file `.zshrc`
```sh
# NVM
export NVM_DIR="$HOME/.nvm"
[ -s "/usr/local/opt/nvm/nvm.sh" ] && \. "/usr/local/opt/nvm/nvm.sh" # This loads nvm
[ -s "/usr/local/opt/nvm/etc/bash_completion.d/nvm" ] && \. "/usr/local/opt/nvm/etc/bash_completion.d/nvm" # This loads nvm bash_completion
```
### Install python 3
```
brew install [email protected]
```
### Install MySql
{{% notice info %}}
MySQL 5.6 and 5.7 don't work well on macOS Big Sur or later. If you need an old MySQL version, please consider using Docker to host the MySQL database.
{{% /notice %}}
```
brew install mysql
```
### Install JDK
* Tap adoptopenjdk to brew
```sh
brew tap adoptopenjdk/openjdk
brew search openjdk
==> Formulae
openjdk openjdk@11 openjdk@8 openj9 openvdb
==> Casks
adoptopenjdk-jre adoptopenjdk11-openj9 adoptopenjdk12-openj9-jre-large adoptopenjdk14 adoptopenjdk15-openj9 adoptopenjdk8
adoptopenjdk-openj9 adoptopenjdk11-openj9-jre adoptopenjdk12-openj9-large adoptopenjdk14-jre adoptopenjdk15-openj9-jre adoptopenjdk8-jre
adoptopenjdk-openj9-jre adoptopenjdk11-openj9-jre-large adoptopenjdk13 adoptopenjdk14-openj9 adoptopenjdk15-openj9-jre-large adoptopenjdk8-openj9
adoptopenjdk-openj9-jre-large adoptopenjdk11-openj9-large adoptopenjdk13-jre adoptopenjdk14-openj9-jre adoptopenjdk15-openj9-large adoptopenjdk8-openj9-jre
adoptopenjdk-openj9-large adoptopenjdk12 adoptopenjdk13-openj9 adoptopenjdk14-openj9-jre-large adoptopenjdk16 adoptopenjdk8-openj9-jre-large
adoptopenjdk10 adoptopenjdk12-jre adoptopenjdk13-openj9-jre adoptopenjdk14-openj9-large adoptopenjdk16-jre adoptopenjdk8-openj9-large
adoptopenjdk11 adoptopenjdk12-openj9 adoptopenjdk13-openj9-jre-large adoptopenjdk15 adoptopenjdk16-openj9 adoptopenjdk9
adoptopenjdk11-jre adoptopenjdk12-openj9-jre adoptopenjdk13-openj9-large adoptopenjdk15-jre adoptopenjdk16-openj9-jre
```
* Install multiple versions of JDK
```
brew install --cask adoptopenjdk # Latest version is 16
brew install --cask adoptopenjdk8
brew install --cask adoptopenjdk11
```
* LTS version info
Java Version | First Release | Next Release | End of Availability
------|--------|-----------|-----
Java 8 (LTS) | Mar 2014 | jdk8u302 20th Jul 2021 | At Least May 2026
Java 11 (LTS) |September 2018 | jdk-11.0.12 20th Jul 2021 |At Least Oct 2024
Java 17 (LTS) |Sep 2021 | jdk-17 14th Sep 2021 | TBC
* Inspect all available JDKs
```
ls /Library/Java/JavaVirtualMachines
adoptopenjdk-11.jdk adoptopenjdk-16.jdk adoptopenjdk-8.jdk
```
#### Switch Java version with alias
- Add following lines to file .zshrc
```sh
# Java
export JAVA_8_HOME=$(/usr/libexec/java_home -v1.8)
export JAVA_11_HOME=$(/usr/libexec/java_home -v11)
export JAVA_16_HOME=$(/usr/libexec/java_home -v16)
alias java8='export JAVA_HOME=$JAVA_8_HOME'
alias java11='export JAVA_HOME=$JAVA_11_HOME'
alias java16='export JAVA_HOME=$JAVA_16_HOME'
```
- Switch JDK
```sh
source ~/.zshrc
java8
java -version
java11
java -version
```
#### Switch Java version with function
- Add following lines to file .zshrc
```sh
jdk() {
if [[ ! -z $1 ]]; then
version=$1
unset JAVA_HOME;
export JAVA_HOME=$(/usr/libexec/java_home -v"$version");
java -version
else
echo Argument version is required. e.g. 1.8, 11, 16
echo Example: jdk 1.8 or jdk 11
fi
}
```
- Switch JDK
```sh
source ~/.zshrc
jdk
Argument version is required. e.g. 1.8, 11, 16
Example: jdk 1.8 or jdk 11
jdk 1.8
jdk 11
```
### Install Vim plugins
```
[[ ! -d ~/.vim/autoload ]] && mkdir -p ~/.vim/autoload
[[ ! -d ~/.vim/plugged ]] && mkdir -p ~/.vim/plugged
curl -fLo ~/.vim/autoload/plug.vim --create-dirs \
https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim
cp ./.vimrc $HOME/
echo 'Launch vi and install plugins'
############################################################
# launch vim & install plugins by typing :PlugInstall
vi
```
### Install KubeCtl
```
brew unlink kubernetes-cli
brew install https://raw.githubusercontent.com/Homebrew/homebrew-core/f25e36259eaa8bcf9b9add2c599aa6d8b15f437b/Formula/kubernetes-cli.rb
```
### Install Hugo
```
## Version: hugo_extended_0.54.0_macOS-64bit
tar -xvzf ~/Downloads/hugo_X.Y_osx-64bit.tgz
cp hugo /usr/local/bin/  # copy the extracted hugo binary onto your PATH
```
### Install AWS Cli 2.x
```
curl "https://awscli.amazonaws.com/AWSCLIV2-2.0.30.pkg" -o "AWSCLIV2.pkg"
sudo installer -pkg AWSCLIV2.pkg -target /
```
### Install Docker Desktop for mac
- Download & Install [Docker for Mac](https://docs.docker.com/desktop/mac/install/)
- Setup cli completion for zsh prezto
```
curl -fLo ~/.zprezto/modules/completion/external/src/_docker \
https://raw.githubusercontent.com/docker/cli/master/contrib/completion/zsh/_docker
```
- Add following line to zshrc to enable completion
```
autoload -Uz compinit; compinit
```
### Use Docker to launch databases
- Launch MySql 5.6 with docker
```
docker run -d --name mysql \
-p 3306:3306 \
-e MYSQL_ROOT_PASSWORD=<PASSWORD> \
mysql:5.6.51
```
- Access the MySql via bash
```
docker exec -it mysql bash
# After the bash into the docker container
mysql -u root -p
```
- Access the MySql via MySql client
```
mysql -h 0.0.0.0 \
-u root -p
```
<file_sep>+++
title = "Golang"
description = "Golang Notes"
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
date = "2011-03-09T10:59:31+11:00"
title = "Windows cmd & hotkey - 1"
description="A note for everyone who wants to use Command and Hot Key as hacker "
weight=10
+++
*Do you want to amaze your friends with your computer skills and have them praise you as a genius? Or look like the hackers you have watched in Sci-Fi movies? You don't need a Mac, Linux or any other operating system; with just Windows you can show off and look like a master of zeroes and ones, even if you have no idea what any of it means. Here are some tricks that will make your friends' jaws drop.*
## Start Windows command prompt as hacker
* Use hotkeys to open the `Run` feature in two keystrokes: `WinKey + R`
* Type `cmd` and press `Enter`

* Type `color A` to change the color of text to **Green**
* Change the title to **Hacker Tool**
* List the folders of current directory
```
C:\Users\<yourname>>color A
C:\Users\<yourname>>title Hacker Tool
C:\Users\<yourname>>cd \
C:\>tree
```
## Use other command prompt
* [cmder](http://cmder.net/) is an awesome product. I suggest you just choose mini version to download and install if you are not the heavy git user. There are so many built-in features you can play around.
* [console2](https://sourceforge.net/projects/console/) is very good as well. I used it for many years. I'm planning to migrate to `cmder`, but it will take me some time, because I have some customized scripts that need to run in `console2`.
## Useful Windows hotkeys
I believe you already know the common hotkeys, e.g. `Ctrl + C, Ctrl + V, Ctrl + A`. The hotkeys I list below are some rarely-used but very useful hotkeys.
### General hotkeys
* `Ctrl + Shift + Esc` -- Open task manager
* `WinKey + R` -- run dialog
* `Winkey + D` -- toggle 'show desktop'
* `Winkey + L` -- lock workstation
* `Winkey + E` -- windows explorer
* `Ctrl + Shift + R` -- clear page cache and refresh webpage on browser
* `Alt + (shift +) tab` -- switch windows forwards (or backwards)
* `Alt + F4` -- close the selected application
* `F2` , renames selected file. Also used with spreadsheet cells.
* `Ctrl and (+/-)` -- zoom in or zoom out text on the editor tool
* Middle click a tab -- close tab
### Hotkeys for Windows 7 or higher version
* `WinKey + W` -- search setting items
* `WinKey + Q` -- search all items
* `WinKey + F` -- search file items
* `WinKey + T` -- use keyboard arrow keys to navigate dock
* `Winkey + X` - bring up laptop settings control panel
* `Ctrl + N` - new tab
* `Ctrl + Shift + N` - new Folder
### Common short command lines for `Run` feature
* `cmd` -- start a Windows command prompt
* `calc` -- start the calculator application
* `notepad` -- start the notepad application
### Advanced short command lines for `Run` feature
* `mstsc` -- start the remote desktop application
* `regedit` -- start registry editor application
* `resmon` -- awesome resource monitor - bandwidth etc (win7 or higher)
* `perfmon` -- a pretty decent performance monitor (vista or higher)
* `services.msc` -- windows service management
* `compmgmt.msc` -- computer management including all other management
* `eventvwr`-- windows event log viewer
* `appwiz.cpl` -- windows programs and features management on control panel
## Most common and useful commands
This article won't list all commands or every usage of each command. Here I will just choose the commands and usages which are useful for most people. Some advanced commands will be explained in Part-2.
Before you start typing any command, I want to share a common mistake most beginners make, including myself: we always forget to use the help command before we Google a solution when we hit some impediment or roadblock. Actually the help command is the most common built-in feature within any software or tool. Learning how to use the help command, or how to find the help information, is the first important step when we are going to learn anything new.
### help
* Start command prompt and type `help`. You will get a list of command which you can use, and short description of each command.
* Use help command to see and learn other commands
```
C:\>help
C:\>help cd
```
### start
* Start another `cmd` window prompt.
### pwd
* Type `pwd` to display current directory. All commands will use current directory as default path input if the path parameter is not specified.
### dir
* Displays a list of files and subdirectories in a directory.
* Type `dir /a:h/a:d` to display hidden subdirectories only
* Type `dir /p/w` to display many items per screen within wide list format
* Type `dir /o:-s` to display items sorted by size (biggest first)
* Type `dir /o:s` to display items sorted by size (smallest first)
* Type `dir /o:dn` to display items sorted by date/time (oldest first) and name ( alphabetic)
### cd / chdir
* Displays the name of or changes the current directory
* Type `cd` to display the name of directory
* Type `cd c:\windows` to change to `c:\Windows>`
* Type `cd /d D:` to change to d driver if you have d driver
### tree
* Use `tree /f/a <path>` to graphically display the folder structure of a drive or path.
### ren / rename
* Type `ren abc cba` to rename the file "abc" to "cba" if there is a file named abc under the current directory.
* Type `ren *.md *.txt` to rename all files under the current directory with the `md` extension to the `txt` extension
### md / mkdir
* Use `md a\b\c\d & tree a` to create all directories once and display result as follow
```
<current-directory>\a
|___b
|___c
|___d
```
### copy
* Use `touch test.txt & copy test.txt C:\User\<yourname>\` to create a test.txt file and copy the test.txt to C:\User\<yourname>\
### xcopy
* Use `md a\b\c & touch a\test.txt & touch a\b\test.txt & xcopy /s /e /q a C:\User\<yourname>\a\` to copy folder a to `C:\User\<yourname>\`
* Use `tree /f /a C:\User\<yourname>\a` to verify the result
### move
* Type `move a b` to move folder `a` into folder `b`.
### rd / rmdir
* Type `rd a` to remove an empty directory `a`
* Type `rd /s a` to remove a directory `a` including all files and empty subdirectories within folder `a`.
### del
* __*IMPORTANT*__ : Items deleted by the command `del` cannot be restored from the **Recycle Bin**. Please be careful before you use this command.
* Type `del` to delete files or `del *.txt` to delete all files with `txt` extension
### cls
* Type `cls` to clean the screen
<file_sep>+++
title = "Cassandra Practices"
description="Cassandra Introduction & Good practices ..."
draft = "false"
+++
_Apache Cassandra is a free open-source database system that is NoSQL based. Meaning Cassandra does not use the table model seen in MySQL, MSSQL or PostgreSQL, but instead uses a cluster model. It’s designed to handle large amounts of data and is highly scalable._
## Install Cassandra on Ubuntu
### Install Java(JRE) 8+
```bash
sudo add-apt-repository ppa:webupd8team/java
sudo apt update
sudo apt-get install oracle-java8-set-default
java -version
```
### Install Python 2.7+
```bash
sudo apt install python
python --version
```
## Install Cassandra
* First, we have to add Cassandra repository to source list by running following command. The `39x` is the version. Use 40x if Cassandra 4.0 is the newest version:
```bash
echo "deb http://www.apache.org/dist/cassandra/debian 39x main" | \
sudo tee -a /etc/apt/sources.list.d/cassandra.sources.list
```
* Next, run the cURL command to add the repository keys :
```bash
curl https://www.apache.org/dist/cassandra/KEYS | \
sudo apt-key add -
```
* We can now update the repositories and install Cassandra:
```bash
sudo apt-get update
sudo apt install cassandra
# optional - It works on MacBook
sudo reboot
```
* Check Cassandra status
```bash
nodetool status
```
## Install Cassandra with Docker
### Create node n1
```bash
docker run --name n1 -d tobert/cassandra -dc DC1 -rack RAC1
docker ps
```
### Get IP of node n1
```bash
IP=`docker inspect -f '{{ .NetworkSettings.IPAddress }}' n1`
echo $IP
```
### Check status of n1
```bash
docker exec -it n1 nodetool status
# You will see the status of Datacenter DC1 below
# Datacenter: DC1
# ===============
# Status=Up/Down
# |/ State=Normal/Leaving/Joining/Moving
# -- Address Load Tokens Owns (effective) Host ID Rack
# UN 172.17.0.2 51.53 KB 256 100.0% 8965869d-cae8-41a6-bf19-ff69c2605c6c RAC1
```
### Create node n2 on rack RAC2
```bash
docker run --name=n2 -d tobert/cassandra -dc DC1 -rack RAC2 --seeds $IP
docker exec -it n1 nodetool status
# You will see the status of Datacenter DC1 below
# Datacenter: DC1
# ===============
# Status=Up/Down
# |/ State=Normal/Leaving/Joining/Moving
# -- Address Load Tokens Owns (effective) Host ID Rack
# UN 172.17.0.3 72.01 KB 256 100.0% cfa002b0-350c-41b8-9f86-eb8978a43b26 RAC2
# UN 172.17.0.2 51.53 KB 256 100.0% 8965869d-cae8-41a6-bf19-ff69c2605c6c RAC1
```
### Check node n2 configuration
```bash
docker exec -it n2 cat /data/conf/cassandra.yaml | grep endpoint
docker exec -it n2 cat /data/conf/cassandra-rackdc.properties | grep -e "dc" -e "rack"
```
### Create node n3 on Datacenter DC2 and rack RAC1
```bash
docker run --name=n3 -d tobert/cassandra -dc DC2 -rack RAC1 --seeds $IP
docker exec -it n1 nodetool status
# You will see the status below. It may take a few seconds to get everything up and running.
# Datacenter: DC1
# ===============
# Status=Up/Down
# |/ State=Normal/Leaving/Joining/Moving
# -- Address Load Tokens Owns (effective) Host ID Rack
# UN 172.17.0.3 134.03 KB 256 66.1% cfa002b0-350c-41b8-9f86-eb8978a43b26 RAC2
# UN 172.17.0.2 102.84 KB 256 64.5% 8965869d-cae8-41a6-bf19-ff69c2605c6c RAC1
# Datacenter: DC2
# ===============
# Status=Up/Down
# |/ State=Normal/Leaving/Joining/Moving
# -- Address Load Tokens Owns (effective) Host ID Rack
# UN 172.17.0.4 14.38 KB 256 69.4% 0fad8335-763d-42fa-9934-3ed10c44eaa8 RAC1
```
### Setup replication strategy
```bash
docker exec -it n1 cqlsh
create keyspace csdb with replication = {'class':'NetworkTopologyStrategy','DC1':2,'DC2':1};
```
### Check csdb status
```bash
docker exec -it n1 nodetool describering csdb
# Run nodetool status and note that the one node in DC2 owns all the data
docker exec -it n1 nodetool status csdb
# Stop and remove all four docker containers:
docker stop n1 n2 n3 n4; docker rm n1 n2 n3 n4
```
## Create single Datacenter with 3 nodes
### Create 3 nodes with local mounted directory
```bash
docker run --name=n1 -v $PWD/ws/ps/cassdev/scripts:/scripts -d tobert/cassandra
docker exec -it n1 nodetool status
IP=`docker inspect -f '{{ .NetworkSettings.IPAddress }}' n1`
echo $IP
docker run --name=n2 -d tobert/cassandra --seeds $IP
docker run --name=n3 -d tobert/cassandra --seeds $IP
docker exec -it n1 /bin/bash
cd scripts
```
### List scripts
```bash
docker exec -it n1 /bin/bash
cd /scripts && ls
```
### Create keyspace with simple replication
```bash
docker exec -it n1 cqlsh
create keyspace csdb with replication =
  {'class':'SimpleStrategy', 'replication_factor':1};
```
### Create table and insert data with script
```bash
docker exec -it n1 cqlsh
> desc keyspaces;
> desc tables;
> use ussdb;
> source '/scripts/courses.cql'
> source '/scripts/users.cql'
```
### CQL
```bash
docker exec -it n1 cqlsh
> use ussdb;
> desc tables;
> select * from users;
> select * from courses;
```
### Troubleshoot
* Connection error
```
Connection error: Could not connect to localhost:9160
```
* Update the configuration - cassandra.yaml
* Change the following IP address
```yaml
- seeds: "xxx.xxx.xxx.xxx"
listen_address: xxx.xxx.xxx.xxx
broadcast_rpc_address: xxx.xxx.xxx.xxx
```
* Restart the cassandra
```bash
sudo systemctl restart cassandra
```
<file_sep>+++
title = "AWS Note - 5"
description = "Load Balanceing "
draft="true"
+++
## Load balancer
Elastic Load Balancing is a regional service. It automatically distributes incoming application traffic across multiple targets, such as Amazon EC2 instances, containers, IP addresses, and Lambda functions. It can handle the varying load of your application traffic in a single Availability Zone or across multiple Availability Zones.
### Application Load Balancer
#### Layer 7 load balancing (HTTP/HTTPS)
#### Server Name Indication (SNI)
#### IP addresses as targets
#### Lambda functions
### Network Load Balancer
<file_sep>+++
title="Goroutine & Channel - 1"
description="Goroutine & Channel - Part 1"
weight=6
+++
### Goroutine
* A goroutine is implemented as a function or method (this can also be an anonymous or lambda function) and called (invoked) with the keyword go. This starts the function running in parallel with the current computation but in the same address space and with its own stack.
* Go's concurrency primitives provide the basis for a good concurrent program design: expressing program structure so as to represent independently executing actions; so Go's emphasis is not in the first place on parallelism: concurrent programs may or may not be parallel. Parallelism is the ability to make things run quickly by using multiple processors. But it turns out most often that a well-designed concurrent program also has excellent parallel performance.
#### Basic Goroutine
* Sample 1: The code below will not print anything, because main exits before the "hello" goroutine kicks off. main itself is just another goroutine, and it always runs first.
```go
func main() {
go func() {
println("Hello")
}()
}
```
* Sample 2: Add a timer to make the main goroutine sleep for a while, so the "hello" goroutine gets a chance to start.
```go
func main() {
go func() {
println("Hello") // Hello
}()
time.Sleep(3000)
}
```
* Sample 3: The code below will print "hello" 10 times and then print "world" 10 times.
```go
func main() {
go func() {
for i := 0; i < 10; i++ {
println("Hello")
time.Sleep(2000)
}
}()
go func() {
for i := 0; i < 10; i++ {
println("World")
}
}()
time.Sleep(9999)
}
```
* Sample 4: The code below will print "hello" and "world" in different order.
```go
func main() {
runtime.GOMAXPROCS(4)
go func() {
for i := 0; i < 10; i++ {
println("Hello")
time.Sleep(500)
}
}()
go func() {
for i := 0; i < 10; i++ {
println("World")
time.Sleep(500)
}
}()
time.Sleep(999999)
}
```
* Sample 5: File watcher
```go
const watchedPath = "./source"
func main() {
for {
d, _ := os.Open(watchedPath)
files, _ := d.Readdir(-1)
for _, fi := range files {
filePath := watchedPath + "/" + fi.Name()
f, _ := os.Open(filePath)
data, _ := ioutil.ReadAll(f)
f.Close()
os.Remove(filePath)
go func(data string) {
reader := csv.NewReader(strings.NewReader(data))
records, _ := reader.ReadAll()
for _, r := range records {
invoice := new(Invoice)
invoice.Number = r[0]
invoice.Amount, _ = strconv.ParseFloat(r[1], 64)
invoice.PurchaseOrderNumber, _ = strconv.Atoi(r[2])
unixTime, _ := strconv.ParseInt(r[3], 10, 64)
invoice.InvoiceDate = time.Unix(unixTime, 0)
fmt.Printf("Received Invoice '%v' for $%.2f and submitted for processing\n", invoice.Number, invoice.Amount)
}
}(string(data))
}
d.Close()
time.Sleep(100 * time.Millisecond)
}
}
type Invoice struct {
Number string
Amount float64
PurchaseOrderNumber int
InvoiceDate time.Time
}
```
#### Using GOMAXPROCS
* When GOMAXPROCS is greater than 1, goroutines run on a thread pool with that many threads. With the gccgo compiler GOMAXPROCS is effectively equal to the number of running goroutines.
* Observations from experiments: on a 1 CPU laptop performance improved when GOMAXPROCS was increased to 9. On a 32 core machine, the best performance was reached with GOMAXPROCS=8, a higher number didn’t increase performance in that benchmark.
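A minimal sketch (not part of the original notes) showing how to query and set GOMAXPROCS at runtime; since Go 1.5 it defaults to the number of available CPUs.
```go
package main

import (
	"fmt"
	"runtime"
)

func main() {
	fmt.Println("CPUs available:", runtime.NumCPU())
	prev := runtime.GOMAXPROCS(runtime.NumCPU()) // use all CPUs; returns the previous setting
	fmt.Println("previous GOMAXPROCS:", prev)
	fmt.Println("current GOMAXPROCS:", runtime.GOMAXPROCS(0)) // an argument of 0 only queries the value
}
```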
### Channels
#### Channels for communication between goroutines
* Go has a special type, the channel, which is a like a conduit (pipe) through which you can send typed values and which takes care of communication between goroutines, avoiding all the pitfalls of shared memory; the very act of communication through a channel guarantees synchronization.
* Data are passed around on channels: only one goroutine has access to a data item at any given time: so data races cannot occur, by design. The ownership of the data (that is the ability to read and write it) is passed around.
* A channel is in fact a typed message queue: data can be transmitted through it. It is a First In First Out (FIFO) structure and so they preserve the order of the items that are sent into them (for those who are familiar with it, a channel can be compared to a two-way pipe in Unix shells).
* A channel is also a reference type, so we have to use the make() function to allocate memory for it.
**NOTE: Don’t use print statements to indicate the order of sending to and receiving from a channel: this could be out of order with what actually happens due to the time lag between the print statement and the actual channel sending and receiving.**
#### Blocking of channels
* A send operation on a channel (and the goroutine or function that contains it) blocks until a receiver is available for the same channel
* A receive operation for a channel blocks (and the goroutine or function that contains it) until a sender is available for the same channel
##### Goroutine sync through one or more channels
* Blocking - deadlock
* Sample 1 : deadlock because of lack of sender
```go
func main() {
ch := make(chan string)
fmt.Println(<-ch)
}
//fatal error: all goroutines are asleep - deadlock!
```
* Sample 2: deadlock because of lack of receiver
```go
func main() {
ch := make(chan string) // unbuffered: the send blocks forever because there is no receiver
ch <- "Hello"
}
```
* Sample 3
```go
func main() {
ch := make(chan string, 1)
ch <- "Hello"
fmt.Println(<-ch) // Hello
}
```
##### Async channels - channel with buffer
* Channel with buffer
```go
buf := 100
ch1 := make(chan string, buf)
```
* Sample 4: deadlock again because the second send cannot proceed (the buffer of size 1 is already full)
```go
func main() {
ch := make(chan string, 1)
ch <- "Hello"
ch <- "Hello"
fmt.Println(<-ch)
fmt.Println(<-ch)
}
```
* Sample 5
```go
func main() {
ch := make(chan string, 2)
ch <- "Hello"
ch <- "Hello"
fmt.Println(<-ch) //Hello
fmt.Println(<-ch) // Hello
}
```
##### Closing channel
* Sample 6: Closing a channel does not prevent the receiver from getting the values that were already sent
```go
func main() {
	ch := make(chan string, 2)
	ch <- "Hello"
	ch <- "World"
	close(ch) // no more sends allowed, but buffered values can still be received
	fmt.Println(<-ch) // Hello
	fmt.Println(<-ch) // World
}
```
* Sample 7: A sender cannot send a message to a closed channel
```go
func main() {
	ch := make(chan string, 2)
	ch <- "Hello"
	ch <- "Hello"
	fmt.Println(<-ch) // Hello
	fmt.Println(<-ch) // Hello
	close(ch)
	ch <- "Hello" // panic: send on closed channel
}
```
#### Semaphore pattern
* The goroutine compute signals its completion by putting a value on the channel ch, the main routine waits on <-ch until this value gets through.
```go
func compute(ch chan int) {
ch <- someComputation()
// when it completes, signal on the channel.
}
func main() {
ch := make(chan int) // allocate a channel.
go compute(ch) // start something in a goroutine
doSomethingElseForAWhile()
result := <-ch
}
```
##### Implement a semaphore with a buffered channel
* There is no semaphore implementation in Go’s sync package, but they can be emulated easily using a buffered channel:
* the buffered channel is the number of resources we wish to synchronize
* the length (number of elements currently stored) of the channel is the number of resources currently being used.
* the capacity minus the length of the channel is the number of free resources (the integer value of traditional semaphores)
* Sample 8: semaphore pattern
```go
type Empty interface {}
var empty Empty
// do sth ...
data := make([]float64, N)
res := make([]float64, N)
sem := make(chan Empty, N) // semaphore
// do sth ...
for i, xi := range data {
go func (i int, xi float64) {
res[i] = doSomething(i,xi)
sem <- empty
} (i, xi)
}
// wait for goroutines to finish
for i := 0; i < N; i++ { <-sem }
```
* Semaphore operations sample pattern
```go
type semaphore chan Empty
// acquire n resources
func (s semaphore) P(n int) {
e := new(Empty)
for i := 0; i < n; i++ {
s <- e
}
}
// release n resources
func (s semaphore) V(n int) {
for i := 0; i < n; i++ {
<-s
}
}
```
* Semaphore for a mutex:
```go
/* mutexes */
func (s semaphore) Lock() {
s.P(1)
}
func (s semaphore) Unlock() {
s.V(1)
}
/* signal-wait */
func (s semaphore) Wait(n int) {
s.P(n)
}
func (s semaphore) Signal() {
s.V(1)
}
```
#### Channel Factory pattern
* Another pattern common in this style of programming goes as follows: instead of passing a channel as a parameter to a goroutine, let the function make the channel and return it (so it plays the role of a factory); inside the function a lambda function is called as a goroutine.
```go
func main() {
stream := pump()
go suck(stream)
// the above 2 lines can be shortened to: go suck( pump() )
time.Sleep(1e9)
}
func pump() chan int {
ch := make(chan int)
go func() {
for i := 0; ; i++ {
ch <- i
}
}()
return ch
}
func suck(ch chan int) {
for {
fmt.Println(<-ch)
}
}
```
##### For—range applied to channels
* The range clause on for loops accepts a channel ch as an operand, in which case the for loops over the values received from the channel.
* Obviously another goroutine must be writing to ch (otherwise the execution blocks in the for-loop) and must close ch when it is done writing.
```go
func main() {
suck(pump())
time.Sleep(1e9)
}
func pump() chan int {
ch := make(chan int)
go func() {
for i := 0; ; i++ {
ch <- i
}
}()
return ch
}
func suck(ch chan int) {
go func() {
for v := range ch {
fmt.Println(v)
}
}()
}
```
##### Iterator pattern
* Another common case where we have to populate a channel with the items of a container type which contains an index-addressable field items . For this we can define a method which returns a read-only channel.
* Inside the goroutine, a for-loop iterates over the elements in the container c (for tree or graph algorithms, this simple for-loop could be replaced with a depth-first search)
```go
func (c *container) Iter() <-chan item {
ch := make(chan item)
go func() {
for i := 0; i < c.Len(); i++ {
// or use a for-range loop
ch <- c.items[i]
}
}()
return ch
}
// The code which calls this method can then iterate over the container
for x := range container.Iter() { ... }
```
#### Producer Consumer pattern
* A Produce() function which delivers the values needed by a Consume function. Both could be run as a separate goroutine, Produce putting the values on a channel which is read by Consume.
```go
package main
/* producer-consumer problem in Go */
import ("fmt")
var done = make(chan bool)
var msgs = make(chan int)
func produce () {
for i := 0; i < 10; i++ {
msgs <- i
}
done <- true
}
func consume () {
for {
msg := <-msgs
fmt.Println(msg)
}
}
func main () {
go produce()
go consume()
<- done
}
```
<file_sep>+++
title = "Thread & Task"
description = "Introduction of Thread & Task "
+++
## C# Thread & Task
> From .NET 4.0, .NET applications got the Parallel Framework Extensions (PFx), and along the way it delivered an entirely new model for async processing in .NET. In .NET 4.0 the thread pool queue was redesigned with the new requirements of PFx in mind. Instead of using a simple linked list, the queue was built with arrays of work items with the arrays connected into a linked list.
> With the release of .NET 4.0, Microsoft introduced yet another API for building asynchronous applications: the Task Parallel Library (TPL).
### Old way of C# thread
* In the early days of .NET you would see sample code like the following in many applications.
## Lock keyword
```cs
lock(stateGuard)
{
cash += amount;
receivables -= amount;
}
```
## Abort
`Thread.Abort` should be avoided at all costs. Its behavior is much safer and more predictable since .NET 2.0, but there are still some pretty serious pitfalls with it. There are other ways to stop a thread (a stopping-flag sketch follows after this list):
* Use `Thread.Interrupt`
* Use a stopping flag
* Use `WaitHandle` events
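A minimal sketch (not from the original notes) of the stopping-flag approach; the class and method names are illustrative. The worker polls a `volatile` flag and exits its loop cooperatively instead of being aborted.
```cs
using System;
using System.Threading;

public class Worker
{
    // volatile so the latest value is always visible to the worker thread
    private volatile bool shouldStop;

    public void DoWork()
    {
        while (!shouldStop)
        {
            Console.WriteLine("working...");
            Thread.Sleep(100);
        }
        Console.WriteLine("worker stopped gracefully");
    }

    public void RequestStop() => shouldStop = true;
}

public static class Program
{
    public static void Main()
    {
        var worker = new Worker();
        var thread = new Thread(worker.DoWork);
        thread.Start();

        Thread.Sleep(500);
        worker.RequestStop(); // cooperative stop instead of Thread.Abort
        thread.Join();
    }
}
```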
### New way of C# Async & Thread
## Monitor
* Monitor is not different from lock, but the Monitor class provides more control over the synchronization of various threads trying to access the same block of code.
* Lock and Monitor sample
```cs
class Program
{
static readonly object _object = new object();
public static void PrintNumbers()
{
Monitor.Enter(_object);
try
{
for (int i = 0; i < 5; i++)
{
Thread.Sleep(100);
Console.Write(i + ",");
}
Console.WriteLine();
}
finally
{
Monitor.Exit(_object);
}
}
static void TestLock()
{
lock (_object)
{
Thread.Sleep(100);
Console.WriteLine(Environment.TickCount);
}
}
static void Main(string[] args)
{
Thread[] Threads = new Thread[3];
for (int i = 0; i < 3; i++)
{
Threads[i] = new Thread(new ThreadStart(PrintNumbers));
Threads[i].Name = "Child " + i;
}
foreach (Thread t in Threads)
t.Start();
Console.ReadLine();
}
}
```
* Once you have a lock on a code region, you can use the Monitor.Wait, Monitor.Pulse, and Monitor.PulseAll methods.
* Lock and Monitor are basically used for the same purpose in multithreading; the difference is that Monitor is used when we want more control over synchronization of multiple threads running a specific section of code.
### Monitor Semaphore
```cs
public class MonitorSemaphore
{
private int currentCount;
private readonly int maxCount;
private readonly object guard = new object();
public MonitorSemaphore(int initialCount, int maxCount)
{
this.currentCount = initialCount;
this.maxCount = maxCount;
}
public void Enter()
{
lock (guard)
{
while (currentCount == maxCount)
{
Monitor.Wait(guard);
}
currentCount++;
}
}
public void Exit()
{
lock (guard)
{
currentCount--;
Monitor.Pulse(guard);
}
}
public int CurrentCount{get { return currentCount; }}
}
```
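A minimal usage sketch (not part of the original article) for the `MonitorSemaphore` above: at most two threads are allowed inside the protected section at a time; the thread count and sleep durations are illustrative.
```cs
using System;
using System.Threading;

public static class MonitorSemaphoreDemo
{
    public static void Main()
    {
        // At most 2 threads may be inside the protected section at once.
        var semaphore = new MonitorSemaphore(initialCount: 0, maxCount: 2);

        for (int i = 0; i < 5; i++)
        {
            int id = i; // capture a copy for the closure
            new Thread(() =>
            {
                semaphore.Enter();
                try
                {
                    Console.WriteLine($"thread {id} entered (in use: {semaphore.CurrentCount})");
                    Thread.Sleep(500); // simulate work
                }
                finally
                {
                    semaphore.Exit();
                }
            }).Start();
        }
    }
}
```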
<file_sep>+++
date = "2020-02-07T16:56:21+11:00"
title = "Angular 9 CRM Starter Project"
description="Ng Crm is reusable CRM project for real-world business based on Angular 9"
+++
### Key Takeaways
* Angular 9 is here now.
* Angular Material 9 is ready as well.
* Reusable starter project
### Summary
**Ng-Crm** is a reusable CRM starter project for real-world business based on Angular 9 and Angular-Material 9.x.
This project was generated with [Angular CLI](https://github.com/angular/angular-cli) version 9.x. The goal of this project is to create reusable project for real-world business. To achieve this target, we need a solution which includes simple authentication process, restful API feature with token support and simple but elegant UI design.
#### Features
* This project is built on the top of AngularClass/Angular-Starter.
* The UI part of this project is comprehensively built on Angular Material.
* This project includes ng-charts.
* ~~To simulate real-world business, this starter project chooses Json-Server as fake Restful API. (You can simple replace it with your own API).~~
* ~~Fake API is just readonly fake service.~~
#### Live Demo
[Demo App](https://angular-app-demo.harryho.org): The demo is just a proof of concept. It doesn't have a back-end API or all the features of the master branch.
### Build & Setup
```bash
# Clone project
git clone https://github.com/harryho/ng-crm.git
# Install Angular CLI
npm install -g @angular/cli
# prepare Json-Server as fake Restful API
cd ng-crm
# Install the packages with npm
npm install
# Start the app with npm
npm start
# Or use ng
ng serve
# Test with npm
npm run test
# Or use ng
ng test
# build for production
npm run build --prod=true
# run as production
npm install -g serve
serve dist
```
### Docker
```
## Run / Test release without building new image
npm run build
# Launch nginx image to test latest release
docker pull nginx:alpine
docker run -p 8080:80 -v \
<your_aboslute_path>/dist:/usr/share/nginx/html nginx:alpine
# Build release image
docker build . -t nc-prd:2.0
# Launch the development image in the backgroud
docker run -d --publish 8080:80 --name nc2 nc-prd:2.0
# Check the log
docker logs nc2 -f
```
### Welcome to fork or clone!
For detailed explanation on how things work, checkout following links please.
* [angular](https://angular.io/)
* [angular-material](https://material.angular.io/)
* [ng-charts](https://github.com/valor-software/ng2-charts)
* [rxjs](https://rxjs.dev/api)
### Structure of ng-crm
``` ini
├── angular.json <-// configuration of dev or prod
├── config
│ └── nginx.conf <-// nginx config for deployment
├── src
│ ├── app
| | +---_gurad <-// auth guard for authentication
| | +---_models <-// common models for whole app
| | +---_services <-// common services for whole app
| | +---about <-// about component
| | +---customer <-// customer component
| | +---dashboard <-// dashboard component
| | +---notfoundpage <-// notfoundpage component
| | +---login <-// login component
| | +---order <-// customer component
| | +---root <-// root component
| | +---shared <-// common component for whole app
│ │ ├── app-routing.module.ts
│ │ ├── app.component.html
│ │ ├── app.component.scss
│ │ ├── app.component.spec.ts
│ │ ├── app.component.ts
│ │ ├── app.module.ts
│ ├── assets <-// images and css from third parties
│ │ └── img
|── Dockerfile <-// project comes with docker
├── tsconfig.app.json
├── tsconfig.json
├── tsconfig.spec.json
└── tslint.json
|__ ....
```
### Screenshots
> 
> 
> 
> 
### Browse [Repository](https://github.com/harryho/ng-crm.git)
### __Alternatives__
There are two similar projects respectively built on the Vue.js and React. If you have interests in those technical stacks. You can find and clone those projects below.
* [Vue2 Crm](/projects/vue2-crm).
* [React Redux Crm](/projects/react-crm).
<file_sep>+++
title = "F# Lazy"
description = "F# Computation - Lazy expressions"
weight = 14
+++
## Lazy Expressions
Lazy expressions are expressions that are not evaluated immediately, but are instead evaluated when the result is needed. This can help to improve the performance of your code.
`let identifier = lazy ( expression )`
Lazy expressions enable you to improve performance by restricting the execution of an expression to only those situations in which a result is needed.
To force the expressions to be performed, you call the method Force. Force causes the execution to be performed only one time. Subsequent calls to Force return the same result, but do not execute any code.
```fsharp
let x = 10
let result = lazy (x + 10)
printfn "%d" (result.Force())
```
Lazy evaluation, but not the Lazy type, is also used for sequences.<file_sep>+++
date = "2017-08-03T16:56:21+11:00"
title = "FlatApi - Restful API for python dev"
description = "FlatApi is a zero coding and zero configuration restful API server inspired by Json-Server and Eve"
+++
## Summary
FlatApi is a **zero coding** and **zero configuration** restful API server inspired by Json-Server and Eve. It is designed to be used as a fake restful API for development, especially for people who want to use the Python stack. The setup process takes **less than 10 seconds**.
## FlatApi
- **Zero coding and configuration to setup Restful API** FlatApi is designed to use without coding and configuration by default. You just need one config to setup all endpoints you need, then you can use it immediately.
- **Flask based web server** FlatApi is built on top of Flask.
- **Json flat file database** FlatApi manages a JSON flat file database, which works as a simple document oriented store.
- **Caching memory storage available** FlatApi supports caching memory storage after version 4.0.0.
### Install Package
```bash
$ pip uninstall flatapi
$ pip install --no-cache-dir flatapi
```
### Quick Start
- Launch FlatApi without configuration
```bash
## Start the FlatApi - Sample 1
$ python3 /<path_to_package>/flatapi -S MEMORY -G NO
## Start the FlatApi - Sample 2
$ python3 /<path_to_package>/flatapi --storage MEMORY -cfgfile NO
## Start the FlatApi with prefix - Sample 3
$ python3 /<path_to_package>/flatapi --storage memory -cfgfile no -X api
\(^_^)/ Hi
Loading is done.
There is no config file found. Flat Api uses internal configuration.
Resource :
/<string:doc> -- The doc is the collection name
you want to post or put the object.
/<string:doc>/<int:id> --The id is the unique id for query or delete.
Database: Memory
* Running on http://127.0.0.1:5000/ (Press CTRL+C to quit)
```
- Test api via postman
This is a handy and easy way to play around with the API immediately.
```bash
GET /posts --> Get all posts
POST /posts --> Add new post
PUT /posts/1 --> Update existing post which id is 1
DELETE /posts/1 --> Delete a post which id is 1
DELETE /posts --> Delete all posts
```
- Test api via curl
```bash
## Add a new post
$ curl -d "{\"text\":\"post 1\",\"author\":\"harry\"}" -H "Content-Type: application/json" -X POST http://localhost:5000/posts
{"author": "harry", "text": "post 1", "id": 1}
## Get post by Id
$ curl -X GET http://localhost:5000/posts/1
{"author": "harry", "text": "post 1", "id": 1}
## Get all posts
$ curl -X GET http://localhost:5000/posts
[{"author": "harry", "text": "post 1", "id": 1}]
## Update the post
$ curl -d "{\"text\":\"post updated\",\"author\":\"harry\"}" -H "Content-Type: application/json" -X PUT http://localhost:5000/posts/1
[{"author": "harry", "text": "post updated", "id": 1}]
## Delete
$ curl -X DELETE http://localhost:5000/posts
```
### Browse [Repository](https://github.com/harryho/flat-api.git)<file_sep>+++
title = "AWS: VPC - 5"
description = "VPC Peering, Direct Connect, Transit Gateway"
weight=6
+++
## VPC Part 5
### Direct Connect
AWS Direct Connect is a cloud service solution that makes it easy to establish a dedicated network connection from your premises to AWS. Using AWS Direct Connect, you can establish private connectivity between AWS and your datacenter, office, or colocation environment, which in many cases can reduce your network costs, increase bandwidth throughput, and provide a more consistent network experience than Internet-based connections.
Using industry standard 802.1q VLANs, this dedicated connection can be partitioned into multiple virtual interfaces. This allows you to use the same connection to access public resources such as objects stored in Amazon S3 using public IP address space, and private resources such as Amazon EC2 instances running within an Amazon Virtual Private Cloud (VPC) using private IP space, while maintaining network separation between the public and private environments. Virtual interfaces can be reconfigured at any time to meet your changing needs.
### Transit Gateway
AWS Transit Gateway is a service that enables customers to connect their Amazon Virtual Private Clouds (VPCs) and their on-premises networks to a single gateway.
With AWS Transit Gateway, you only have to create and manage a single connection from the central gateway in to each Amazon VPC, on-premises data center, or remote office across your network. Transit Gateway acts as a hub that controls how traffic is routed among all the connected networks which act like spokes. This hub and spoke model significantly simplifies management and reduces operational costs because each network only has to connect to the Transit Gateway and not to every other network. Any new VPC is simply connected to the Transit Gateway and is then automatically available to every other network that is connected to the Transit Gateway. This ease of connectivity makes it easy to scale your network as you grow.
<file_sep>+++
title = "Generic Type & Trait "
description="Rustlang Introduction: Generic Type, Trait and Lifetime "
weight = 4
+++
### Generic Type
* Generics are abstract stand-ins for concrete types or other properties. When we’re writing code, we can express the behavior of generics or how they relate to other generics without knowing what will be in their place when compiling and running the code.
#### Removing Duplication by Extracting a Function
* Steps we took to remove the duplicated code (a short sketch follows after this list):
* Identify duplicate code.
* Extract the duplicate code into the body of the function and specify the inputs and return values of that code in the function signature.
* Update the two instances of duplicated code to call the function instead.
* Defining a function makes our code more flexible and provides more functionality to callers of our function while preventing code duplication.
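A minimal sketch of those steps (the classic "find the largest number" example): the previously duplicated loop is extracted into a single `largest` function and called from both places.
```rs
fn largest(list: &[i32]) -> i32 {
    // the previously duplicated loop, extracted into one function
    let mut largest = list[0];
    for &item in list {
        if item > largest {
            largest = item;
        }
    }
    largest
}

fn main() {
    let numbers = vec![34, 50, 25, 100, 65];
    println!("The largest number is {}", largest(&numbers));

    let more_numbers = vec![102, 34, 6000, 89, 54, 2, 43, 8];
    println!("The largest number is {}", largest(&more_numbers));
}
```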
#### Definition with generic type
* Define structs to use a generic type parameter in one or more fields using the <> syntax.
* A sample below which won't compile
```rs
struct Point<T> {
x: T,
y: T,
}
fn main() {
let wont_work = Point { x: 5, y: 4.0 };
// ^^^ expected integral variable, found
// floating-point variable
// note: expected type `{integer}`
}
```
* The sample above can be refactored as below, with two type parameters, so x and y can have different types
```rs
struct Point<T, U> {
x: T,
y: U,
}
```
* define enums to hold generic data types in their variants.
```rs
enum Option<T> {
Some(T),
None,
}
enum Result<T, E> {
Ok(T),
Err(E),
}
```
* implement methods on structs and enums, and use generic types in their definitions
```rs
struct Point<T> {
x: T,
y: T,
}
impl<T> Point<T> {
fn x(&self) -> &T {
&self.x
}
}
fn main() {
let p = Point { x: 5, y: 10 };
println!("p.x = {}", p.x());
}
```
* implement methods with concrete type f32, meaning we don’t declare any types after impl.
```rs
impl Point<f32> {
fn distance_from_origin(&self) -> f32 {
(self.x.powi(2) + self.y.powi(2)).sqrt()
}
}
```
* Generic type parameters in a struct definition aren’t always the same as those you use in that struct’s method signatures.
* Sample below the method mixup on the Point<T, U> struct from Listing 10-8. The method takes another Point as a parameter, which might have different types from the self Point we’re calling mixup on. The method creates a new Point instance with the x value from the self Point (of type T) and the y value from the passed-in Point (of type W).
```rs
struct Point<T, U> {
x: T,
y: U,
}
impl<T, U> Point<T, U> {
fn mixup<V, W>(self, other: Point<V, W>) -> Point<T, W> {
Point {
x: self.x,
y: other.y,
}
}
}
fn main() {
let p1 = Point { x: 5, y: 10.4 };
let p2 = Point { x: "Hello", y: 'c'};
let p3 = p1.mixup(p2);
println!("p3.x = {}, p3.y = {}", p3.x, p3.y); // p3.x = 5, p3.y = c
}
```
* The purpose of above example is to demonstrate a situation in which some generic parameters are declared with impl and some are declared with the method definition. Here, the generic parameters T and U are declared after impl, because they go with the struct definition. The generic parameters V and W are declared after fn mixup, because they’re only relevant to the method.
#### Performance of Code Using Generics
* The good news is that Rust implements generics in such a way that your code doesn’t run any slower using generic types than it would with concrete types.
* Rust accomplishes this by performing monomorphization of the code that is using generics at compile time. Monomorphization is the process of turning generic code into specific code by filling in the concrete types that are used when compiled.
* Sample of Rust compile the generic type
```rs
let integer = Some(5);
let float = Some(5.0);
// Above is generic type
// -------------------
// Rust will create specific definition as following
enum Option_i32 {
Some(i32),
None,
}
enum Option_f64 {
Some(f64),
None,
}
fn main() {
let integer = Option_i32::Some(5);
let float = Option_f64::Some(5.0);
}
```
* Rust compiles this code, it performs monomorphization. During that process, the compiler reads the values that have been used in Option<T> instances and identifies two kinds of Option<T>: one is i32 and the other is f64. As such, it expands the generic definition of Option<T> into Option_i32 and Option_f64, thereby replacing the generic definition with the specific ones.
* Because Rust compiles generic code into code that specifies the type in each instance, there is no runtime cost for using generics. When the code runs, it performs just as it would if we had duplicated each definition by hand. The process of monomorphization makes Rust’s generics extremely efficient at runtime.
### Traits: Defining Shared Behavior
* A trait tells the Rust compiler about functionality a particular type has and can share with other types. We can use trait bounds to specify that a generic can be any type that has certain behavior.
#### Defining a trait
* A type’s behavior consists of the methods we can call on that type. Different types share the same behavior if we can call the same methods on all of those types. Trait definitions are a way to group method signatures together to define a set of behaviors necessary to accomplish some purpose.
* Sample of trait
```rs
pub trait Summary {
fn summarize(&self) -> String;
}
```
#### Implementing a trait
* Implementing a trait on a type is similar to implementing regular methods. The difference is that after impl, we put the trait name that we want to implement, then use the for keyword, and then specify the name of the type we want to implement the trait for.
```rs
pub struct NewsArticle {
pub headline: String,
pub location: String,
pub author: String,
pub content: String,
}
impl Summary for NewsArticle {
fn summarize(&self) -> String {
format!("{}, by {} ({})", self.headline, self.author, self.location)
}
}
pub struct Tweet {
pub username: String,
pub content: String,
pub reply: bool,
pub retweet: bool,
}
impl Summary for Tweet {
fn summarize(&self) -> String {
format!("{}: {}", self.username, self.content)
}
}
```
* Test the implementation
```rs
let tweet = Tweet {
username: String::from("horse_ebooks"),
content: String::from("of course, as you probably already know, people"),
reply: false,
retweet: false,
};
println!("1 new tweet: {}", tweet.summarize());
```
#### Default implementation
* Sample
```rs
pub trait Summary {
fn summarize(&self) -> String {
String::from("(Read more...)")
}
}
```
* Default implementations can call other methods in the same trait, even if those other methods don’t have a default implementation. In this way, a trait can provide a lot of useful functionality and only require implementors to specify a small part of it.
```rs
pub trait Summary {
fn summarize_author(&self) -> String;
fn summarize(&self) -> String {
format!("(Read more from {}...)", self.summarize_author())
}
}
impl Summary for Tweet {
fn summarize_author(&self) -> String {
format!("@{}", self.username)
}
}
let tweet = Tweet {
username: String::from("horse_ebooks"),
content: String::from("of course, as you probably already know, people"),
reply: false,
retweet: false,
};
println!("1 new tweet: {}", tweet.summarize());
```
#### Traits as Parameters
* use traits to define functions that accept many different types.
* Sample: Instead of a concrete type for the item parameter, we specify the impl keyword and the trait name.
```rs
pub fn notify(item: impl Summary) {
println!("Breaking news! {}", item.summarize());
}
```
* Another sample: the equivalent trait bound syntax, which is more verbose but lets us require that both parameters have the same type
```rs
pub fn notify<T: Summary>(item1: T, item2: T) {}
```
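* Multiple bounds can be combined with `+`. A sketch, assuming the `Summary` trait above plus `std::fmt::Display`:
```rs
use std::fmt::Display;

pub fn notify<T: Summary + Display>(item: T) {
    // item can be formatted with {} and also summarized.
    println!("Breaking news from {}! {}", item, item.summarize());
}
```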
#### Clearer Trait Bounds with where Clauses
* Using too many trait bounds has its downsides. Each generic has its own trait bounds, so functions with multiple generic type parameters can contain lots of trait bound information between the function’s name and its parameter list, making the function signature hard to read.
* Sample with where
```rs
fn some_function<T, U>(t: T, u: U) -> i32
where T: Display + Clone,
U: Clone + Debug {}
```
#### Returning Types that Implement Traits
* use the impl Trait syntax in the return position to return a value of some type that implements a trait
* However, you can only use impl Trait if you’re returning a single type
```rs
fn returns_summarizable() -> impl Summary {
Tweet {
username: String::from("horse_ebooks"),
content: String::from("of course, as you probably already know, people"),
reply: false,
retweet: false,
}
}
```
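* For example, a function trying to return either a `NewsArticle` or a `Tweet` behind `impl Summary` is rejected; a sketch of the non-compiling case:
```rs
// fn returns_summarizable(switch: bool) -> impl Summary {
//     if switch {
//         NewsArticle { /* ... */ }   // one concrete type here...
//     } else {
//         Tweet { /* ... */ }         // ...and a different one here: not allowed
//     }
// }
```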
#### Fixing the largest Function with Trait Bounds
* Without the PartialOrd bound, the largest function will not compile, with the error "an implementation of `std::cmp::PartialOrd` might be missing for `T`"
* Without the Copy bound, the largest function fails to compile as well, as the comments below show
```rs
fn largest<T: PartialOrd + Copy>(list: &[T]) -> T {
let mut largest = list[0]; // Without copy -- error: "cannot move out of here"
for &item in list.iter() {
if item > largest { // // Without copy -- error: "cannot move out of borrowed content"
largest = item;
}
}
largest
}
fn main() {
let number_list = vec![34, 50, 25, 100, 65];
let result = largest(&number_list);
println!("The largest number is {}", result);
let char_list = vec!['y', 'm', 'a', 'q'];
let result = largest(&char_list);
println!("The largest char is {}", result);
}
```
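* If you would rather avoid the `Copy` bound, a common alternative (a sketch, not from the original text) is to return a reference instead of moving values out of the slice:
```rs
fn largest_ref<T: PartialOrd>(list: &[T]) -> &T {
    // Compare through references, so T never has to be copied or moved.
    let mut largest = &list[0];
    for item in list {
        if item > largest {
            largest = item;
        }
    }
    largest
}
```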
#### Using Trait Bounds to Conditionally Implement Methods
* By using a trait bound with an impl block that uses generic type parameters, we can implement methods conditionally for types that implement the specified traits.
```rs
#![allow(unused_variables)]
fn main() {
use std::fmt::Display;
struct Pair<T> {
x: T,
y: T,
}
impl<T> Pair<T> {
fn new(x: T, y: T) -> Self {
Self {
x,
y,
}
}
}
impl<T: Display + PartialOrd> Pair<T> {
fn cmp_display(&self) {
if self.x >= self.y {
println!("The largest member is x = {}", self.x);
} else {
println!("The largest member is y = {}", self.y);
}
}
}
}
```
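* A usage sketch, assuming the `Pair` definitions above: `cmp_display` is callable here only because `i32` implements both `Display` and `PartialOrd`.
```rs
// inside main, after the definitions above:
let pair = Pair::new(5, 10);
pair.cmp_display(); // prints: The largest member is y = 10
```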
### Lifetimes
#### Preventing Dangling References with Lifetimes
* Rust requires us to annotate the relationships using generic lifetime parameters to ensure the actual references used at runtime will definitely be valid.
* The main aim of lifetimes is to prevent dangling references, which cause a program to reference data other than the data it’s intended to reference.
```rs
{
let r; // ---------+-- 'a
// |
{ // |
let x = 5; // -+-- 'b |
r = &x; // | |
} // -+ |
// |
println!("r: {}", r); // |
} // ---------+
```
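* The fix is to make sure the referenced data outlives the reference; the same snippet compiles once `x` lives at least as long as `r` (sketch):
```rs
{
    let x = 5;            // ----------+-- 'b
    let r = &x;           //  --+-- 'a |
                          //    |      |
    println!("r: {}", r); //    |      |
                          //  --+      |
}                         // ----------+
```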
#### Generic Lifetimes in Functions
* When a function returns a reference that comes from one of its parameters, Rust often cannot tell from the signature alone how the lifetimes relate, so we annotate them explicitly, as the `longest` function in the next section does.
#### Lifetime Annotation Syntax
```rs
&i32 // a reference
&'a i32 // a reference with an explicit lifetime
&'a mut i32 // a mutable reference with an explicit lifetime
```
#### Lifetime Annotations in Function Signatures
```rs
#![allow(unused_variables)]
fn main() {
let string1 = String::from("abcd");
let string2 = "xyz";
let result = longest(string1.as_str(), string2);
println!("The longest string is {}", result);
}
fn longest<'a>(x: &'a str, y: &'a str) -> &'a str {
if x.len() > y.len() {
x
} else {
y
}
}
```
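* The annotations matter when the arguments live for different lengths of time. A sketch (assuming the `longest` function above) of a case the borrow checker rejects:
```rs
// This version does NOT compile: `result` may borrow from `string2`,
// which does not live long enough.
fn main() {
    let string1 = String::from("long string is long");
    let result;
    {
        let string2 = String::from("xyz");
        result = longest(string1.as_str(), string2.as_str());
    }
    println!("The longest string is {}", result); // error: `string2` does not live long enough
}
```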
#### Lifetime Elision
```rs
fn first_word(s: &str) -> &str {
let bytes = s.as_bytes();
for (i, &item) in bytes.iter().enumerate() {
if item == b' ' {
return &s[0..i];
}
}
&s[..]
}
```
* The patterns programmed into Rust’s analysis of references are called the lifetime elision rules. These aren’t rules for programmers to follow; they’re a set of particular cases that the compiler will consider, and if your code fits these cases, you don’t need to write the lifetimes explicitly.
* The elision rules don’t provide full inference. If Rust deterministically applies the rules but there is still ambiguity as to what lifetimes the references have, the compiler won’t guess what the lifetime of the remaining references should be.
* Lifetimes on function or method parameters are called input lifetimes, and lifetimes on return values are called output lifetimes.
#### Lifetime Elision Rules
* The compiler uses three rules to figure out what lifetimes references have when there aren’t explicit annotations. The first rule applies to input lifetimes, and the second and third rules apply to output lifetimes. If the compiler gets to the end of the three rules and there are still references for which it can’t figure out lifetimes, the compiler will stop with an error. These rules apply to fn definitions as well as impl blocks.
* The first rule is that each parameter that is a reference gets its own lifetime parameter.
* The second rule is if there is exactly one input lifetime parameter, that lifetime is assigned to all output lifetime parameters
* The third rule is if there are multiple input lifetime parameters, but one of them is &self or &mut self because this is a method, the lifetime of self is assigned to all output lifetime parameters.
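* Applying the rules to the earlier `first_word` signature, as a worked sketch (the code block that follows then illustrates the third rule):
```rs
// Start:   fn first_word(s: &str) -> &str
// Rule 1: every reference parameter gets its own lifetime parameter:
//          fn first_word<'a>(s: &'a str) -> &str
// Rule 2: with exactly one input lifetime, it is assigned to the output:
//          fn first_word<'a>(s: &'a str) -> &'a str
// All lifetimes are now known, so nothing has to be written by hand.
```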
```rs
#![allow(unused_variables)]
fn main() {
struct ImportantExcerpt<'a> {
part: &'a str,
}
impl<'a> ImportantExcerpt<'a> {
fn announce_and_return_part(&self, announcement: &str) -> &str {
println!("Attention please: {}", announcement);
self.part
}
}
}
```
#### Lifetime Annotations in Method Definitions
* Lifetime names for struct fields always need to be declared after the impl keyword and then used after the struct’s name, because those lifetimes are part of the struct’s type.
* In method signatures inside the impl block, references might be tied to the lifetime of references in the struct’s fields, or they might be independent. In addition, the lifetime elision rules often make it so that lifetime annotations aren’t necessary in method signatures.
```rs
impl<'a> ImportantExcerpt<'a> {
fn level(&self) -> i32 {
3
}
}
```
#### The Static Lifetime
```rs
let s: &'static str = "I have a static lifetime.";
```
#### Generic Type, Trait Bounds & Lifetimes
```rs
use std::fmt::Display;
fn longest_with_an_announcement<'a, T>(x: &'a str, y: &'a str, ann: T) -> &'a str
where T: Display
{
println!("Announcement! {}", ann);
if x.len() > y.len() {
x
} else {
y
}
}
```
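* A usage sketch, assuming the definition above: any type implementing `Display` works for `ann`, and the returned reference follows `'a`:
```rs
fn main() {
    let s1 = String::from("long string is long");
    let s2 = String::from("xyz");
    let result = longest_with_an_announcement(s1.as_str(), s2.as_str(), "Today's winner");
    println!("The longest string is {}", result);
}
```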
#### Summary
Generic type parameters let you apply the code to different types. Traits and trait bounds ensure that even though the types are generic, they’ll have the behavior the code needs. You learned how to use lifetime annotations to ensure that this flexible code won’t have any dangling references. And all of this analysis happens at compile time, which doesn’t affect runtime performance!
<file_sep>+++
date = "2016-04-10T14:59:31+11:00"
title = "Azure Practices"
description = "Azure Practices"
+++
> *Here we are going to explore how to deploy web applications to Azure. In September 2015, Microsoft launched a new portal for Azure. To be honest, the new portal is amazing; IMO, it is one of the best changes from Microsoft.*
## Prerequisites
* You have experience with .net web applications.
* You have experience with website or web app deployment.
## Getting Started
* Register a Microsoft account. e.g. live.com, outlook.com, etc.
* Start free trial account on Azure cloud
* Install Azure powershell, Azure CLI, Azure SDK for Visual Studio
## App Service
### Web app
* Create a website from portal
* Create a website from visual studio
* Create an empty Asp.Net website with only index.html
* Publish it to Azure via Web Deploy
* Profile > Microsoft Azure App Service
* Add Azure account > New app service ( name: webapp )
* Publish method pick Web Deploy
* Validate Connection > Publish
* Create a deploy slot ( webdeploy )
* Create a web app from portal. Default deployment slot is not available
* Change app service plan > Choose Standard tier
* Add slot > Type dev as name > leave the configuration plan as default
* Change previous publish profile from Web Deploy to Web Deploy Package and create a package in local somewhere e.g. C:\webdeploy\webapp.zip
* Deploy to azure ( Start PowerShell as admin )
```
Add-AzureAccount
Get-AzureSubscription -Default
Publish-AzureWebsiteProject -Name 'webdeploy' -Package 'C:\webdeploy\webapp.zip'
```
* Deploy website to dev slot
* Publish... > Profile > Expand the webdeploy > Deployment slots > dev > Next > Publish
### Webjobs
* Open previous app service ( webapp )
* Set deployment credentials for Git / FTP
* Set Git Repo
* Deploy options > Local Git Repository > Copy git URL https://xxxxxxx.git
* Create demoWeb project with only one index page.
* demoWeb ( folder )
  * Index.html
* Deploy project to azure git repo ( Start powershell as admin)
```bash
git init
git status
git config user.name "harryho"
git config user.email "<EMAIL>"
git add .
git commit -m "initial commit"
git remote add azure https://xxxxxxx.git
git remote -v
git push azure master
```
* Use Kudu
* DEVELOPMENT TOOLS > Advanced Tools
* Create a webjob and deploy to azure (Start powershell as admin)
```bash
mkdir webjob01
cd webjob01
echo "Get-Date | Out-File -FilePath 'd:\home\site\wwwroot\dateoutput.txt' -Append" > getdatejob.ps1
.\getdatejob.ps1
cat .\dateoutput.txt
7z a -tzip getdatejob.zip *.ps1
7z l getdatejob.zip
help New-AzureWebsiteJob
New-AzureWebsiteJob -Name 'WebAppV1301' -JobName 'GetDateJob01' -JobType Continuous -JobFile '.\getdatejob.zip'
```
* Create webjob from visual studio
* Create webjob project. Add a one line program in the main method `Console.WriteLine("Hello World"); `
* Right click project node > Publish Azure Webjob ...
* Setup job schedule
* Publish to app service ( webapp )
* Deploy Asp.net SPA with SQL database to Azure
* Create blank SQL database from portal. Remember the admin Id and password
* Install VS 2015, Azure SDK for VS 2015
* Create web project ( C# ), choose ASP.NET Web Application ( .Net Framework )
### Custom Domain
* The Free tier does not support custom domain names
* Bind the existing name
* Navigate to Custom Domain
* Copy the external IP ( e.g. 10.1.1.1 ) for later setup.
* Enter the domain name and validate it ( the first validation attempt fails until DNS is updated )
* Open your domain registrar's website, e.g. godaddy.com ( that is my domain registrar )
* Choose the domain you want to bind. Unlock the domain.
* Navigate to Zone tab
* Remove the existing A record which points to the old hosting server
* Add a new A record pointing to Azure
* Add an additional TXT record pointing to Azure for Azure's verification
* Save all changes and wait for DNS
* Use the site https://digwebinterface.com to verify the new DNS records have propagated worldwide
* Back to azure portal to validate the domain name. Once it is valid, save and update it.
### Self-signed SSL setup
* Create a text file named serverauth.cnf, then copy the following content into it, and then save it in a working directory.
```ini
[ req ]
default_bits = 2048
default_keyfile = privkey.pem
distinguished_name = req_distinguished_name
attributes = req_attributes
x509_extensions = v3_ca
[ req_distinguished_name ]
countryName = Country Name (2 letter code)
countryName_min = 2
countryName_max = 2
stateOrProvinceName = State or Province Name (full name)
localityName = Locality Name (eg, city)
0.organizationName = Organization Name (eg, company)
organizationalUnitName = Organizational Unit Name (eg, section)
commonName = Common Name (eg, your app's domain name)
commonName_max = 64
emailAddress = Email Address
emailAddress_max = 40
[ req_attributes ]
challengePassword = <PASSWORD>
challengePassword_min = 4
challengePassword_max = 20
[ v3_ca ]
subjectKeyIdentifier=hash
authorityKeyIdentifier=keyid:always,issuer:always
basicConstraints = CA:false
keyUsage=nonRepudiation, digitalSignature, keyEncipherment
extendedKeyUsage = serverAuth
```
* In a command-line terminal, `CD` into your working directory and run the following command. Remember set your domain name as common name.
```
openssl req -sha256 -x509 -nodes -days 365 -newkey rsa:2048 -keyout myserver.key -out myserver.crt -config serverauth.cnf
```
* Export the certificate to a .pfx file by running the following command. When prompted, define a password to secure the .pfx file.
```
openssl pkcs12 -export -out myserver.pfx -inkey myserver.key -in myserver.crt
```
* Add SSL binding
* SNI SSL type for CNAME setup
* http to https redirect
* DEVELOPMENT TOOLS > Advanced Tools > Debug Console> Powershell
* Navigate from site > wwwroot .
* Edit Web.config. Add the URL rewrite rule into Web.config properly
```xml
<configuration>
<system.webServer>
<rewrite>
<rules>
<rule name="HTTP/S to HTTPS Redirect" enabled="true" stopProcessing="true">
<match url="(.*)" />
<conditions logicalGrouping="MatchAny">
<add input="{SERVER_PORT_SECURE}" pattern="^1$" />
<add input="{SERVER_PORT_SECURE}" pattern="^0$" />
</conditions>
<action type="Redirect" url="https://{HTTP_HOST}/OWA/" redirectType="Permanent" />
</rule>
</rules>
</rewrite>
</system.webServer>
</configuration>
```
* Restart the web app (optional). Clean up the browser cache and do a hard refresh
<file_sep>+++
title = "AWS: IAM"
weight = 1
description="IAM - Identity and Access Management: IAM Identity, Dos & Don'ts, Federation Integration"
+++
## IAM
AWS Identity and Access Management (IAM) enables you to manage access to AWS services and resources securely. Using IAM, you can create and manage AWS users and groups, and use permissions to allow and deny their access to AWS resources.
### Root User
* Every account has a root user in AWS. A root user is something that's created automatically for you whenever you create an AWS account.
* Every single AWS account has a root user.
* The trouble is that root users have unrestricted access to every service and resource that is in AWS inside of your account.
* The permissions of root user can't be restricted in any way.
### Dos and Don'ts
* You should not be accessing the root account on a regular basis, whether that's daily, weekly, or whatever.
* Make sure that you turn on multi-factor authentication on the root account. Multi-factor authentication used to be called two-factor authentication. It really just means that we know the password and we have some sort of a token that we will get a number generated. It's something that you might even use your smartphone for. But it now means that I have to know the username and password and I have to have this token that's going to generate a code. We'll see more about how you'll do that later.
* Make sure that you've disabled your root access keys. This isn't the interactive login for root, it has to do with how we can access the account programmatically.
* Make sure that you rotate the credentials. Just because we say don't log in doesn't mean set the password and then forget it.
* Don't share the root user credentials or password. If several people share them, all the audit logs show is that root logged in and did the job. Kind of dangerous.
* Make sure that you create a user that has administrative privileges that's assigned to you and that you know the password only.
### Features & Functions
* Allows user to have very secure access including through the use of multi-factor authentication and federation.
* Grant user a lot of granular control over the specific resources.
* Grant temporary access to different people.
* Simplify the number of logins by using federated identities
* Integrate the IAM solution with all of the different products that AWS offers.
### MFA
* MFA stands for multi-factor authentication
* Extra layer of security.
* Protects against imposters, somebody who just happened to guess the right password or happened to shoulder surf and watch somebody key in their username and password.
### IAM User
___It is an entity that you create in AWS. The IAM user represents the person or service who uses the IAM user to interact with AWS. A primary use for IAM users is to give people the ability to sign in to the AWS Management Console for interactive tasks and to make programmatic requests to AWS services using the API or CLI. A user in AWS consists of a name, a password to sign into the AWS Management Console, and up to two access keys that can be used with the API or CLI.___
* Receive its own unique identifier, aka, ARN, which is an acronym, of course, which stands for Amazon Resource Name. Now, each one of these user accounts will have its own set of unique credentials, which again might be a common-sense factor, but still everybody needs their own username and password.
* Access the appropriate resources by using policies.
* Very easy to remove.
* Set up specific permissions. It's a good practice.
### IAM Policy
* A policy is just a JSON-formatted document. It details what you can do, and it includes information from four different categories (typically Effect, Action, Resource, and Condition).
* We can attach the policy to IAM identities. And those identities could be IAM user accounts, group accounts, or IAM roles.
### Federation
#### IAM Role
___It is very similar to a user, in that it is an identity with permission policies that determine what the identity can and cannot do in AWS. However, a role does not have any credentials (password or access keys) associated with it.___
* A role is used to assign temporary permissions
* Policies are associated with IAM Roles
* Roles are assumed by people or applications
#### IAM Group
___It is a collection of IAM users. You can use groups to specify permissions for a collection of users, which can make those permissions easier to manage for those users.___
#### Cross Accounts
* Flexible management allows me to take multiple accounts
* It's all token based, so it really optimizes the whole process of setting up security.
* Manage a lot of different services between multiple accounts, as well as between business units
* Steps:
* There are two different AWS accounts. There is one user in one account and we have some resources in another.
* Create a role that exists within the account that owns the resources.
* That role, which permits access to the resources, can then be assumed by the person or group in the other account by virtue of what we call sts:AssumeRole, i.e. a temporary security credential.
* Flow chart
{{<mermaid>}}
graph BT
EC2(Elastice Computer Cloud)
S3(Simple Storage Service)
RDS(Relational Database Service)
temporary_security_credential-->IAM_User
IAM_Role --> temporary_security_credential([sts:AssumeRole])
temporary_security_credential;
subgraph Account_1
IAM_User(IAM User or Group ABC)
IAM_User --> EC2
IAM_User --> S3
IAM_User --> RDS
end
subgraph Account_2
IAM_Role(IAM Role with IAM Policies attached)
IAM_Role --> EC2
IAM_Role --> S3
IAM_Role --> RDS
end
{{</mermaid>}}
* Samples:
The policy below, which allows `sts:AssumeRole`, will be attached to the IAM User or Group in Account 2
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "VisualEditor0",
"Effect": "Allow",
"Action": "sts:AssumeRole",
"Resource": "arn:aws:iam::*:role/<Role_Of_Account_1>"
},
{
"Sid": "VisualEditor1",
"Effect": "Allow",
"Action": "sts:GetSessionToken",
"Resource": "*"
}
]
}
```
#### SAML
* SAML 2.0 is the one that you'll use for things like Active Directory. It stands for Security Assertion Markup Language.
* It's an open standard. The idea is to create an open-standards-based approach that works with as many identity providers (idPs) as possible, things like Auth0, Microsoft Active Directory, and Shibboleth, etc.
* Use existing corporate credentials for authentication and authorization in the AWS console, CLI, or API calls.
* Steps:
* Set up a SAML 2.0-based federation.
* Permit a given identity to log in to the console, use CLI commands, or make API calls. This requires an identity provider, such as Active Directory.
* When the user authenticates with Active Directory, Active Directory issues a SAML assertion document confirming the user is who they claim to be, and passes it over so that AWS accepts the identity when the user goes to access resources.
* The user logs in again to this identity provider, which checks against the local LDAP identity store (again, Active Directory).
* On confirmation, a token is issued that allows us to talk to Amazon's Security Token Service (STS), which in turn allows us to assume a particular role within AWS.
* STS passes back temporary credentials, and the user is able to authenticate and gain access to AWS resources.
* The federated user doesn't even need an AWS account of their own.
#### AWS DS
* There are three different varieties.
* Simple Active Directory for smaller, limited use cases.
* AWS Directory Service for Microsoft Active Directory, it's a mouthful. That's a fully-featured Active Directory.
* Active Directory Connector, which just facilitates or front-ends the authentication request; the actual authentication is sent back across your VPN to your on-premises directory.
<file_sep>+++
title = "AWS : CLI - 3"
description = "AWS CLI & Security Group"
weight=2
+++
### AWS CLI & Security Group
Sometimes it is annoying to update the rules of a security group one by one whenever your public IP address changes. The following script makes such changes easier.
The script only updates the SSH / RDP rules of the specified security groups. SSH and RDP are the most common protocols that allow an admin to access a remote EC2 instance.
* Preparation
- Update **OLD_IPS** with your old IP address
- Update **RDP_SG_LIST** and **SSH_SG_LIST** with your actual security group IDs
- Update **PROFILE** if your default profile is different
```bash
# Profile
PROFILE=default
# Log file
LOG=aws_sg.log
# Old IP list - Update your old IP address here.
OLD_IPS=(
10.100.0.0
)
__show_sg_ids() {
aws ec2 --profile $PROFILE describe-security-groups \
--output json \
--filters "Name=group-name,Values=*Bastion*" \
--query 'SecurityGroups[*].{Name:GroupName,ID:GroupId,permissions:IpPermissions[*]}' | jq
}
__get_perm() {
PROTOCOL=$1
if [[ $PROTOCOL == "ssh" ]]; then
PERM='[{"IpProtocol":"tcp","FromPort":22,"ToPort":22,"IpRanges":[{"CidrIp":"IP_ADDRESS/32"}]}]'
elif [[ $PROTOCOL == "rdp" ]]; then
PERM='[{"IpProtocol":"tcp","FromPort":3389,"ToPort":3389,"IpRanges":[{"CidrIp":"IP_ADDRESS/32"}]}]'
fi
echo $PERM
}
__get_desc() {
PROTOCOL=$1
if [[ $PROTOCOL == "ssh" ]]; then
DESC='[{"IpProtocol":"tcp","FromPort":22,"ToPort":22,"IpRanges":[{"CidrIp":"IP_ADDRESS/32","Description":"Harry"}]}]'
elif [[ $PROTOCOL == "rdp" ]]; then
DESC='[{"IpProtocol":"tcp","FromPort":3389,"ToPort":3389,"IpRanges":[{"CidrIp":"IP_ADDRESS/32","Description":"Harry"}]}]'
fi
echo $DESC
}
__show_ips() {
aws ec2 --profile $PROFILE \
describe-security-groups \
--output json \
--query 'SecurityGroups[*].{Name:GroupName,ID:GroupId,permissions:IpPermissions[*]}' | grep -i "Harry" -C 2
}
__update_sg() {
PROTOCOL=$1
SGID=$2
OIP=$3
NIP=$4
echo $PROFILE $SG $OIP $NIP | tee -a $LOG
PERM=$(__get_perm $PROTOCOL)
DESC=$(__get_desc $PROTOCOL)
OLD_PERM=${PERM/"IP_ADDRESS"/$OIP}
NEW_PERM=${PERM/"IP_ADDRESS"/$NIP}
NEW_DESC=${DESC/"IP_ADDRESS"/$NIP}
echo $OLD_PERM | tee -a $LOG
echo $NEW_PERM | tee -a $LOG
echo $NEW_DESC | tee -a $LOG
aws ec2 --profile $PROFILE \
revoke-security-group-ingress \
--group-id $SGID --ip-permissions $OLD_PERM
aws ec2 --profile $PROFILE \
authorize-security-group-ingress \
--group-id $SGID \
--ip-permissions $NEW_PERM
aws ec2 --profile $PROFILE \
update-security-group-rule-descriptions-ingress \
--group-id $SGID --ip-permissions $NEW_DESC
aws ec2 --profile $PROFILE \
describe-security-groups \
--output json \
--group-ids $SGID | jq
}
# Update the rule with RDP
update_rdp_sg() {
OIP=$1
NIP=$2
RDP_SG_LIST=(
sg-0123456789
sg-9876543210
)
echo " :::::::::::: PROFILE - rdp :::::::::::: " | tee -a $LOG
for SG in "${RDP_SG_LIST[@]}"; do
__update_sg rdp $SG $OIP $NIP
done
__show_ips
}
# Update the rule with SSH
update_ssh_sg() {
OIP=$1
NIP=$2
SSH_SG_LIST=(
sg-aaaaaaaaaaa
sg-bbbbbbbbbbb
)
echo " :::::::::::: PROFILE - ssh :::::::::::: " | tee -a $LOG
for SG in "${SSH_SG_LIST[@]}"; do
__update_sg ssh $SG $OIP $NIP
done
__show_ips
}
main() {
echo 'Start...' $(date) | tee -a $LOG
PROFILE=$1
echo "profile $PROFILE " | tee -a $LOG
echo 'You can pass profile name as 1st parameter to overwrite the default setting.'
for OLD_IP in ${OLD_IPS[@]}; do
NEW_IP=$(curl ifconfig.me)
echo Old IP $OLD_IP | tee -a $LOG
echo New IP $NEW_IP | tee -a $LOG
# Update RDP bastion
update_rdp_sg $OLD_IP $NEW_IP
# Update SSH bastion
update_ssh_sg $OLD_IP $NEW_IP
done
echo "DONE $(date) !!!!!!!!!! " | tee -a $LOG
}
main $@
```
* How to use
> ./update_sg.sh <profile_name>
```
./update_sg.sh profile_A
./update_sg.sh profile_B
```
<file_sep>+++
date = "2016-01-10T14:59:31+11:00"
title = "Ubuntu 14 -- desktop, extra tools"
description = "Post-installation for Ubuntu 14 desktop - Part 2"
draft = false
+++
## Prerequisites
* Install Ubuntu 14 Desktop
* Internet is available
## Install chrome
```bash
wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
sudo dpkg -i google-chrome-stable_current_amd64.deb
```
## Install Umake
```bash
sudo add-apt-repository ppa:ubuntu-desktop/ubuntu-make
sudo apt-get update
sudo apt-get install ubuntu-make
```
## Install IDEs via Umake
```bash
umake ide pycharm
umake web visual-studio-code
umake android android-studio
```
## General prerequisites
```bash
sudo sed -i "/^## deb .*partner/ s/^## //" /etc/apt/sources.list && sudo apt-get update
sudo apt-get install geany byobu p7zip-full gimp pdfshuffler scribus \
filezilla lftp ubuntu-restricted-extras vlc pyrenamer \
imagemagick hugin darktable skype avidemux
```
## Remove Games
```bash
sudo apt-get remove aisleriot gnome-mahjongg gnomine gnome-sudoku
```
## Geany themes
```bash
cd ~/Downloads
git clone https://github.com/codebrainz/geany-themes.git
mkdir ~/.config/geany/colorschemes
cp ~/Downloads/geany-themes/colorschemes/* ~/.config/geany/colorschemes/
rm -rf ~/Downloads/geany-themes
```
## Cloud
> from: http://www.webupd8.org/2014/06/install-copycom-client-in-ubuntu-or.html
```bash
sudo add-apt-repository ppa:paolorotolo/copy
sudo apt-get update
sudo apt-get install copy
sudo /opt/copy-client/CopyAgent -installOverlay
nautilus -q
copy
```
## Data processing
```bash
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys E084DAB9
sudo add-apt-repository 'deb http://star-www.st-andrews.ac.uk/cran/bin/linux/ubuntu trusty/'
sudo apt-get update
sudo apt-get install spyder python-numpy python-numpy-doc sqlite3 \
python-scipy python-matplotlib python-matplotlib-doc r-base git-core
```
## Don't forget to use your own name and email!
```bash
git config --global user.name "<NAME>"
git config --global user.email "<EMAIL>"
```
## Maps and GIS software
```bash
sudo apt-get install python-software-properties
sudo add-apt-repository 'deb http://qgis.org/debian trusty main'
gpg --keyserver keyserver.ubuntu.com --recv DD45F6C3
gpg --export --armor DD45F6C3 | sudo apt-key add -
sudo apt-get update
sudo apt-get install qgis python-qgis qgis-plugin-grass grass-gui grass-doc \
libgdal1-dev libproj-dev gpsbabel
```
## Latex type stuff
```bash
sudo apt-get install jabref ibus-qt4 texlive texlive-latex-extra \
texlive-humanities texlive-fonts-extra latex-beamer
sudo apt-get -f install
```
## Package download and install (Texmaker and RStudio)
```bash
wget http://www.xm1math.net/texmaker/texmaker_ubuntu_14.04_4.4.1_amd64.deb
wget http://download1.rstudio.org/rstudio-0.98.1102-amd64.deb
sudo dpkg -i *.deb
sudo rm *.deb
sudo apt-get update && sudo apt-get upgrade
sudo apt-get autoremove
```
```bash
sudo nano /etc/update-manager/release-upgrades
```
<file_sep>+++
date = "2018-12-04T14:59:31+11:00"
title = "SFTP & GPG "
description = "SFTP & GPG"
+++
### SFTP
SFTP (SSH File Transfer Protocol) is a secure file transfer protocol. It runs over the SSH protocol. It supports the full security and authentication functionality of SSH.
SFTP has pretty much replaced legacy FTP as a file transfer protocol, and is quickly replacing FTP/S. It provides all the functionality offered by these protocols, but more securely and more reliably, with easier configuration. There is basically no reason to use the legacy protocols any more.
SFTP also protects against password sniffing and man-in-the-middle attacks. It protects the integrity of the data using encryption and cryptographic hash functions, and authenticates both the server and the user.
#### Login SFTP with pass phrase
* Install expect
```
# Ubuntu
sudo apt install expect
# RH/CentOS
sudo yum install expect
```
* Set passphrase to global variable
```
export PASSPHRASE=<PASSWORD>
```
* Create a script - sftp.sh
```bash
expect -c "
spawn sftp -oPORT=9022 -oIdentityFile=~/.ssh/Your_SSH_Private_Key -oPasswordAuthentication=no [email protected]
expect \"*\"
expect \"*\"
expect \"*\"
expect -nocase \"*passphrase*\" { send \"$PASSPHRASE\r\"; interact }
"
```
### GPG
GnuPG is a complete and free implementation of the OpenPGP standard as defined by RFC4880 (also known as PGP). GnuPG allows you to encrypt and sign your data and communications; it features a versatile key management system, along with access modules for all kinds of public key directories. GnuPG, also known as GPG, is a command line tool with features for easy integration with other applications. A wealth of frontend applications and libraries are available. GnuPG also provides support for S/MIME and Secure Shell (ssh).
#### Generate GPG key pair
* Generate key pair
```
# Open terminal & run command below
gpg --gen-key
# Select RSA from options
Please select what kind of key you want:
(1) RSA and RSA (default)
(2) DSA and Elgamal
(3) DSA (sign only)
(4) RSA (sign only)
# Use 4096
RSA keys may be between 1024 and 4096 bits long.
What keysize do you want? (2048) 4096
# Pick option 0
Please specify how long the key should be valid.
0 = key does not expire
<n> = key expires in n days
<n>w = key expires in n weeks
<n>m = key expires in n months
<n>y = key expires in n years
# Enter user id, email and comment
GnuPG needs to construct a user ID to identify your key.
Real name: [Your_User_ID]
Email address: [<EMAIL>]
Comment: {Your_comment]
# Enter passphrase & confirm it
You need a passphrase to protect your secret key
Enter passphrase: [your_passphrase]
Repeat passphrase: [your_passphrase]
```
#### Export public key
* The third party needs your public key to verify your signature (and to encrypt messages to you), so you need to export your public key.
```
gpg --armor --output [export_file_name] --export [your_user_id]
## get fingerprint
gpg --fingerprint
```
* Pass your public key and fingerprint to the third party
#### Import public key
* Import public key file
```
gpg --import [public_key_file]
```
* Sign the public key with your priviate key
```
## Get user id of public key
gpg --list-keys
gpg --sign-key [public_key_user_id]
## Confirm to sign
Really sign? (y/N) y
## Enter passphrase
Enter passphrase: [your_passphrase]
```
* Update the trust level
```
gpg --edit-key [public_key_user_id]
## Enter trust after the command prompt
Command> trust
## Choose option 5
Please decide how far you trust this user to correctly
verify other users' keys (by looking at passports,
checking fingerprints from different sources...)?
1 = I don't know or won't say
2 = I do NOT trust
3 = I trust marginally
4 = I trust fully
5 = I trust ultimately
m = back to the main menu
Your decision? 5
# Enter q to quit
Command> q
```
#### PGP Encryption
* Encrypt a file
```
gpg --armor --encrypt \
  --recipient [public_key_user_id] --sign --local-user [your_user_id] \
  --output [encrypted_filename] [original_filename]
## Enter passphrase
Enter passphrase: [your_passphrase]
```
#### PGP Decryption
* Decrypt a file
```
gpg --output [decrypted_filename] --decrypt [encrypted_filename]
## Enter passphrase
Enter passphrase: [your_passphrase]
```<file_sep>+++
title = "AWS Note - 4"
description = "Reset EC2 password"
draft="true"
+++
## Reset EC2 password
Amazon Virtual Private Cloud (Amazon VPC) enables you to launch AWS resources into a virtual network that you've defined. This virtual network closely resembles a traditional network that you'd operate in your own data center, with the benefits of using the scalable infrastructure of AWS.
### EC2 with Linux
### EC2 with Windows
<file_sep>+++
date = "2016-11-15T16:56:21+11:00"
title = "Angularjs Webpack ES6 Starter"
description = "This starter was inspired by another similar angular webpack starter repository"
+++
## Summary
It simply includes font-awesome and bootstrap for people who don't want to use bootstrap-webpack or font-awesome-webpack. I find it saves us a lot of effort when creating a prototype, since we don't need a spectacular UI.
* This starter uses angular 1.5 for anyone who wants to build components.
* This repo follows mvc patterns instead of component pattern.
* ES6, and ES7 support with babel.
* Development server with live reload.
* Production builds with cache busting.
* Testing environment using karma to run tests and jasmine as the framework.
* Code coverage when tests are run.
* Include font-awesome without font-awesome-loader.
* Include Bootstrap 3 without bootstrap-loader.
* No gulp and no grunt, just npm scripts.
## Structure of starter
```bash
\path\to\angularjs-webpack-es6-starter
| .babelrc <-// default setting es2015.
| karma.conf.js <-// tests and report setup
| webpack.config.js <-// webpack config
\---src
| tests.webpack.js
|
+---app
| | app.html <-// app view
| | app.js <-// app module
| | app.routes.js <-// app route to manage all routes
| | app.runner.js <-// app runner for state change enhancement
| | app.spec.js <-// app spec file for testing
| |
| +---common <-// common module for whole app
| | | common.js
| | | common.spec.js
| | +---directives <-// common directives for whole app
| | | appUiDirectives.js
| | | appUiDirectives.spec.js
| | | commonDirectives.js
| | | commonDirectives.spec.js
| | +---services <-// common views for whole app
| | | ApiService.js
| | | ApiService.spec.js
| | | UtilService.js
| | | UtilService.spec.js
| | \---views <-// contains common views
| | footer-view.html
| | header-view.html
| | sidebar-view.html
| | topbar-view.html
| \---main <-// main module containing feature views, e.g. dashboard
| \---dashboard
| +---controllers
| | dashboardController.js
| | dashboardController.spec.js
| \---views
| dashboard-view.html
|
+---public <-// built-in fonts, css, images
| | index.html
| +---fonts
| | +--- ...
| \---img
| +--- favicon.ico
\---style <-// css files including customized css
```
## Browse [Repository](https://github.com/harryho/angularjs-webpack-es6-starter.git)
<file_sep>+++
date = "2017-02-03T10:59:31+11:00"
title = "CentOS 7 Server"
description = "CentOS 7 Server note"
draft = false
+++
Prelude
> This article is mainly to help experienced users install and set up CentOS 7 Server.
## Prerequisites
* You are familiar with CentOS, or at least you have some experience working on a Linux system.
* You are familiar with basic bash/shell commands
## Things to do after installing CentOS server
* How to setup your server
## Firewall setup
```bash
sudo firewall-cmd --permanent --add-port=22/tcp
sudo firewall-cmd --permanent --add-port=21/tcp
sudo firewall-cmd --permanent --add-port=80/tcp
sudo firewall-cmd --permanent --add-port=443/tcp
sudo firewall-cmd --permanent --add-port=8080/tcp
sudo firewall-cmd --reload
```
## SSH server setup
`!!! For production environment, SSH should be secured by the CA`
* Install SSH if it is not done yet
```bash
sudo yum install openssh openssh-server openssh-clients openssl-libs
```
* Configure SSH
```bash
## backup default config
sudo cp /etc/ssh/sshd_config /etc/ssh/sshd_config.ori
sudo chmod a-w /etc/ssh/sshd_config.ori
## use any editor to update sshd_config
sudo vi /etc/ssh/sshd_config
## uncomment PasswordAuthentication yes to allow remote password login
## Password authentication is only for test environment
## enable ssh auto-start on boot and restart the service
sudo systemctl enable sshd
sudo systemctl restart sshd
```
## Update Time Zone if it is incorrect
```bash
ls -l /etc/localtime ## check the time zone
timedatectl list-timezones | grep New_York ## find the time zone by the city
sudo timedatectl set-timezone America/New_York
```
## Install Git
* Option 1: You can use `yum` to install git, but it is quite out-of-date. The version of git is 1.8.x
```bash
sudo yum install git
git --version
```
* Option 2: Download the latest stable release of Git and compile the software from source. (__Recommended__)
### Install build tools
```
sudo yum groupinstall "Development Tools"
sudo yum install gettext-devel openssl-devel perl-CPAN perl-devel \
zlib-devel libcurl-devel expat-devel
sudo yum install yum-utils
```
### Download the latest release
```
wget https://github.com/git/git/archive/v2.x.x.tar.gz -O git.tar.gz
tar -zxf git.tar.gz
cd git-*
make configure
./configure --prefix=/usr/local
sudo make install
git --version
```
## Setup a better Vim
```bash
sudo yum install vim-enhanced
```
## Install Tmux
```bash
sudo yum install tmux
```
* Most useful tmux commands
> Ctrl+b " — split pane horizontally.
>
> Ctrl+b % — split pane vertically.
>
> Ctrl+b arrow key — switch pane.
>
> Hold Ctrl+b, don’t release it and hold one of the arrow keys — resize pane.
>
> Ctrl+b c — (c)reate a new window.
>
> Ctrl+b , — rename the current window.
>
> Ctrl+b n — move to the (n)ext window.
>
> Ctrl+b p — move to the (p)revious window.
## Install python 3
You will only find Python 2 on CentOS by default. In order to install the latest Python 3, we need to install `IUS`, which stands for Inline with Upstream Stable.
```bash
sudo yum -y install https://centos7.iuscommunity.org/ius-release.rpm
sudo yum -y install python36u
### Install development package
sudo yum -y install python-devel python36u-devel
```
## Install nodejs
* Nodejs 6.x
```bash
sudo yum -y install nodejs
```
* Nodejs 8.x
```bash
curl --silent --location https://rpm.nodesource.com/setup_8.x | sudo bash -
sudo yum -y install nodejs
```
* Upgrade NPM
```
sudo npm install -g npm
sudo npm install -g typescript
sudo npm install -g yarn
```
## install docker CE (CentOS 7)
```bash
## add repo
sudo yum-config-manager \
--add-repo \
https://download.docker.com/linux/centos/docker-ce.repo
## check docker.list
yum list docker-ce --showduplicates | sort -r
## install docker engine
sudo yum install docker-ce
docker -v
```
## Install JDK 8
* Install OpenJDK 8 via yum (alternatively, you can download the Oracle JDK from Oracle's website).
```bash
## At the confirmation prompt, enter y
## then RETURN to continue with the installation.
sudo yum install java-1.8.0-openjdk-devel
java -version
```
* Test JDK with a simple HelloWorld program
```java
import java.util.Calendar;
class HelloWorld {
public static void main(String[] args) {
Calendar cal = Calendar.getInstance();
int year = cal.get(Calendar.YEAR);
int month = cal.get(Calendar.MONTH) + 1;
int day = cal.get(Calendar.DATE);
int hour = cal.get(Calendar.HOUR_OF_DAY);
int minute = cal.get(Calendar.MINUTE);
String username = System.getProperty("user.name");
System.out.println(username+ ": Hello World! ");
System.out.println(year + "/" + month + "/" + day + " " + hour + ":" + minute);
}
}
```
* Compile and run the program
```bash
javac HelloWorld.java
java HelloWorld
```
## Install Go
* Install Go
```bash
cd /tmp
curl -LO https://redirector.gvt1.com/edgedl/go/go1.9.2.linux-amd64.tar.gz
## check hash
shasum -a 256 go*linux-amd64.tar.gz
## install tar ball
sudo tar -C /usr/local -xvzf go1.9.2.linux-amd64.tar.gz
```
* Setup GOROOT
```bash
cd /etc/profile.d
## Create a path.sh script
sudo vi path.sh
```
* Copy following code into `path.sh`
```
export PATH=$PATH:/usr/local/go/bin
```
* Setup local GOBIN, GOPATH
```bash
export GOBIN="$HOME/projects/go/bin"
export GOPATH="$HOME/projects/go/src"
export PATH
```
* Create a simple `hello.go` file to test
```go
package main
import (
"fmt"
"log"
"os/user"
)
func main(){
user, err := user.Current()
if err != nil {
log.Fatal(err)
}
fmt.Printf(user.Name + " said : Hello World! \n" )
}
```
* Run the program
```bash
go run $GOPATH/hello.go
go install $GOPATH/hello.go
$GOBIN/hello
```
## Install Cmake
```bash
sudo yum install epel-release
sudo yum install cmake3
sudo ln -s /usr/bin/cmake3 /usr/bin/cmake
```
## Install Rust
```bash
curl -f -L https://static.rust-lang.org/rustup.sh -O
sh rustup.sh
rustc --version
```
## Install PHP 7
* install and enable EPEL and Remi repository
```bash
sudo yum install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
sudo yum install http://rpms.remirepo.net/enterprise/remi-release-7.rpm
```
* install `yum-utils`
```
sudo yum install yum-utils
```
* Enable PHP 7 repo
```
sudo yum-config-manager --enable remi-php72
```
* Install PHP
```
sudo yum install php php-mcrypt php-cli php-gd php-curl php-mysql \
php-ldap php-zip php-fileinfo
```
## Install clang
```bash
sudo yum install llvm
sudo yum install clang
```
<file_sep>package main
import (
_ "bufio"
_ "bytes"
_ "compress/gzip"
_ "encoding/gob"
_ "flag"
"fmt"
_ "io/ioutil"
"math"
"os"
_ "os"
"reflect"
_ "strings"
)
// func main() {
// fmt.Println("hello world")
// // var arr1 [5]int;
// // for i := range arr1 {
// // fmt.Printf(" index = %d , value = %d \n", i, arr1[i])
// // }
// // var arr2 = [...]int{0,0,0,0,0}
// // for i := range arr2{
// // fmt.Printf(" index = %d , value = %d \n", i, arr1[i])
// // }
// arr := [5]int{1, 2, 3, 4, 5}
// slice := arr[0:2]
// fmt.Printf(" slice = %v \n", slice)
// slice = arr[0:]
// fmt.Printf(" slice = %v \n", slice)
// slice = arr[:2]
// fmt.Printf(" slice = %v \n", slice)
// slice = arr[1:5]
// fmt.Printf(" slice = %v \n", slice)
// slice = arr[:5]
// fmt.Printf(" slice = %v \n", slice)
// sl_from := []int{1, 2, 3}
// sl_to := make([]int, 10)
// n := copy(sl_to, sl_from)
// fmt.Println(sl_to)
// // output: [1 2 3 0 0 0 0 0 0 0]
// fmt.Printf("Copied %d elements\n", n) // n == 3
// sl3 := []int{1, 2, 3}
// sl3 = append(sl3, 4, 5, 6)
// fmt.Println(sl3)
// str := "golang"
// chars := []byte(str)
// chars[1] = '0'
// newStr := string(chars)
// fmt.Println(newStr)
// }
// type Stack struct {
// }
// func (*Stack) Pop(st []int) int {
// v := 0
// for ix := len(st) - 1; ix >= 0; ix-- {
// if v = st[ix]; v != 0 {
// st[ix] = 0
// }
// }
// return v
// }
// func main() {
// result := 0
// for i:=0; i <= 10; i++ {
// result = fibonacci(i)
// fmt.Printf("fibonacci(%d) is: %d\n", i, result)
// }
// }
// func fibonacci(n int) (res int) {
// if n <= 1 {
// res = 1
// } else {
// res = fibonacci(n-1) + fibonacci(n-2)
// }
// return
// }
// func main() {
// // callback(1, Add)
// plus := func(x, y int) int { return x + y }
// fmt.Println(plus(1, 2)) // 3
// // invoke func immediatley
// val := func(x, y int) int { return x + y }(1, 2) // 3
// fmt.Println(val)
// }
func Add(a, b int) {
fmt.Printf("The sum of %d and %d is: %d\n", a, b, a+b)
}
func callback(y int, f func(int, int)) {
f(y, 2)
// this becomes Add(1, 2)
}
type TagType struct { //tags
field1 bool "An important answer"
field2 string "The name of the thing"
field3 int "How much there are"
}
func refTag(tt TagType, ix int) {
ttType := reflect.TypeOf(tt)
ixField := ttType.Field(ix)
fmt.Printf("%v\n", ixField.Tag)
}
type innerS struct {
in1 int
in2 int
}
type outerS struct {
b int
c float32
int
// anonymous field
innerS // anonymous field
}
func Func() {
outer2 := outerS{6, 7.5, 60, innerS{5, 10}}
fmt.Println("outer2 is: ", outer2)
}
type Shape interface {
Area() float32
}
type Square struct {
side float32
}
type Circle struct {
radius float32
}
func (sq *Square) Area() float32 {
return sq.side * sq.side
}
func (c *Circle) Area() float32 {
return c.radius * c.radius * math.Pi
}
type Rectangle struct {
length, width float32
}
func (r Rectangle) Area() float32 {
return r.length * r.width
}
func classifier(items ...interface{}) {
for i, x := range items {
switch x.(type) {
case bool:
fmt.Printf("param #%d is a bool\n", i)
case float64:
fmt.Printf("param #%d is a float64\n", i)
case int, int64:
fmt.Printf("param #%d is an int\n", i)
case nil:
fmt.Printf("param #%d is nil\n", i)
case string:
fmt.Printf("param #%d is a string\n", i)
default:
fmt.Printf("param #%d’s type is unknown\n", i)
}
}
}
func modifyValByReflect() {
var x float64 = 3.4
v := reflect.ValueOf(x) // Pass value
fmt.Println("settability of v:", v.CanSet()) // false
v = reflect.ValueOf(&x) // Note: take the address of x.
fmt.Println("type of v:", v.Type()) // float64
fmt.Println("settability of v:", v.CanSet()) // false
v = v.Elem()
fmt.Println("The Elem of v is: ", v) // <float64 Value>
fmt.Println("settability of v:", v.CanSet()) // true
v.SetFloat(3.1415) // this works!
fmt.Println(v.Interface())
fmt.Println(v) // <float64 Value>
}
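// NOTE (editor's sketch): this scratch file had no active main function, so the
// package would not build on its own. The minimal main below only exercises the
// uncommented helpers defined above; the commented-out experiments further down
// are left untouched.
func main() {
	callback(1, Add)                     // prints: The sum of 1 and 2 is: 3
	refTag(TagType{true, "thing", 1}, 0) // prints the tag of field1
	Func()                               // prints the outerS value
	classifier(13, -14.3, "BELGIUM", nil, false)
	modifyValByReflect() // reflection-based value update

	shapes := []Shape{Rectangle{5, 3}, &Square{5}, &Circle{2.5}}
	for _, s := range shapes {
		fmt.Println("Area of this shape is:", s.Area())
	}
}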
// func main() {
// shapes := []Shape{Rectangle{5, 3}, &Square{5}, &Circle{5}}
// fmt.Println("Looping through shapes for area ...")
// for n, _ := range shapes {
// fmt.Println("Shape details: ", shapes[n])
// fmt.Println("Area of this shape is: ", shapes[n].Area())
// }
// var shape Shape
// shape = &Square{4}
// switch t := shape.(type) {
// case *Square:
// fmt.Printf("Type Square %T with value %v\n", t, t)
// case *Circle:
// fmt.Printf("Type Circle %T with value %v\n", t, t)
// case *Rectangle:
// fmt.Printf("Type Rectangle %T with value %v\n", t, t)
// case nil:
// fmt.Println("nil value: nothing to check?")
// default:
// fmt.Printf("Unexpected type %T", t)
// }
// }
// func main() {
// inputFile, inputError := os.Open("input.dat")
// if inputError != nil {
// fmt.Printf("An error occurred on opening the inputfile\n" +
// "Does the file exist?\n" +
// "Have you got acces to it?\n")
// return // exit the function on error
// }
// defer inputFile.Close()
// inputReader := bufio.NewReader(inputFile)
// for {
// inputString, readerError := inputReader.ReadString('\n')
// if readerError == io.EOF {
// return
// }
// fmt.Printf("The input was: %s", inputString)
// }
// }
// var (
// firstName, lastName, s string
// i int
// f float32
// input = "56.12 / 5212 / Go"
// format = "%f / %d / %s"
// )
// func main() {
// fmt.Println("Please enter your full name: ")
// fmt.Scanln(&firstName, &lastName)
// // fmt.Scanf("%s %s", &firstName, &lastName)
// fmt.Printf("Hi %s %s!\n", firstName, lastName) // Hi <NAME>
// fmt.Sscanf(input, format, &f, &i, &s)
// fmt.Println("From the string we read: ", f, i, s)
// // ouwtput: From the string we read: 56.12 5212 Go
// }
// func main() {
// inputFile, inputError := os.Open("hello.go")
// if inputError != nil {
// fmt.Printf("An error occurred on opening the inputfile\n" +
// "Does the file exist?\n" +
// "Have you got acces to it?\n")
// return // exit the function on error
// }
// defer inputFile.Close()
// inputReader := bufio.NewReader(inputFile)
// for {
// inputString, readerError := inputReader.ReadString('\n')
// if readerError == io.EOF {
// return
// }
// fmt.Printf("The input was: %s", inputString)
// }
// }
// func main() {
// inputFile := "hello.go"
// outputFile := "hello.go.txt"
// buf, err := ioutil.ReadFile(inputFile)
// if err != nil {
// fmt.Fprintf(os.Stderr, "File Error: %s\n", err)
// // panic(err.Error())
// }
// fmt.Printf("%s\n", string(buf))
// err = ioutil.WriteFile(outputFile, buf, 0x644)
// if err != nil {
// panic(err.Error())
// }
// // diff hello.go hello.go.txt ## return nothing
// }
// func main() {
// file, err := os.Open("csv_data.txt")
// if err != nil {
// panic(err)
// }
// defer file.Close()
// var col1, col2, col3 []string
// for {
// var v1, v2, v3 string
// _, err := fmt.Fscanln(file, &v1, &v2, &v3)
// // scans until newline
// if err != nil {
// break
// }
// col1 = append(col1, v1)
// col2 = append(col2, v2)
// col3 = append(col3, v3)
// }
// fmt.Println(" Col 1 ",col1)
// fmt.Println(" Col 3 ",col2)
// fmt.Println(" Col 3 ",col3)
// }
// func main() {
// fName := "public.zip"
// var r *bufio.Reader
// fi, err := os.Open(fName)
// if err != nil {
// fmt.Fprintf(os.Stderr, "%v, Can’t open %s: error: %s\n", os.Args[0],
// fName, err)
// os.Exit(1)
// }
// fz, err := gzip.NewReader(fi)
// if err != nil {
// r = bufio.NewReader(fi)
// } else {
// r = bufio.NewReader(fz)
// }
// for {
// line, err := r.ReadString('\n')
// if err != nil {
// fmt.Println("Done reading file")
// os.Exit(0)
// }
// fmt.Println(line)
// }
// }
// func main() {
// outputFile, outputError := os.OpenFile("output.txt",
// os.O_WRONLY|os.O_CREATE, 0666)
// if outputError != nil {
// fmt.Printf("An error occurred with file creation\n")
// return
// }
// defer outputFile.Close()
// outputWriter := bufio.NewWriter(outputFile)
// outputString := "hello world!\n"
// for i := 0; i < 10; i++ {
// outputWriter.WriteString(outputString)
// }
// outputWriter.Flush()
// }
// func main() {
// os.Stdout.WriteString("hello, world\n")
// f, _ := os.OpenFile("output.txt", os.O_CREATE|os.O_WRONLY, 0)
// defer f.Close()
// for i := 0; i < 10; i++ {
// f.WriteString("hello world!\n")
// }
// }
// func main() {
// CopyFile("target_hello.txt", "hello.go")
// fmt.Println("Copy done!")
// }
// func CopyFile(dstName, srcName string) (written int64, err error) {
// src, err := os.Open(srcName)
// if err != nil {
// return
// }
// defer src.Close()
// dst, err := os.OpenFile(dstName, os.O_WRONLY|os.O_CREATE, 0644)
// if err != nil {
// return
// }
// defer dst.Close()
// return io.Copy(dst, src)
// }
// func main() {
// who := "Harry "
// if len(os.Args) > 1 {
// who += strings.Join(os.Args[1:], " ")
// }
// fmt.Println("Good Morning", who)
// }
// var NewLine = flag.Bool("n", false, "print on newline")
// // echo -n flag, of type *bool
// const (
// Space = " "
// Newline = "\n"
// )
// func main() {
// flag.PrintDefaults()
// flag.Parse()
// var s string = ""
// for i := 0; i < flag.NArg(); i++ {
// if i > 0 {
// s += Space
// }
// s += flag.Arg(i)
// }
// if *NewLine { // -n is parsed, flag becomes true
// s += Newline
// }
// os.Stdout.WriteString(s)
// }
// func cat(r *bufio.Reader) {
// for {
// buf, err := r.ReadBytes('\n')
// if err == io.EOF {
// break
// }
// fmt.Fprintf(os.Stdout, "%s", buf)
// }
// return
// }
// func cat(f *os.File) {
// const NBUF = 512
// var buf [NBUF]byte
// for {
// switch nr, err := f.Read(buf[:]); true {
// case nr < 0:
// fmt.Fprintf(os.Stderr, "cat: error reading: %s\n", err.Error())
// os.Exit(1)
// case nr == 0: // EOF
// return
// case nr > 0:
// if nw, ew := os.Stdout.Write(buf[0:nr]); nw != nr {
// fmt.Fprintf(os.Stderr, "cat: error writing: %s\n",
// ew)
// }
// }
// }
// }
// func main() {
// flag.Parse()
// if flag.NArg() == 0 {
// cat(os.Stdin)
// }
// for i := 0; i < flag.NArg(); i++ {
// f, err := os.Open(flag.Arg(i))
// if err != nil {
// fmt.Fprintf(os.Stderr, "%s:error reading from %s: %s\n",
// os.Args[0], flag.Arg(i), err.Error())
// os.Exit(1)
// }
// cat(f)
// f.Close()
// }
// }
// func main() {
// // unbuffered: os.Stdout implements io.Writer
// fmt.Fprintf(os.Stdout, "%s\n", "hello world! - unbuffered")
// // buffered:
// buf := bufio.NewWriter(os.Stdout)
// // and now so does buf:
// fmt.Fprintf(buf, "%s\n", "hello world! - buffered")
// buf.Flush()
// }
// type Address struct {
// Type string
// City string
// Country string
// }
// type VCard struct {
// FirstName string
// LastName string
// Addresses []*Address
// Remark string
// }
// func main() {
// pa := &Address{"private", "Aartselaar", "Belgium"}
// wa := &Address{"work", "Boom", "Belgium"}
// vc := VCard{"Jan", "Kersschot", []*Address{pa, wa}, "none"}
// // fmt.Printf("%v: \n", vc)
// // {<NAME> [0x126d2b80 0x126d2be0] none}:
// // JSON format:
// js, _ := json.Marshal(vc)
// fmt.Printf("JSON format: %s", js)
// // using an encoder:
// file, _ := os.OpenFile("vcard.json", os.O_CREATE|os.O_WRONLY, 0)
// defer file.Close()
// enc := json.NewEncoder(file)
// err := enc.Encode(vc)
// if err != nil {
// fmt.Println("Error in encoding json")
// }
// }
// var t, token xml.Token
// var err error
// func main() {
// input :=
// "<Person><FirstName>Laura</FirstName><LastName>Lynn</LastName></Person>"
// inputReader := strings.NewReader(input)
// p := xml.NewDecoder(inputReader)
// for t, err = p.Token(); err == nil; t, err = p.Token() {
// switch token := t.(type) {
// case xml.StartElement:
// name := token.Name.Local
// fmt.Printf("Token name: %s\n", name)
// for _, attr := range token.Attr {
// attrName := attr.Name.Local
// attrValue := attr.Value
// fmt.Printf("An attribute is: %s %s\n", attrName,
// attrValue)
// // ...
// }
// case xml.EndElement:
// fmt.Println("End of token")
// case xml.CharData:
// content := string([]byte(token))
// fmt.Printf("This is the content: %v\n", content)
// // ...
// default:
// // ...
// }
// }
// }
// type P struct {
// X, Y, Z int
// Name string
// }
// type Q struct {
// X, Y *int32
// Name string
// }
// func main() {
// // Initialize the encoder and decoder. Normally enc and dec would
// // be bound to network connections and the encoder and decoder
// // would run in different processes.
// var network bytes.Buffer
// // Stand-in for a network connection
// enc := gob.NewEncoder(&network) // Will write to network.
// dec := gob.NewDecoder(&network)
// // Will read from network.
// // Encode (send) the value.
// err := enc.Encode(P{3, 4, 5, "Pythagoras"})
// if err != nil {
// log.Fatal("encode error:", err)
// }
// // Decode (receive) the value.
// var q Q
// err = dec.Decode(&q)
// if err != nil {
// log.Fatal("decode error:", err)
// }
// fmt.Printf("%q: {%d,%d}\n", q.Name, *q.X, *q.Y)
// }
// type Address struct {
// Type string
// City string
// Country string
// }
// type VCard struct {
// FirstName string
// LastName string
// Addresses []*Address
// Remark string
// }
// func main() {
// pa := &Address{"private", "Aartselaar", "Belgium"}
// wa := &Address{"work", "Boom", "Belgium"}
// vc := VCard{"Jan", "Kersschot", []*Address{pa, wa}, "none"}
// // fmt.Printf("%v: \n", vc)
// // {<NAME> [0x126d2b80 0x126d2be0] none}:
// // using an encoder:
// file, _ := os.OpenFile("vcard.gob", os.O_CREATE|os.O_WRONLY, 0)
// defer file.Close()
// enc := gob.NewEncoder(file)
// err := enc.Encode(vc)
// if err != nil {
// log.Println("Error in encoding gob")
// }
// }
// func main() {
// hasher := sha1.New()
// io.WriteString(hasher, "test")
// b := []byte{}
// fmt.Printf("Result: %x\n", hasher.Sum(b))
// fmt.Printf("Result: %d\n", hasher.Sum(b))
// hasher.Reset()
// data := []byte("We shall overcome!")
// n, err := hasher.Write(data)
// if n != len(data) || err != nil {
// log.Printf("Hash write error: %v / %v", n, err)
// }
// checksum := hasher.Sum(b)
// fmt.Printf("Checksum: %x\n", checksum)
// }
//---deadlock -----
// func f1(in chan int) {
// fmt.Println(<-in)
// }
// func main() {
// out := make(chan int)
// out <- 2
// go f1(out)
// }
// func compute(ch chan int) {
// ch <- someComputation()
// // when it completes, signal on the channel.
// }
// func main() {
// ch := make(chan int) // allocate a channel.
// go compute(ch) // start something in a goroutine
// doSomethingElseForAWhile()
// result := <-ch
// }
// func main() {
// runtime.GOMAXPROCS(4)
// go func() {
// for i := 0; i < 10; i++ {
// println("Hello")
// time.Sleep(500)
// }
// }()
// go func() {
// for i := 0; i < 10; i++ {
// println("World")
// time.Sleep(500)
// }
// }()
// time.Sleep(999999)
// }
// func main() {
// runtime.GOMAXPROCS(4)
// start := time.Now()
// stockSymbols := []string{
// "goog",
// "msft",
// "aapl",
// "bbry",
// "hpq",
// "vz",
// "t",
// "tmus",
// "s",
// }
// numComplete := 0
// for _, symbol := range stockSymbols {
// go func(symbol string) {
// resp, _ := http.Get("http://dev.markitondemand.com/Api/v2/Quote?symbol=" + symbol)
// defer resp.Body.Close()
// body, _ := ioutil.ReadAll(resp.Body)
// quote := new(QuoteResponse)
// 			xml.Unmarshal(body, &quote)
// fmt.Printf("%s: $%.2f\n", quote.Name, quote.LastPrice)
// numComplete++
// }(symbol)
// }
// for numComplete < len(stockSymbols) {
// time.Sleep(10 * time.Millisecond)
// }
// elapsed := time.Since(start)
// fmt.Printf("Execution Time: %s", elapsed)
// }
// type QuoteResponse struct {
// Status string
// Name string
// LastPrice float32
// Change float32
// ChangePercent float32
// TimeStamp string
// MSDate float32
// MarketCap int
// Volume int
// ChangeYTD float32
// ChangePercentYTD float32
// High float32
// Low float32
// Open float32
// }
// func main() {
// ch := make(chan string, 2)
// ch <- "Hello"
// ch <- "Hello"
// close(ch)
// fmt.Println(<-ch)
// fmt.Println(<-ch)
// ch <- "Hello"
// }
// func main() {
// stream := pump()
// go suck(stream)
// // the above 2 lines can be shortened to: go suck( pump() )
// time.Sleep(1e9)
// }
// func pump() chan int {
// ch := make(chan int)
// go func() {
// for i := 0; ; i++ {
// ch <- i
// }
// }()
// return ch
// }
// func suck(ch chan int) {
// for {
// fmt.Println(<-ch)
// }
// }
// func main() {
// suck(pump())
// time.Sleep(1e9)
// }
// func pump() chan int {
// ch := make(chan int)
// go func() {
// for i := 0; ; i++ {
// ch <- i
// }
// }()
// return ch
// }
// func suck(ch chan int) {
// go func() {
// for v := range ch {
// fmt.Println(v)
// }
// }()
// }
// func (c *container) Iter() <-chan items {
// ch := make(chan item)
// go func() {
// for i := 0; i < c.Len(); i++ {
// // or use a for-range loop
// ch <- c.items[i]
// }
// }()
// return ch
// }
// func main() {
// sendChan := make(chan int)
// receiveChan := make(chan string)
// go processChannel(sendChan, receiveChan)
// }
// func processChannel(in <-chan int, out chan<- string) {
// for inValue := range in {
// result := strconv.Itoa(inValue) // processing inValue
// // ...
// out <- result
// }
// }
// // Send the sequence 2, 3, 4, ... to returned channel
// func generate() chan int {
// ch := make(chan int)
// go func() {
// for i := 2; ; i++ {
// ch <- i
// }
// }()
// return ch
// }
// // Filter out input values divisible by prime, send rest to returned channel
// func filter(in chan int, prime int) chan int {
// out := make(chan int)
// go func() {
// for {
// if i := <-in; i%prime != 0 {
// out <- i
// }
// }
// }()
// return out
// }
// func sieve() chan int {
// out := make(chan int)
// go func() {
// ch := generate()
// for {
// prime := <-ch
// ch = filter(ch, prime)
// out <- prime
// }
// }()
// return out
// }
// func main() {
// primes := sieve()
// for {
// fmt.Println(<-primes)
// }
// }
// func main() {
// runtime.GOMAXPROCS(2) // in goroutine_select2.go
// ch1 := make(chan int)
// ch2 := make(chan int)
// go pump1(ch1)
// go pump2(ch2)
// go suck(ch1, ch2)
// time.Sleep(1e9)
// }
// func pump1(ch chan int) {
// for i := 0; ; i++ {
// ch <- i * 2
// }
// }
// func pump2(ch chan int) {
// for i := 0; ; i++ {
// ch <- i + 5
// }
// }
// func suck(ch1 chan int, ch2 chan int) {
// for {
// select {
// case v := <-ch1:
// fmt.Printf("Received on channel 1: %d\n", v)
// case v := <-ch2:
// fmt.Printf("Received on channel 2: %d\n", v)
// }
// }
// }
// var resume chan int
// func integers() chan int {
// yield := make(chan int)
// count := 0
// go func() {
// for {
// yield <- count
// count++
// }
// }()
// return yield
// }
// func generateInteger() int {
// return <-resume
// }
// func main() {
// resume = integers()
// fmt.Println(generateInteger()) //=> 0
// fmt.Println(generateInteger()) //=> 1
// fmt.Println(generateInteger()) //=> 2
// }
// type Any interface{}
// type EvalFunc func(Any) (Any, Any)
// func main() {
// evenFunc := func(state Any) (Any, Any) {
// oldSate := state.(int)
// newState := oldSate + 2
// return oldSate, newState
// }
// even := BuildLazyIntEvaluator(evenFunc, 0)
// for i := 0; i < 10; i++ {
// fmt.Printf("%vth even: %v\n", i, even())
// }
// }
// func BuildLazyEvaluator(evalFunc EvalFunc, initState Any) func() Any {
// retValChan := make(chan Any)
// loopFunc := func() {
// var actState Any = initState
// var retVal Any
// for {
// retVal, actState = evalFunc(actState)
// retValChan <- retVal
// }
// }
// retFunc := func() Any {
// return <-retValChan
// }
// go loopFunc()
// return retFunc
// }
// func BuildLazyIntEvaluator(evalFunc EvalFunc, initState Any) func() int {
// evalFn := BuildLazyEvaluator(evalFunc, initState)
// return func() int {
// return evalFn().(int)
// }
// }
// type Request struct {
// a, b int
// replyChan chan int // reply channel inside the Request
// }
// type binOp func(a, b int) int
// func run(op binOp, req *Request) {
// req.replyChan <- op(req.a, req.b)
// }
// func server(op binOp, service chan *Request, quitChan chan bool) {
// for {
// select {
// case req := <-service:
// go run(op, req)
// case <-quitChan:
// return
// }
// }
// }
// func startServer(op binOp) (service chan *Request, quitChan chan bool) {
// service = make(chan *Request)
// quitChan = make(chan bool)
// go server(op, service, quitChan)
// return service, quitChan
// }
// func main() {
// adder, quitChan := startServer(func(a, b int) int { return a + b })
// const N = 100
// var reqs [N]Request
// for i := 0; i < N; i++ {
// req := &reqs[i]
// req.a = i
// req.b = i + N
// req.replyChan = make(chan int)
// adder <- req
// }
// // checks:
// for i := N - 1; i >= 0; i-- { // doesn’t matter what order
// if <-reqs[i].replyChan != N+2*i {
// fmt.Println("fail at", i)
// } else {
// fmt.Println("Request ", i, "is ok!")
// }
// }
// quitChan <- true
// fmt.Println("done")
// }
// type Person struct {
// Name string
// salary float64
// chF chan func()
// }
// func NewPerson(name string, salary float64) *Person {
// p := &Person{name, salary, make(chan func())}
// go p.backend()
// return p
// }
// func (p *Person) backend() {
// for f := range p.chF {
// f()
// }
// }
// // Set salary.
// func (p *Person) SetSalary(sal float64) {
// p.chF <- func() { p.salary = sal }
// }
// // Retrieve salary.
// func (p *Person) Salary() float64 {
// fChan := make(chan float64)
// p.chF <- func() { fChan <- p.salary }
// return <-fChan
// }
// func (p *Person) String() string {
// return "Person - name is: " + p.Name + " - salary is: " + strconv.FormatFloat(p.Salary(), 'f', 2, 64)
// }
// func main() {
// bs := NewPerson("<NAME>", 2500.5)
// fmt.Println(bs)
// bs.SetSalary(4000.25)
// fmt.Println("Salary changed:")
// fmt.Println(bs)
// }
// var values = [5]int{10, 11, 12, 13, 14}
// func main() {
// // version A:
// fmt.Println("\nVersion A:")
// for ix := range values { // ix is the index
// func() {
// fmt.Print(ix, " ")
// }() // call closure, prints each index
// }
// fmt.Println()
// // version B: same as A, but call closure as a goroutine
// fmt.Println("\nVersion B:")
// for ix := range values {
// go func() {
// fmt.Print(ix, " ")
// }()
// }
// fmt.Println()
// time.Sleep(5e9)
// // version C: the right way
// fmt.Println("\n\nVersion C:")
// for ix := range values {
// go func(ix interface{}) {
// fmt.Print(ix, " ")
// }(ix)
// }
// fmt.Println()
// time.Sleep(5e9)
// // version D: print out the values:
// fmt.Println("\n\nVersion D:")
// for ix := range values {
// val := values[ix]
// go func() {
// fmt.Print(val, " ")
// }()
// }
// time.Sleep(1e9)
// }
// func badCall() {
// panic("bad end")
// }
// func test() {
// defer func() {
// if e := recover(); e != nil {
// fmt.Printf("Panicking %s\r\n", e)
// }
// }()
// badCall()
// fmt.Printf("After bad call\r\n")
// }
// func main() {
// fmt.Printf("Calling test\r\n")
// test()
// fmt.Printf("Test completed\r\n")
// }
// func main() {
// // 1) os.StartProcess //
// /*********************/
// /* Linux: */
// env := os.Environ()
// procAttr := &os.ProcAttr{
// Env: env,
// Files: []*os.File{
// os.Stdin,
// os.Stdout,
// os.Stderr,
// },
// }
// pid, err := os.StartProcess("/bin/ls", []string{"ls", "-l"}, procAttr)
// if err != nil {
// fmt.Printf("Error %v starting process!", err) //
// os.Exit(1)
// }
// fmt.Printf("The process id is %v", pid)
// /* Output:
// The process id is &{21275 0 0 {{0 0} 0 0 0 0}}The process id is &{21276 0 0 {{0 0} 0 0 0 0}}total 54
// -rwxrwxrwx 1 root root 250 Sep 21 19:33 csv_data.txt
// -rwxrwxrwx 1 root root 25227 Oct 4 23:34 hello.go
// -rwxrwxrwx 1 root root 6708 Sep 21 10:25 hello.go.txt
// -rwxrwxrwx 1 root root 130 Sep 21 11:08 output.txt
// -rwxrwxrwx 1 root root 8898 Sep 21 12:10 target_hello.txt
// -rwxrwxrwx 1 root root 1619 Sep 22 14:40 urlshorten.go.txt
// -rwxrwxrwx 1 root root 182 Sep 21 13:50 vcard.json
// */
// // 2nd example: show all processes
// pid, err = os.StartProcess("/bin/ps", []string{"-e", "opid,ppid,comm"}, procAttr)
// if err != nil {
// fmt.Printf("Error %v starting process!", err) //
// os.Exit(1)
// }
// fmt.Printf("The process id is %v", pid)
// // 2) cmd.Run //
// /***************/
// cmd := exec.Command("gedit") // this opens a gedit-window
// err = cmd.Run()
// if err != nil {
// fmt.Printf("Error %v executing command!", err)
// os.Exit(1)
// }
// fmt.Printf("The command is %v", cmd)
// }
// const (
// AvailableMemory = 10 << 20 // 10 MB, for example
// AverageAverageMemoryPerRequest = 10 << 10 // 10 KB
// MAXREQS = AvailableMemory / AverageAverageMemoryPerRequest // here amounts to 1000
// )
// var sem = make(chan int, MAXREQS)
// type Request struct {
// a, b int
// replyc chan int
// }
// func process(r *Request) {
// // Do something
// // May take a long time and use a lot of memory or CPU
// }
// func handle(r *Request) {
// process(r)
// // signal done: enable next request to start
// // by making 1 empty place in the buffer
// <-sem
// }
// func Server(queue chan *Request) {
// for {
// sem <- 1
// // blocks when channel is full (1000 requests are active)
// // so wait here until there is capacity to process a request
// // (doesn’t matter what we put in it)
// request := <-queue
// go handle(request)
// }
// }
// func main() {
// fmt.Println(" AvailableMemory ", AvailableMemory)
// fmt.Println(" AverageAverageMemoryPerRequest ", AverageAverageMemoryPerRequest)
// queue := make(chan *Request)
// go Server(queue)
// }
// const NCPU = 4
// func DoAll() {
// sem := make(chan int, NCPU) // Buffering optional but sensible.
// for i := 0; i < NCPU; i++ {
// go DoPart(sem)
// }
// // Drain the channel sem, waiting for NCPU tasks to complete
// for i := 0; i < NCPU; i++ {
// <-sem // wait for one task to complete
// }
// // All done.
// }
// func DoPart(sem chan int) {
// // do the part of the computation
// sem <- 1 // signal that this piece is done
// }
// func main() {
// runtime.GOMAXPROCS(NCPU)
// DoAll()
// }
// type Person struct {
// Name string
// salary float64
// chF chan func()
// }
// func NewPerson(name string, salary float64) *Person {
// p := &Person{name, salary, make(chan func())}
// go p.backend()
// return p
// }
// func (p *Person) backend() {
// for f := range p.chF {
// f()
// }
// }
// // Set salary.
// func (p *Person) SetSalary(sal float64) {
// p.chF <- func() { p.salary = sal }
// }
// // Retrieve salary.
// func (p *Person) Salary() float64 {
// fChan := make(chan float64)
// p.chF <- func() { fChan <- p.salary }
// return <-fChan
// }
// func (p *Person) String() string {
// return "Person - name is: " + p.Name + " - salary is: " + strconv.
// FormatFloat(p.Salary(), 'f', 2, 64)
// }
// func main() {
// bs := NewPerson("<NAME>", 2500.5)
// fmt.Println(bs)
// bs.SetSalary(4000.25)
// fmt.Println("Salary changed:")
// fmt.Println(bs)
// }
// /* Output Person - name is: <NAME> - salary is: 2500.50
// Salary changed:
// Person - name is: <NAME> - salary is: 4000.25 *
// */
// func main() {
// fmt.Println("sync", testing.Benchmark(BenchmarkChannelSync).String())
// fmt.Println("buffered", testing.Benchmark(BenchmarkChannelBuffered).String())
// }
// func BenchmarkChannelSync(b *testing.B) {
// ch := make(chan int)
// go func() {
// for i := 0; i < b.N; i++ {
// ch <- i
// }
// close(ch)
// }()
// for _ = range ch {
// }
// }
// func BenchmarkChannelBuffered(b *testing.B) {
// ch := make(chan int, 128)
// go func() {
// for i := 0; i < b.N; i++ {
// ch <- i
// }
// close(ch)
// }()
// for _ = range ch {
// }
// }
// /* Output:
// Windows: N Time 1 op Operations per sec
// sync 1000000 2443 ns/op --> 409 332 / s
// buffered 1000000 4850 ns/op --> 810 477 / s
// Linux:
// */
func main() {
file, err := os.Open("csv_data.txt")
if err != nil {
fmt.Printf("An error occurred on opening the inputfile\n" +
"Does the file exist?\n" +
"Have you got acces to it?\n")
return
}
defer file.Close()
cat(file)
// iReader := bufio.NewReader(file)
// for {
// str, err := iReader.ReadString('\n')
// if err != nil {
// return // error or EOF
// }
// fmt.Printf("The input was: %s", str)
// }
}
func cat(f *os.File) {
const NBUF = 512
var buf [NBUF]byte
for {
switch nr, er := f.Read(buf[:]); true {
case nr < 0:
fmt.Fprintf(os.Stderr, "cat: error reading from %s: %s\n", f, er)
os.Exit(1)
case nr == 0: // EOF
return
case nr > 0:
if nw, ew := os.Stdout.Write(buf[0:nr]); nw != nr {
fmt.Fprintf(os.Stderr, "cat: error writing from %s: %s\n", f, ew)
}
}
}
}
<file_sep>+++
title = "AWS: VPC - 3"
description = "VPC Peering, Direct Connect, Transit Gateway"
weight=5
+++
## VPC Part 3
### Endpoint
A VPC endpoint enables you to privately connect your VPC to supported AWS services and VPC endpoint services powered by AWS PrivateLink without requiring an internet gateway, NAT device, VPN connection, or AWS Direct Connect connection.
Endpoints are virtual devices. They are horizontally scaled, redundant, and highly available VPC components. They allow communication between instances in your VPC and services without imposing availability risks or bandwidth constraints on your network traffic.
* Endpoint service — Your own application in your VPC. Other AWS principals can create a connection from their VPC to your endpoint service
* Gateway endpoint — A gateway endpoint is a gateway that you specify as a target for a route in your route table for traffic destined to a supported AWS service.
> Amazon S3
> DynamoDB
* Interface endpoint — An interface endpoint is an elastic network interface with a private IP address from the IP address range of your subnet that serves as an entry point for traffic destined to a supported service.
#### Limits of Interface Endpoint
* For each interface endpoint, you can choose only one subnet per Availability Zone.
* Interface endpoints support the use of policies for services that support endpoint policies.
* An interface endpoint supports TCP traffic only.
* Endpoints are supported within the same Region only.
* Endpoints support IPv4 traffic only.
#### Limits of Gateway Endpoint
* You cannot use a prefix list ID in an outbound rule in a network ACL to allow or deny outbound traffic to the service specified in an endpoint. If your network ACL rules restrict traffic, you must specify the CIDR block (IP address range) for the service instead.
* Endpoints are supported within the same Region only.
* Endpoints support IPv4 traffic only.
* Cannot transfer an endpoint from one VPC to another.
* Endpoint connections cannot be extended out of a VPC.
* Must enable DNS resolution in your VPC, or if you're using your own DNS server, ensure that DNS requests to the required service (such as Amazon S3) are resolved correctly to the IP addresses maintained by AWS.
### Endpoint Service
#### Steps to the connections
* Create a Network Load Balancer for your application in your VPC and configure it for each subnet (Availability Zone) in which the service should be available.
* Create a VPC endpoint service configuration and specify your Network Load Balancer.
* Grant permissions to specific service consumers (AWS accounts, IAM users, and IAM roles) to create a connection to your endpoint service.
* A service consumer that has been granted permissions creates an interface endpoint to your service, optionally in each Availability Zone in which you configured your service.
* To activate the connection, accept the interface endpoint connection request. By default, connection requests must be manually accepted. However, you can configure the acceptance settings for your endpoint service so that any connection requests are automatically accepted.
* To help achieve high availability for service consumers that use zonal DNS hostnames to access the service, you can enable cross-zone load balancing. Cross-zone load balancing enables the load balancer to distribute traffic across the registered targets in all enabled Availability Zones.
#### Endpoint Service DNS Names
AWS generates endpoint-specific DNS hostnames that you can use to communicate with the service. These names include the VPC endpoint ID, the Availability Zone name and Region Name, for example, vpce-1234-abcdev-us-east-1.vpce-svc-123345.us-east-1.vpce.amazonaws.com. By default, your consumers access the service with that DNS name and usually need to modify the application configuration.
#### Endpoint Service Limitations
* An endpoint service supports IPv4 traffic over TCP only.
* Service consumers can use the endpoint-specific DNS hostnames to access the endpoint service, or the private DNS name.
* If an endpoint service is associated with multiple Network Load Balancers, then for a specific Availability Zone, an interface endpoint establishes a connection with one load balancer only.
* For the endpoint service, the associated Network Load Balancer can support 55,000 simultaneous connections or about 55,000 connections per minute to each unique target (IP address and port).
* Availability Zones in your account might not map to the same locations as Availability Zones in another account.
* Review the service-specific limits for your endpoint service.
#### VPC Endpoint Policies
A VPC endpoint policy is an IAM resource policy that you attach to an endpoint when you create or modify the endpoint. If you do not attach a policy when you create an endpoint, AWS attaches a default policy for you that allows full access to the service.
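As a rough sketch of how an endpoint is created programmatically, the snippet below uses the aws-sdk-go v1 EC2 client to create an S3 gateway endpoint. The SDK choice, the region and the VPC/route-table IDs are assumptions for illustration only; omitting `PolicyDocument` leaves the default full-access policy in place.
```go
package main

import (
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/ec2"
)

func main() {
	// Credentials and region come from the usual default chain (env vars, ~/.aws/...).
	sess := session.Must(session.NewSession(aws.NewConfig().WithRegion("us-east-1")))
	svc := ec2.New(sess)

	// Placeholder IDs — replace with your own VPC and route table.
	out, err := svc.CreateVpcEndpoint(&ec2.CreateVpcEndpointInput{
		VpcEndpointType: aws.String("Gateway"),
		VpcId:           aws.String("vpc-0abc1234def567890"),
		ServiceName:     aws.String("com.amazonaws.us-east-1.s3"),
		RouteTableIds:   []*string{aws.String("rtb-0abc1234def567890")},
		// No PolicyDocument given: AWS attaches the default full-access policy.
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("created endpoint:", aws.StringValue(out.VpcEndpoint.VpcEndpointId))
}
```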
### VPC Peering
A VPC peering connection is a networking connection between two VPCs that enables you to route traffic between them privately. Instances in either VPC can communicate with each other as if they are within the same network. You can create a VPC peering connection between your own VPCs, with a VPC in another AWS account, or with a VPC in a different AWS Region.
AWS uses the existing infrastructure of a VPC to create a VPC peering connection; it is neither a gateway nor an AWS Site-to-Site VPN connection, and does not rely on a separate piece of physical hardware. There is no single point of failure for communication or a bandwidth bottleneck.
### VPN Connections
VPN connectivity option | Description
----- |:-----------
AWS Site-to-Site VPN |You can create an IPsec VPN connection between your VPC and your remote network. On the AWS side of the Site-to-Site VPN connection, a virtual private gateway provides two VPN endpoints (tunnels) for automatic failover. You configure your customer gateway on the remote side of the Site-to-Site VPN connection. For more information, see the AWS Site-to-Site VPN User Guide, and the AWS Site-to-Site VPN Network Administrator Guide.
AWS Client VPN | AWS Client VPN is a managed client-based VPN service that enables you to securely access your AWS resources in your on-premises network. With AWS Client VPN, you configure an endpoint to which your users can connect to establish a secure TLS VPN session. This enables clients to access resources in AWS or an on-premises from any location using an OpenVPN-based VPN client. For more information, see the AWS Client VPN User Guide.
AWS VPN CloudHub | If you have more than one remote network (for example, multiple branch offices), you can create multiple AWS Site-to-Site VPN connections via your virtual private gateway to enable communication between these networks. For more information, see Providing Secure Communication Between Sites Using VPN CloudHub in the AWS Site-to-Site VPN User Guide.
Third party software VPN appliance | You can create a VPN connection to your remote network by using an Amazon EC2 instance in your VPC that's running a third party software VPN appliance. AWS does not provide or maintain third party software VPN appliances; however, you can choose from a range of products provided by partners and open source communities. Find third party software VPN appliances on the AWS Marketplace. <file_sep>+++
date = "2016-12-07T16:56:21+11:00"
title = "Vue 2 CRM Project"
description="Vue2Crm is a reusable Vue.js CRM starter project for real-world business based on Vue 2 PWA template"
+++
## Summary
**Vue2Crm** is a reusable Vue.js CRM starter project for real-world business based on Vue 2 PWA template with Vuetify.
The goal of this project is to create a reusable project for real-world business. To achieve this target, we need a solution which includes an authentication process, a RESTful API and a simple but elegant UI design.
## __Features__
* This starter project is built with the Vue 2 PWA template from scratch.
* The whole UI is built on Vuetify.
* It includes Vuex and Axios as well.
* It uses JSON-Server as a fake RESTful API. (You can simply replace it with your own API.)
## Structure of Vue2Crm
``` ini
path\to\vue2crm
+---build <-// webpack files
+---config <-// configuration of dev or prod environment
+---db <-// json files for json-server
| +---db.json <-// dummy db
| \---routes.json <-// configure fake restful api
+---screenshots
+---src <-// vue components
| +---components
| | +---404.vue
| | +---About.vue
| | +---Customers.vue
| | +---Customer.vue
| | +---Orders.vue
| | +---Order.vue
| | +---Login.vue
| | \---...
| +---router <-// vue-router
| +---utils
| | +---auth.js <-// auth service
| | +---backend-api.js <-// Axios instance
| | +---store.js <-// Vuex
| \---stylus <-// Customize stylus
+---static <-// css, fonts, image files
| +---img
| \---manifest.json <-// PWA manifest file
\---test
+---e2e
\---unit
```
## Screenshots
> 
> 
> 
> 
## Browse [Repository](https://github.com/harryho/vue2crm.git)
## __Alternatives__
There are two similar projects built on Angular and React respectively. If you are interested in those technology stacks, you can find them below.
* [Angular4 Crm](/projects/angular4-crm).
* [React Redux Crm](/projects/react-crm).
<file_sep>+++
title="Good practice - 3"
description="Good practice advice - Part 3"
weight=22
+++
### Error
* How to stop a program when an error occurs:
```go
if err != nil {
fmt.Printf("Program stopping with error %v", err)
os.Exit(1)
}
// OR :
if err != nil {
panic("ERROR occurred: " + err.Error())
}
```
### Performance best practices and advice
* Use the initializing declaration form := wherever possible (in functions).
* Use bytes if possible instead of strings
* Use slices instead of arrays.
* Use arrays or slices instead of a map where possible (see ref. 15)
* Use for range over a slice if you only need the value and not the index; this is slightly faster than having to do a slice lookup for every element.
* When the array is sparse (containing many 0 or nil-values), using a map can result in lower memory consumption.
* Specify an initial capacity for maps (see the sketch after this list).
* When defining methods: use a pointer to a type (struct) as a receiver.
* Use constants or flags to extract constant values from the code.
* Use caching whenever possible when large amounts of memory are being allocated.
* Use template caching
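A minimal sketch of two of these tips — giving a map an initial capacity and using a pointer receiver — with made-up names and numbers:
```go
package main

import "fmt"

type Counter struct {
	hits map[string]int
}

// Pointer receiver: Add mutates the struct without copying it on every call.
func (c *Counter) Add(key string) {
	c.hits[key]++
}

func main() {
	// Give the map an initial capacity so it does not have to grow repeatedly.
	c := &Counter{hits: make(map[string]int, 1000)}
	for _, k := range []string{"a", "b", "a"} {
		c.Add(k)
	}
	fmt.Println(c.hits["a"]) // 2
}
```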
<file_sep>+++
title = "Struct & Interface"
description="Golang Introduction: Struct, Method, Interface & Reflection"
weight=3
+++
### Struct
> Go supports user-defined or custom types in the form of alias types or structs. A struct tries to represent a real-world entity with its properties. Structs are composite types, to be used when you want to define a type which consists of a number of properties, each having its own type and value, grouping pieces of data together.
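A minimal example of such a composite type (the Person type and its values are purely illustrative):
```go
package main

import "fmt"

// Person groups related pieces of data into one composite type.
type Person struct {
	Name string
	Age  int
}

func main() {
	p := Person{Name: "Alice", Age: 30} // initialize with field names
	p.Age++                             // read and modify a field
	fmt.Println(p.Name, p.Age)          // Alice 31
}
```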
#### Struct with tags
* A field in a struct can, apart from a name and a type, also optionally have a tag: this is a string attached to the field, which could be documentation or some other important label. The tag-content cannot be used in normal programming; only the package reflect can access it.
* Sample
```go
package main
import (
"fmt"
"reflect"
)
type TagType struct { //tags
field1 bool "An important answer"
field2 string "The name of the thing"
field3 int "How much there are"
}
func main() {
tt := TagType{true, "Barak Obama", 1}
for i := 0; i < 3; i++ {
refTag(tt, i)
}
}
func refTag(tt TagType, ix int) {
ttType := reflect.TypeOf(tt)
ixField := ttType.Field(ix)
fmt.Printf("%v\n", ixField.Tag)
}
```
#### Anonymous fields and embedded structs
* A struct can only have one anonymous field of each data type.
* The inner struct is simply inserted or “embedded” into the outer. This simple ‘inheritance’ mechanism provides a way to derive some or all of your implementation from another type or types.
```go
type innerS struct {
in1 int
in2 int
}
type outerS struct {
b int
c float32
int // anonymous field
innerS // anonymous field
}
func Func(){
outer2 := outerS{6, 7.5, 60, innerS{5, 10}}
fmt.Println("output : ", outer2) // output: {6 7.5 60 {5 10}}
}
```
#### Conflicting names
* The rules when there are two fields with the same name (see the sketch after this list):
* An outer name hides an inner name. This provides a way to override a field or method.
* If the same name appears twice at the same level, it is an error if the name is used by the program
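A small sketch of the first rule, with made-up type names: the outer field hides the embedded one, which stays reachable through the embedded type's name.
```go
package main

import "fmt"

type Base struct {
	Name string
}

type Derived struct {
	Base        // embedded struct with its own Name field
	Name string // hides Base.Name
}

func main() {
	d := Derived{Base: Base{Name: "inner"}, Name: "outer"}
	fmt.Println(d.Name)      // outer: the outer name wins
	fmt.Println(d.Base.Name) // inner: still reachable via the embedded type
}
```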
### Method
* method is a function that acts on variable of a certain type, called the receiver
* The receiver type can be (almost) anything, not only a struct type: any type can have methods, even a function type or alias types for int, bool, string or array. However, the receiver cannot be an interface type.
* sample
```go
type List []int
func (l List) Len() int { return len(l) }
func (l *List) Append(val int) { *l = append(*l, val) }
```
* Receiver is most often a pointer to the receiver_type for performance reasons.
#### Embedded methods
* There are basically 2 ways for doing this (see the sketch after this list):
* __Aggregation__ (or composition): include a named field of the type of the wanted functionality
* __Embedding__: Embed (anonymously) the type of the wanted functionality, like demonstrated
* The aggregated type requires a method to return the pointer; mostly the method looks like the type name, acting as a constructor method
* The embedded type does not need to be a pointer
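A minimal sketch contrasting the two approaches (Log, Job and Task are made-up names):
```go
package main

import "fmt"

type Log struct{ msgs []string }

func (l *Log) Write(msg string) { l.msgs = append(l.msgs, msg) }

// Aggregation: a named field; calls go through the field.
type Job struct {
	log *Log
}

// Constructor-like method returning a pointer, as described above.
func NewJob() *Job { return &Job{log: &Log{}} }

// Embedding: the anonymous field promotes Write onto Task itself.
type Task struct {
	*Log
}

func main() {
	j := NewJob()
	j.log.Write("via aggregation")

	t := Task{&Log{}}
	t.Write("via embedding")                  // promoted method
	fmt.Println(len(j.log.msgs), len(t.msgs)) // 1 1
}
```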
#### Multiple inheritance
```go
type Phone struct {}
func (*Phone) Call() string {
return "Ring Ring !!! "
}
type Camera struct {}
func (*Camera) TakePhoto() string {
return "Take a photo !!!"
}
type CameraPhone struct {
Phone
Camera
}
func main () {
cp := new(CameraPhone)
fmt.Println( cp.Call())
fmt.Println( cp.TakePhoto())
}
```
### Interface
* Go contains the very flexible concept of interfaces, with which a lot of aspects of object-orientation can be made available. Interfaces in Go provide a way to specify the behavior of an object: if something can do this, then it can be used here.
* Sample of interface syntax
```go
type Namer interface {
Method1(param_list) return_type
Method2(param_list) return_type
...
}
```
* Type switch
```go
type Shape interface {
Area() float32
}
type Square struct {
side float32
}
type Circle struct {
radius float32
}
func (sq *Square) Area() float32 {
return sq.side * sq.side
}
func (c *Circle) Area() float32 {
return c.radius * c.radius * math.Pi
}
type Rectangle struct {
length, width float32
}
func (r Rectangle) Area() float32 {
return r.length * r.width
}
func main() {
shapes := []Shape{Rectangle{5, 3}, &Square{5}, &Circle{5}}
fmt.Println("Looping through shapes for area ...")
for n, _ := range shapes {
fmt.Println("Shape details: ", shapes[n])
fmt.Println("Area of this shape is: ", shapes[n].Area())
}
var shape Shape
shape = &Square{4}
switch t := shape.(type) {
case *Square:
fmt.Printf("Type Square %T with value %v\n", t, t)
case *Circle:
fmt.Printf("Type Circle %T with value %v\n", t, t)
case *Rectangle:
fmt.Printf("Type Rectangle %T with value %v\n", t, t)
case nil:
fmt.Println("nil value: nothing to check?")
default:
fmt.Printf("Unexpected type %T", t)
}
}
// Looping through shapes for area ...
// Shape details: {5 3}
// Area of this shape is: 15
// Shape details: &{5}
// Area of this shape is: 25
// Shape details: &{5}
// Area of this shape is: 78.53982
// Type Square *main.Square with value &{4}
```
#### Empty Interface
* The empty or minimal interface has no methods and so doesn’t make any demands at all. So any variable of any type implements it (not only reference types, as with Object in Java/C#), and any or Any (sample code below) is really a good name as an alias and abbreviation!
```go
type Any interface{}
```
* Type classifier
```go
func classifier(items ...interface{}) {
for i, x := range items {
switch x.(type) {
case bool:
fmt.Printf("param #%d is a bool\n", i)
case float64:
fmt.Printf("param #%d is a float64\n", i)
case int, int64:
fmt.Printf("param #%d is an int\n", i)
case nil:
fmt.Printf("param #%d is nil\n", i)
case string:
fmt.Printf("param #%d is a string\n", i)
default:
fmt.Printf("param #%d’s type is unknown\n", i)
}
}
}
```
#### Interface to interface
* An interface value can also be assigned to another interface value, as long as the underlying value implements the necessary methods (see the sketch after this list).
* This conversion is checked at runtime, and when it fails a runtime error occurs: this is one of the dynamic aspects of Go, comparable to dynamic languages like Ruby and Python.
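A small sketch using the standard io interfaces: a value held in an io.ReadWriter can be assigned to an io.Reader, and the reverse direction is a checked type assertion at run time.
```go
package main

import (
	"bytes"
	"fmt"
	"io"
)

func main() {
	var rw io.ReadWriter = new(bytes.Buffer) // *bytes.Buffer has Read and Write

	var r io.Reader = rw // fine: every ReadWriter is also a Reader

	// Going the other way is checked at run time with a type assertion.
	if w, ok := r.(io.Writer); ok {
		fmt.Fprintln(w, "written through the asserted interface")
	}
	fmt.Print(rw.(*bytes.Buffer).String())
}
```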
### Reflection
#### Methods and types in reflect
* Reflection in computing is the ability of a program to examine its own structure, particularly through the types; it’s a form of metaprogramming.
* Two simple functions, reflect.TypeOf and reflect.ValueOf, retrieve Type and Value pieces out of any value (see the sketch below).
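A tiny illustration of the two functions:
```go
package main

import (
	"fmt"
	"reflect"
)

func main() {
	var x float64 = 3.4
	fmt.Println("type:", reflect.TypeOf(x)) // float64
	v := reflect.ValueOf(x)
	fmt.Println("kind:", v.Kind())   // float64
	fmt.Println("value:", v.Float()) // 3.4
}
```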
#### Modify a value through reflection
* Pass the address instead of copy of value
* Use the Elem() function work on it which indirects through the pointer
```go
func modifyValByReflect() {
var x float64 = 3.4
v := reflect.ValueOf(x) // Pass value
fmt.Println("Settability of v:", v.CanSet()) // false
v = reflect.ValueOf(&x) // Note: take the address of x.
fmt.Println("type of v:", v.Type()) // float64
fmt.Println("Settability of v:", v.CanSet()) // false
v = v.Elem()
fmt.Println("The Elem of v is: ", v) // <float64 Value>
fmt.Println("Settability of v:", v.CanSet()) // true
v.SetFloat(3.1415) // this works!
fmt.Println(v.Interface())
fmt.Println(v) // <float64 Value>
}
```
#### Dynamic typing
* A refactoring pattern that is very useful is extracting interfaces, thereby reducing the number of types and methods needed, without having to manage a whole class hierarchy as in more traditional class-based OO-languages.
* Go is the only one which combines interface values, static type checking (does a type implement the interface?), dynamic runtime conversion and no requirement for explicitly declaring that a type satisfies an interface (see the sketch after this list). This property also allows interfaces to be defined and used without having to modify existing code.
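A tiny sketch of that last point: bytes.Buffer never declares that it implements fmt.Stringer, yet it satisfies the interface simply by having the right method.
```go
package main

import (
	"bytes"
	"fmt"
)

func main() {
	var s fmt.Stringer // any type with a String() string method will do
	b := new(bytes.Buffer)
	b.WriteString("implicitly satisfied")
	s = b // no "implements" declaration needed anywhere
	fmt.Println(s.String())
}
```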
### OO of Go
* Encapsulation (data hiding): in contrast to other OO languages where there are 4 or more access-levels, Go simplifies this to only 2:
* package scope: ‘object’ is only known in its own package, how? it starts with a lowercase letter
* exported: ‘object’ is visible outside of its package, how? it starts with an uppercase letter A type can only have methods defined in its own package.
* Inheritance: how? composition: embedding of 1 (or more) type(s) with the desired behavior (fields and methods); multiple inheritance is possible through embedding multiple types
* Polymorphism: how? interfaces: a variable of a type can be assigned to a variable of any interface it implements. Types and interfaces are loosely coupled, again multiple inheritance is possible through implementing multiple interfaces. Go’s interfaces aren’t a variant on Java or C# interfaces, they’re much more: they are independent and are key to large-scale programming and adaptable, evolutionary design.
<file_sep>+++
title = "Hello World"
draft=false
+++
<!-- >*Hello World* is the classic starter project for any programmer. -->
>*After I tried [go hugo](https://gohugo.io/) a few years ago, I found it is a no-brainer WordPress alternative for managing my site with github. Honestly, I am not a passionate blogger. My blog is just personal notes and a gallery of projects and experiences, which come from work, hobby and individual projects. Github is definitely the developer's favorite rather than the mainstream blogger's, so I won't suggest everyone dump their WordPress. Compared to markdown and the command line, WordPress's UI is much more user friendly to most people. If you want to give it a shot on your own, you can check out my note - [Create a blog on GitHub Pages](/blogs/create-a-blog-on-github), which shows you how I did it on my machine and how to make it available on your github page.*
>*The `Hello World` home page comes from my old site. I am too lazy to write a new one. It has been updated a few times by adding some new programming languages, and it is not written for developers.*
```bash
Dutch : "Hello wereld",
English : "Hello world",
French : "Bonjour monde",
German : "Hallo Welt",
Greek : "γειά σου κόσμος",
Italian : "Ciao mondo",
Japanese : "こんにちは世界",
Korean : "여보세요 세계",
Mandarin : "你好世界",
Portuguese : "Olá mundo",
Russian : "Здравствулте мир",
Spanish : "Hola mundo"
```
The different languages above express the same meaning "Hello World". Basically most software engineers start by writing a "Hello World" application in whatever programming language they pick up first. I hope after you read through this page, you will find programming is not rocket science and everyone can code for fun.
As I mentioned before, this page is for beginners to have fun coding something, so I just pick some languages which are easy to start with, popular enough that you might have already heard of them, or convenient to try on your own. The languages below are simply ordered by alphabet.
### Assumptions
* You have a proper computer instead of a tablet or ipad. Actually you can write some code to create an app directly on your smart phone or tablet after installing some development apps. *AIDE*, *DroidScript* and *QPython* are such applications you can try if you want to play around.
* If you use Mac, the code for Linux should work on Mac as well. You know how to start a terminal on Mac or a command prompt on Windows. Finally, you are ready to get your hands dirty.
### Online Programming Editor
I list some online programming editor and compiler here, then you can simply try without installing any framework or tool on your PC or laptop.
* [repl.it](https://repl.it/languages )
* [tutorialspoint codingground](https://www.tutorialspoint.com/codingground.htm)
### Bash/Batch script
`Bash`, aka shell, is a built-in script language on Unix/Linux-like operating systems. A Bash script file ends with `sh` as its extension. `Batch` is the built-in script language on the Windows operating system. Bash is known as one of the Unix shell scripts; other shell scripts include ksh, csh, zsh, etc. Bash is one of the most important and powerful tools for system admins.
A `Batch` script file ends with `bat` as its extension. On Windows there is another file type ending with `cmd`, which works the same as a batch file. In November 2006, Microsoft created a new powerful language, `PowerShell`, which is similar to Unix shells. Basically `PowerShell` has replaced `Batch` as the first option for system admins.
* Unix/Linux
* Create a script file `hello.sh` with vi or nano
```bash
#!/bin/bash
var="Hello World!"
echo $var
```
* Change mode `chmod 755 hello.sh`
* Run the script `./hello.sh`
* Batch/Cmd (Windows)
* Create a script file hello.bat or hello.cmd with any editor
```batch
@echo off
set var='Hello World!'
echo.%var%
```
* Run the script `hello.bat` or `hello.cmd`
### C/C++
The C programming language was originally developed as a language to replace assembler in systems programming. It was very successful, making system code portable and easier to write and read. So Basically the kernel of most operating systems, Windows, Mac, Linux are coded in C.
Today C is one of the most used programming languages. Since C was designed to replace assembler language, and that in several important ways, it retains a very low level view of the machine. The C++ programming language was designed as a higher level version of C, providing support for object-oriented programming. It gives developer more power to handle the problem of real world.
* Create a program file `hello.c ` or `hello.cpp` with vi or any editor
```cpp
#include<stdio.h>
int main( )
{
char var[] = "World";
printf( "Hello %s \n Press any key to exit.", var );
char key = getchar();
return 0;
}
```
* Unix/Linux includes `gcc` by default. You just need to compile and run the console app. For Windows, you might need to install another tool [cygwin](https://cygwin.com/install.html) or [MinGW](http://www.mingw.org/wiki/MinGW_for_First_Time_Users_HOWTO)
```
g++ hello.c -o hello
./hello
```
### C\#
C# is a multi-paradigm programming language encompassing strong typing, imperative, declarative, functional, generic, object-oriented (class-based), and component-oriented programming disciplines. In January 1999, <NAME> formed a team to build a new language at the time called Cool, which stood for "C-like Object Oriented Language". By the time the .NET project was publicly announced in July 2000, Microsoft had renamed the language C#.
* Download and install .net framework from Microsoft website
* Create a program file helloworld.cs
```cs
public class Hello
{
public static void Main()
{
System.Console.WriteLine("Hello, World!");
}
}
```
* Compile with .net framework command.
* Run helloworld.exe file
```bash
C:\Windows\Microsoft.NET\Framework\v3.5\csc.exe helloworld.cs
helloworld.exe
```
### Go
Go, also known as golang, is a free and open source programming language created at Google. It is a compiled, statically typed language in the tradition of Algol and C, with garbage collection, expressive, concise, clean, and efficient. Its concurrency mechanisms make it easy to write programs that get the most out of multicore and networked machines, while its novel type system enables flexible and modular program construction.
* Download Go binary from www.golang.org
* Follow the instruction to [install](https://golang.org/doc/install) golang on your computer
* Create a program file hello.go
```go
package main
import "fmt"
func main() {
fmt.Println("hello world")
}
```
* Compile & run `go run hello.go`
### Java
Java is a general-purpose computer programming language that is concurrent, object-oriented, and specifically designed to have as few implementation dependencies as possible. It is intended to let application developers "write once, run anywhere", meaning that compiled Java code can run on all platforms with JVM without the need for recompilation.
* Download and install [Java JDK 8](http://www.oracle.com/technetwork/java/javase/downloads/index.html)
* For any Ubuntu 12 or higher version I recommend you follow this [instruction](http://www.webupd8.org/2012/09/install-oracle-java-8-in-ubuntu-via-ppa.html). It is pretty simple.
* For CentOS 6 or higher version, please follow this [instruction](https://wiki.centos.org/HowTos/JavaRuntimeEnvironment)
* For Windows please make sure you click the [JDK Download](http://www.oracle.com/technetwork/java/javase/downloads/index.html) button. The installer file end with `exe` is best option for beginner, instead of the zip file, because you don't need to setup **PATH** system environment by yourself.
* Create a program file HelloWorld.java
```java
public class HelloWorld {
public static void main(String[] args) {
System.out.println("Hello, World");
}
}
```
* Compile & run
```
javac HelloWorld.java
java HelloWorld
```
### Javascript
JavaScript, not to be confused with Java, was created in 10 days in May 1995 by <NAME>, then working at Netscape and now of Mozilla. The original name of this language was Mocha; in September of 1995 it was changed to LiveScript, and then in December of the same year the name JavaScript was adopted, riding on the popularity of Java at the time.
JavaScript is the programming language of the web. It's one of the most popular and in demand skills in today's job market for good reason. As a web developer, it is essential that you have a solid understanding of this versatile language.
* The sample will be presented as web page.
* Create a program file HelloWorld.html
```html
<!doctype html>
<html>
<head>
<script>
function helloWorld() {
document.write("Hello World");
}
helloWorld();
</script>
</head>
</html>
```
* Open file HelloWorld.html with your favorite browser.
### PHP
PHP as it's known today is actually the successor to a product named PHP/FI. Created in 1994 by <NAME>, the very first incarnation of PHP was a simple set of Common Gateway Interface (CGI) binaries written in the C programming language. Originally used for tracking visits to his online resume, he named the suite of scripts "Personal Home Page Tools," more frequently referenced as "PHP Tools." Over time, more functionality was desired, and Rasmus rewrote PHP Tools, producing a much larger and richer implementation.
* Download and install
**Linux**
Please find the install command [here](/blog/ubuntu-server-14/#install-php-compser)
**Windows**
* Download the file [PHP 5.x here](http://php.net/downloads.php)
* Pick the thread-safe zip file, then download and extract it to \path\to\php_folder.
* Update the PATH environment variable with your php directory
* Create a PHP script hello.php with any editor
```php
<?php
echo "Hello World!"
?>
```
* Run the script `php hello.php`
### Python
Python is currently one of the most popular dynamic programming languages, along with Perl, Tcl, PHP, and newcomer Ruby. Although it is often viewed as a "scripting" language, it is really a general purpose programming language along the lines of Lisp or Smalltalk (as are the others, by the way). Today, Python is used for everything from throw-away scripts to large scalable web servers that provide uninterrupted service 24x7. It is used for GUI and database programming, client- and server-side web programming, and application testing. It is used by scientists writing applications for the world's fastest supercomputers and by children first learning to program.
* Download and install [python 3.x](https://www.python.org/downloads/)
* Create a python script hello.py with any editor
```python
print "Hello World!"
```
* Run the script `python hello.py`
### Rust
Rust is a systems programming language that runs blazingly fast, prevents segfaults, and guarantees thread safety. Rust programming language is fundamentally about empowerment: no matter what kind of code you are writing now, Rust empowers you to reach farther, to program with confidence in a wider variety of domains than you did before.
* Download and install rust [rust 1.x](https://www.rust-lang.org/en-US/install.html)
* Create a project
`cargo new helloworld`
* Copy the content below into the rust file main.rs with any editor
```rust
fn main() {
println!("Hello World !")
}
```
* Run the Rust app with cargo: `cargo run`
<file_sep>#!/bin/bash
# List the hosted zone names for the given AWS CLI profile.
PROFILE=$1
echo "${PROFILE}"
# Zone names are the 4th column of the text output; read them into an array.
domains=($(aws route53 list-hosted-zones \
  --profile "${PROFILE}" \
  --output text | awk '{print $4}'))
# echo "${domains[@]}"
for domain in "${domains[@]}"; do
  # Print each zone name on its own line, without the trailing dot.
  printf '%s\n' "${domain%.}"
done
<file_sep>+++
date = "2013-09-06T16:43:12+11:00"
title = "Do you have potential to be a programmer?"
hidden=true
draft=true
+++
<!--### Do you have potential to be a programmer?-->
>*For many novices or beginners of software engineering or computer programming, they always question themselves like this "Do I have potential to be a programmer?" or "Should I choose programmer as my career?" or "Is it programmer right for me?". I am pretty sure you can find tons of answers or advices online, but there is a simple answer for such question for any career. If you love what you do, then it is right for you. It brings another question, how do I know if I love it or not. Not everyone has chance to try different jobs before they are qualified.*
>*There is a test created for this purpose. It is designed for people who have not enough programming skills but they want to know if they have such potential to be a programmer.*
>*The test needs you to create a few "Hello World" programs as the samples below with different programming languages. The programming languages I pick can run on Windows, Mac or Linux/Unix. You should try to complete the test on your own. There is no time limit.*
>*How much time you take to complete these programs or how many programs you can complete is not the purpose of this test, but you should try your best to complete as much as possible. I have to say it is not an easy task for most novices or beginners. Even you can't complete all programs, it doesn't mean you can't be a great programmer. I believe everyone can learn or do anything if they love or enjoy it.*
__So the test is to check if you really enjoy the process or problem-solving when you face to such challenges. If you always can learn something after you solve problem or you keep improving your skills during the test, it means you really love it and you can consider programmer as one of your future careers.__
__If you complete the test within 2 hours to one day from scratch, it means you already have capacity to be a programmer. It means the test is not designed for you.__
### Assumptions
* You have a proper computer instead of a tablet or ipad.
* You know how to download and install softwares on your computer.
* You know how to organize your folders and files.
* If you use Mac, the code for Linux should work on Mac as well. You know how to start a terminal on Mac or a command prompt on Windows. Finally, you are ready to get your hands dirty.
### Bash/Batch script
`Bash` is a built-in script language on Unix/Linux-like operating systems. A Bash script file ends with `sh` as its extension. `Batch` is the built-in script language on the Windows operating system. Bash is known as one of the Unix shell scripts; other shell scripts include ksh, csh, zsh, etc. Bash is one of the most important and powerful tools for system admins.
A `Batch` script file ends with `bat` as its extension. On Windows there is another file type ending with `cmd`, which works the same as a batch file. In November 2006, Microsoft created a new powerful language, `PowerShell`, which is similar to Unix shells. Basically `PowerShell` has replaced `Batch` as the first option for system admins.
* Unix/Linux
* Create a script file `hello.sh` with vi or nano
```bash
#!/bin/bash
var="Hello World!"
echo $var
```
* Change mode `chmod 755 hello.sh`
* Run the script `./hello.sh`
* Batch/Cmd (Windows)
* Create a script file hello.bat or hello.cmd with notepad
```dos
@echo off
set var='Hello World!'
echo.%var%
```
* Run the script `hello.bat` or `hello.cmd`
### C/C++
The C programming language was originally developed as a language to replace assembler in systems programming. It was very successful, making system code portable and easier to write and read. So Basically the kernel of most operating systems, Windows, Mac, Linux are coded in C.
Today C is one of the most used programming languages. Since C was designed to replace assembler language, and that in several important ways, it retains a very low level view of the machine. The C++ programming language was designed as a higher level version of C, providing support for object-oriented programming. It gives developer more power to handle the problem of real world.
## Windows
* Download and install a C/C++ development environment and compiler. [Instructions](http://www.cprogramming.com/code_blocks/)
* Create a program file `hello.c ` or `hello.cpp` with codeblocks, vi or notepad
```cpp
#include<stdio.h>
int main( )
{
char var[] = "World";
printf( "Hello %s \n Press any key to exit.", var );
char key = getchar();
return 0;
}
```
## Unix/Linux
Unix/Linux includes `gcc` by default. You just need to compile and run the console app. For Windows, you might need to install another tool [cygwin](https://cygwin.com/install.html) or [MinGW](http://www.mingw.org/wiki/MinGW_for_First_Time_Users_HOWTO)
```
g++ hello.c -o hello
./hello
```
### C\# ##
C# is a multi-paradigm programming language encompassing strong typing, imperative, declarative, functional, generic, object-oriented (class-based), and component-oriented programming disciplines. In January 1999, <NAME> formed a team to build a new language at the time called Cool, which stood for "C-like Object Oriented Language". By the time the .NET project was publicly announced in July 2000, Microsoft had renamed the language C#.
## Windows
* Download and install .net framework from Microsoft website
* Create a program file helloworld.cs
```cs
public class Hello
{
public static void Main()
{
System.Console.WriteLine("Hello, World!");
}
}
```
* Compile with .net framework command.
* Run helloworld.exe file
```bash
C:\Windows\Microsoft.NET\Framework\v3.5\csc.exe helloworld.cs
helloworld.exe
```
## Linux/Mac
### Mono
* Download and install mono framework and IDE for Linux or Mac from [here](http://www.mono-project.com/download/)
* Create a program file helloworld.cs as above in the IDE and run
### .Net Core
* Download and install .Net Core framework for Linux or mac from [here](https://www.microsoft.com/net/download/core)
* Follow the instructions to create a "Hello World" app
### Go
Go, also known as golang, is a free and open source programming language created at Google. It is a compiled, statically typed language in the tradition of Algol and C, with garbage collection, expressive, concise, clean, and efficient. Its concurrency mechanisms make it easy to write programs that get the most out of multicore and networked machines, while its novel type system enables flexible and modular program construction.
* Download Go binary from www.golang.org
* Follow the instruction to [install](https://golang.org/doc/install) golang on your computer
* Create a program file hello.go
```go
package main
import "fmt"
func main() {
fmt.Println("hello world")
}
```
* Compile & run `go run hello.go`
### Java
Java is a general-purpose computer programming language that is concurrent, object-oriented, and specifically designed to have as few implementation dependencies as possible. It is intended to let application developers "write once, run anywhere", meaning that compiled Java code can run on all platforms with JVM without the need for recompilation.
* Download and install [Java JDK 8](http://www.oracle.com/technetwork/java/javase/downloads/index.html)
* For any Ubuntu 12 or higher version I recommend you follow this [instruction](http://www.webupd8.org/2012/09/install-oracle-java-8-in-ubuntu-via-ppa.html). It is pretty simple.
* For CentOS 6 or higher version, please follow this [instruction](https://wiki.centos.org/HowTos/JavaRuntimeEnvironment)
* For Windows please make sure you click the [JDK Download](http://www.oracle.com/technetwork/java/javase/downloads/index.html) button. The installer file end with `exe` is best option for beginner, instead of the zip file, because you don't need to setup **PATH** system environment by yourself.
* Create a program file HelloWorld.java
```java
public class HelloWorld {
public static void main(String[] args) {
System.out.println("Hello, World");
}
}
```
* Compile & run
```
javac HelloWorld.java
java HelloWorld
```
### JavaScript
JavaScript, not to be confused with Java, was created in 10 days in May 1995 by <NAME>, then working at Netscape and now of Mozilla. The original name of this language was Mocha; in September of 1995 it was changed to LiveScript, and then in December of the same year the name JavaScript was adopted, riding on the popularity of Java at the time.
JavaScript is the programming language of the web. It's one of the most popular and in demand skills in today's job market for good reason. As a web developer, it is essential that you have a solid understanding of this versatile language.
* The sample will be presented as web page.
* Create a program file HelloWorld.html
```html
<!doctype html>
<html>
<head>
<script>
function helloWorld() {
document.write("Hello World");
}
helloWorld();
</script>
</head>
</html>
```
* Open file HelloWorld.html with your browser.
### PHP
PHP as it's known today is actually the successor to a product named PHP/FI. Created in 1994 by <NAME>, the very first incarnation of PHP was a simple set of Common Gateway Interface (CGI) binaries written in the C programming language. Originally used for tracking visits to his online resume, he named the suite of scripts "Personal Home Page Tools," more frequently referenced as "PHP Tools." Over time, more functionality was desired, and Rasmus rewrote PHP Tools, producing a much larger and richer implementation.
* Download and install
**Linux**
Please find the install command [here](/os/ubuntu-server-14/#install-php-compser)
**Windows**
* Download the file [PHP 5.x here](http://php.net/downloads.php)
* Pick the thread-safe zip file, then download and extract it to \path\to\php_folder.
* Update the PATH environment variable with your php directory
* Create a PHP script hello.php with notepad
```php
<?php
echo "Hello World!"
?>
```
* Run the script `php hello.php`
### Python
Python is currently one of the most popular dynamic programming languages, along with Perl, Tcl, PHP, and newcomer Ruby. Although it is often viewed as a "scripting" language, it is really a general purpose programming language along the lines of Lisp or Smalltalk (as are the others, by the way). Today, Python is used for everything from throw-away scripts to large scalable web servers that provide uninterrupted service 24x7. It is used for GUI and database programming, client- and server-side web programming, and application testing. It is used by scientists writing applications for the world's fastest supercomputers and by children first learning to program.
* Download and install [python 3.x](https://www.python.org/downloads/)
* Create a python script hello.py with any file editor
```python
print "Hello World!"
```
* Run the script `python hello.py`
### Ruby
Ruby is a dynamic, reflective, object-oriented, general-purpose programming language. Ruby was influenced by Perl, Smalltalk, Eiffel, Ada, and Lisp. It supports multiple programming paradigms, including functional, object-oriented, and imperative. It also has a dynamic type system and automatic memory management.
* Download and install proper [Ruby](https://www.ruby-lang.org/en/documentation/installation/)
* Create a ruby program hello.rb with any file editor
```ruby
print "Hello World!"
```
* Run the program `ruby hello.rb`
<file_sep>// var a = "abc"
// console.log(a);
// this.a = "123";
// console.log(a);
// console.log(this);
// foo = 'abc';
// alert(foo); // abc
// this.foo = 'def';
// alert(foo); //
// console.log( " 1==true : ", 1==true)
// console.log( " ''==true : ", ''==true)
// console.log( " '1'==true : ", '1'==true)
// console.log( " \"\"==true : ", "1"==true)
// console.log( " {}==true : ", [{}]==true)
// console.log( " []==true : ", ['1']==true)
// console.log( " 0==false : ", 0==false)
// console.log( " -1==false : ", -1==false)
// console.log( " ''==false : ", ''==false)
// console.log( " '1'==false : ", '1'==false)
// console.log( " \"\"==false : ", ""==false)
// console.log( " {}==false : ", {}==false)
// console.log( " []==false : ", []==false)
// console.log( " null==false : ", null==false)
// console.log( " undefined==false : ", undefined==false)
// console.log( " undefined==null : ", null==undefined)
// var obj = {
// _p: '_f',
// _f: function() {
// console.log(' f -> ', this);
// // console.log(this.__proto__.constructor.name);
// _ff = {
// // console.log(' ff ', this);
// _p: '_ff',
// _fff: function() {
// this.p = '123';
// console.log(' fff -> ', this);
// // console.log(this.__proto__.constructor.name);
// }
// };
// _ff._fff();
// }
// };
// obj._f();
// console.log(' -------------------------------------- ');
// var obj2 = {
// _p: 123,
// _f: () => {
// console.log(' f -> ', this);
// // console.log(this.__proto__.constructor.name);
// _ff = {
// _p: '_ff',
// _fff: () => {
// this.p = '123';
// console.log(' fff -> ', this);
// // console.log(this.__proto__.constructor.name);
// }
// };
// _ff._fff();
// }
// };
// obj2._f();
//////////////////////////////////////////
// var obj3 = {
// p: 'obj3',
// toBeCalled: function() {
// console.log(' this is toBeCalled ', this.p);
// },
// toBind: function(obj) {
// obj.toBeCalled();
// // console.log(' this is toBind ', obj);
// }
// };
// var testBind = obj3.toBind;
// testBind(obj3);
// var obj4 = {
// p: 'obj4',
// toBeCalled: () => {
// console.log(' this is toBeCalled ', this.p);
// },
// toBind: obj => {
// obj.toBeCalled();
// // console.log(' this is toBind ', obj);
// }
// };
// var testBind2 = obj4.toBind;
// testBind2(obj4);
<file_sep>+++
title = "PowerShell 7"
description = "Empower Windows with modern PowerShell"
+++
__This article will show you how to empower your Windows OS with modern PowerShell & other convenient tools.__
## PowerShell
> PowerShell is a cross-platform task automation solution made up of a command-line shell, a scripting language, and a configuration management framework. PowerShell runs on Windows, Linux, and macOS.
### PowerShell 7
#### Differences between PS 5 and PS 7
Windows PowerShell 5.1 is built on top of the .NET Framework v4.5. With the release of PowerShell 6.0, PowerShell became an open source project built on .NET Core 2.0. PowerShell 7.0 is built on .NET Core 3.1. And, with the release of PowerShell 7.2, PowerShell will be built on .NET 6.0. Moving from the .NET Framework to .NET Core allowed PowerShell to become a cross-platform solution. PowerShell runs on Windows, macOS, and Linux.
There are few differences in the PowerShell language between Windows PowerShell and PowerShell. The differences are most notable in the availability and behavior of PowerShell cmdlets between Windows and non-Windows platforms and the changes that stem from the differences between the .NET Framework and .NET Core.
#### Installation
Install latest PowerShell (version 7.x) from Windows Store
### Windows Terminal
> The Windows Terminal is a modern, fast, efficient, powerful, and productive terminal application for users of command-line tools and shells like Command Prompt, PowerShell, and WSL. Its main features include multiple tabs, panes, Unicode and UTF-8 character support, a GPU accelerated text rendering engine, and custom themes, styles, and configurations.
#### Install Nerd font
- [Nerd Font](https://www.nerdfonts.com/font-downloads)
- Download Meslo Nerd Font
- Install font file
#### Installation
Install Windows Terminal from the Microsoft Store. Windows Terminal is also an open source project that welcomes community participation; to participate, visit https://github.com/microsoft/terminal
#### Update Setting
- Settings > Startup
  - Default profile -> PowerShell (the default is Windows PowerShell)
  - Launch mode -> Maximized
- Update the settings file
  - Hide the Windows PowerShell profile & move PowerShell to the top
```json
"list": [
{
"guid": "{574e775e-4f2a-5b96-ac1e-a2962a402336}",
"hidden": false,
"name": "PowerShell",
"source": "Windows.Terminal.PowershellCore"
},
...
{
"commandline": "powershell.exe",
"font":
{
"face": "MesloLGM NF"
},
"guid": "{61c54bbd-c2c6-5271-96e7-009a87ff44bf}",
"hidden": true,
"name": "Windows PowerShell"
}
```
- Create a customized color scheme
```json
{
"background": "#001D26",
"black": "#282C34",
"blue": "#61AFEF",
"brightBlack": "#9aabc5",
"brightBlue": "#61AFEF",
"brightCyan": "#56B6C2",
"brightGreen": "#98C379",
"brightPurple": "#C678DD",
"brightRed": "#E06C75",
"brightWhite": "#DCDFE4",
"brightYellow": "#E5C07B",
"cursorColor": "#FFFFFF",
"cyan": "#56B6C2",
"foreground": "#DCDFE4",
"green": "#98C379",
"name": "One Half Dark (mod)",
"purple": "#C678DD",
"red": "#E06C75",
"selectionBackground": "#FFFFFF",
"white": "#DCDFE4",
"yellow": "#E5C07B"
}
```
- Settings > Defaults > Appearance
- Color scheme : One Half Dark (mod)
- Font face: MesloLGM NF
- Acrylic opacity: 60%
### Git
```
winget install -e --id Git.Git
```
### Scoop
> A command-line installer for Windows
#### Install from [Scoop](https://scoop.sh)
```powershell
Invoke-Expression (New-Object System.Net.WebClient).DownloadString('https://get.scoop.sh')
# or shorter
iwr -useb get.scoop.sh | iex
```
#### Other tools
```
scoop install curl sudo jq
scoop install neovim gcc
scoop install 7zip bat
```
### User Profile
- Create user profile folder
```powershell
mkdir ~\.config\powershell
```
- Create a new profile
```
nvim ~\.config\powershell\profile.ps1
```
- Add alias
```powershell
# Alias
Set-Alias vi nvim
Set-Alias ll ls
Set-Alias g git
Set-Alias grep findstr
Set-Alias tig $env:USERPROFILE\app\git\usr\bin\tig.exe
Set-Alias less $env:USERPROFILE\app\git\usr\bin\less.exe
```
- Update built-in profile
```powershell
# Get all profiles
$PROFILE | Get-Member -Type NoteProperty
nvim $PROFILE.CurrentUserCurrentHost
```
- Update $PROFILE.CurrentUserCurrentHost as below
```
. $env:USERPROFILE\.config\powershell\profile.ps1
```
### Oh My Posh
- Installation
[Please follow latest instruction](https://ohmyposh.dev/docs/migrating)
- Create customized theme file `sample.omp.json`
```
{
"$schema": "https://raw.githubusercontent.com/JanDeDobbeleer/oh-my-posh/main/themes/schema.json",
"final_space": false,
"osc99": true,
"blocks": [
{
"type": "prompt",
"alignment": "left",
"segments": [
{
"type": "shell",
"style": "diamond",
"leading_diamond": "╭─",
"trailing_diamond": "",
"foreground": "#ffffff",
"background": "#0077c2",
"properties": {
}
},
{
"type": "root",
"style": "diamond",
"leading_diamond": "",
"trailing_diamond": "",
"foreground": "#FFFB38",
"background": "#ef5350",
"properties": {
"root_icon": "\uf292",
"prefix": "<parentBackground>\uE0B0</> "
}
},
{
"type": "path",
"style": "powerline",
"powerline_symbol": "\uE0B0",
"foreground": "#E4E4E4",
"background": "#444444",
"properties": {
"style": "full",
"enable_hyperlink": true
}
},
{
"type": "git",
"style": "powerline",
"powerline_symbol": "\uE0B0",
"foreground": "#011627",
"background": "#FFFB38",
"background_templates": [
"{{ if or (.Working.Changed) (.Staging.Changed) }}#ffeb95{{ end }}",
"{{ if and (gt .Ahead 0) (gt .Behind 0) }}#c5e478{{ end }}",
"{{ if gt .Ahead 0 }}#C792EA{{ end }}",
"{{ if gt .Behind 0 }}#C792EA{{ end }}"
],
"properties": {
"branch_icon": "\ue725 ",
"fetch_status": true,
"fetch_upstream_icon": true,
"template": "{{ .HEAD }} {{ if .Working.Changed }}{{ .Working.String }}{{ end }}{{ if and (.Working.Changed) (.Staging.Changed) }} |{{ end }}{{ if .Staging.Changed }}<#ef5350> \uF046 {{ .Staging.String }}</>{{ end }}"
}
}
]
},
{
"type": "prompt",
"alignment": "right",
"segments": [
{
"type": "node",
"style": "diamond",
"leading_diamond": " \uE0B6",
"trailing_diamond": "\uE0B4",
"foreground": "#3C873A",
"background": "#303030",
"properties": {
"prefix": "\uE718 ",
"postfix": "",
"display_package_manager": true,
"yarn_icon": " <#348cba></>",
"npm_icon": " <#cc3a3a></> "
}
},
{
"type": "time",
"style": "diamond",
"invert_powerline": true,
"leading_diamond": " \uE0B6",
"trailing_diamond": "\uE0B4",
"background": "#40c4ff",
"foreground": "#ffffff",
"properties": {
"prefix": " \uf5ef ",
"postfix": " "
}
}
]
},
{
"type": "prompt",
"alignment": "left",
"newline": true,
"segments": [
{
"type": "text",
"style": "plain",
"foreground": "#21c7c7",
"properties": {
"prefix": "",
"postfix": "",
"text": "╰─"
}
},
{
"type": "exit",
"style": "plain",
"foreground": "#e0f8ff",
"properties": {
"prefix": "\u276F",
"display_exit_code": false,
"always_enabled": true,
"error_color": "#ef5350"
}
}
]
}
]
}
```
- Update user profile
```powershell
# Prompt
Import-Module posh-git
$omp_config = "$env:USERPROFILE\sample.omp.json"
oh-my-posh --init --shell pwsh --config $omp_config | Invoke-Expression
```
### Posh-Git
```
Install-Module posh-git
```
### Terminal Icons
- Installation
```powershell
Install-Module -Name Terminal-Icons -Repository PSGallery -Force
```
- Update user profile
```powershell
Import-Module -Name Terminal-Icons
```
### Z
```
Install-Module -Name z -Repository PSGallery -Force
```
### PSReadLine
- Installation
```powershell
Install-Module -Name PSReadLine -AllowPrerelease -Scope CurrentUser -Force -SkipPublisherCheck
```
- Update user profile
```powershell
# PSReadLine
Set-PSReadLineOption -EditMode Emacs
Set-PSReadLineOption -BellStyle None
Set-PSReadLineKeyHandler -Chord 'Ctrl+d' -Function DeleteChar
Set-PSReadLineOption -PredictionSource History
```
### FZF
- Installation
```powershell
scoop install fzf
Install-Module -Name PSFzf -Scope CurrentUser -Force
```
- Update user profile
```powershell
# Fzf
Import-Module PSFzf
Set-PsFzfOption -PSReadlineChordProvider 'Ctrl+f' -PSReadlineChordReverseHistory 'Ctrl+r'
```
<file_sep>+++
title = "Azure: RBAC - 1"
weight = 1
description="Introduction of RBAC"
+++
## RBAC
Azure RBAC is an authorization system built on Azure Resource Manager that provides fine-grained access management of Azure resources.
### What to do with RBAC
* Allow one user to manage virtual machines in a subscription and another user to manage virtual networks
* Allow a DBA group to manage SQL databases in a subscription
* Allow a user to manage all resources in a resource group, such as virtual machines, websites, and subnets
* Allow an application to access all resources in a resource group
### How it works
The way you control access to resources using Azure RBAC is to create role assignments. This is a key concept to understand – it's how permissions are enforced. A role assignment consists of three elements: security principal, role definition, and scope.
#### Security principal
A security principal is an object that represents a user, group, service principal, or managed identity that is requesting access to Azure resources.
* User - An individual who has a profile in Azure Active Directory.
* Group - A set of users created in Azure Active Directory.
* Service principal - A security identity used by applications or services to access specific Azure resources.
* Managed identity - An identity in Azure Active Directory that is automatically managed by Azure.
#### Role definition
A role definition is a collection of permissions. It's typically just called a role. A role definition lists the operations that can be performed, such as read, write, and delete.
Built-in roles:
* Owner - Has full access to all resources including the right to delegate access to others.
* Contributor - Can create and manage all types of Azure resources but can't grant access to others.
* Reader - Can view existing Azure resources.
* User Access Administrator - Lets you manage user access to Azure resources.
#### Scope
Scope is the set of resources that the access applies to. When you assign a role, you can further limit the actions allowed by defining a scope.
Scopes are structured in a parent-child relationship:management group, subscription, resource group, or resource.
{{<mermaid>}}
graph TB
MG(Management Group)
S1(Subscription)
S2(Subscription)
RG1(Resource Group)
RG2(Resource Group)
R1(Resource)
R2(Resource)
MG --- S1
MG --- S2
S2 --- RG1
S2 --- RG2
RG1 --- R1
RG1 --- R2
{{</mermaid >}}
#### Role assignments
A role assignment is the process of attaching a role definition to a user, group, service principal, or managed identity at a particular scope for the purpose of granting access. Access is granted by creating a role assignment, and access is revoked by removing a role assignment.
{{<mermaid>}}
stateDiagram
SP:Security Principal
RA:Role_Assignment
S:Scope
RD:Role_Definition
S --> RA
SP --> RA
RD --> RA
{{</mermaid >}}
#### Multiple role assignments
Azure RBAC is an additive model, so your effective permissions are the sum of your role assignments. The sum of the Contributor permissions and the Reader permissions is effectively the Contributor role for the resource group. Therefore, in this case, the Reader role assignment has no impact.
#### Deny assignments
Previously, Azure RBAC was an allow-only model with no deny, but now Azure RBAC supports deny assignments in a limited way. Similar to a role assignment, a deny assignment attaches a set of deny actions to a user, group, service principal, or managed identity at a particular scope for the purpose of denying access.
A role assignment defines a set of actions that are allowed, while a deny assignment defines a set of actions that are not allowed. In other words, deny assignments block users from performing specified actions even if a role assignment grants them access.
<file_sep>+++
title = "String & Representation"
description="String & Representation"
weight=3
+++
## String & Representation
### str()
* `print()` -> `str()` -> `__str__(self)`
* Fallback to `repr()`. By default, `str()` simply calls `repr()`
* Produces a readable, human-friendly representation of an object
* It is also the string constructor
### repr()
* Exactness is more important than human-friendliness
* Suited for debugging. Unambiguous, precise, include type
* Includes identifying information.
* Generally best for logging and developers
* The default repr() is not very helpful
* As a rule, you should always write a repr() for your classes
* standard library `reprlib.repr()` is a replacement for `repr()`
* Example
```python
>>> l = ['a'] * 1000
>>> import reprlib
>>> reprlib.repr(l)
"['a', 'a', 'a', 'a', 'a', 'a', ...]"
```
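* Example: a minimal sketch of the rule above (the `Point` class is made up for illustration); `__repr__` gives the unambiguous developer view while `__str__` stays human-friendly
```python
class Point:
    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __repr__(self):
        ## unambiguous, includes the type - good for debugging and logging
        return 'Point(x={}, y={})'.format(self.x, self.y)

    def __str__(self):
        ## human-friendly representation used by print() and str()
        return '({}, {})'.format(self.x, self.y)

p = Point(1, 2)
print(repr(p))   ## Point(x=1, y=2)
print(str(p))    ## (1, 2)
print(p)         ## (1, 2) - print() falls back to str()
```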
### format
* `"{:f}".format(obj)` -> `__format__(self, f)`
* Fallback to `str()`
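* Example: a small sketch of implementing `__format__` (the `Temperature` class and its `f` format code are made up)
```python
class Temperature:
    def __init__(self, celsius):
        self.celsius = celsius

    def __format__(self, f):
        ## 'f' selects Fahrenheit; any other spec falls back to Celsius
        if f == 'f':
            return '{:.1f}F'.format(self.celsius * 9 / 5 + 32)
        return '{:.1f}C'.format(self.celsius)

t = Temperature(21.5)
print('{}'.format(t))     ## 21.5C
print('{:f}'.format(t))   ## 70.7F
```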
### built-in functions
* `ascii()` replaces non-ASCII characters with escape sequences
* `chr()` converts an integer Unicode codepoint to a single character string
* `ord()` converts a single character to its integer Unicode codepoint
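* Example: round-tripping characters with `ord()`/`chr()` and escaping non-ASCII text with `ascii()`
```python
>>> ord('λ')
955
>>> chr(955)
'λ'
>>> chr(ord('a') + 1)
'b'
>>> ascii('γειά')
"'\\u03b3\\u03b5\\u03b9\\u03ac'"
```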
## Numeric & scalar types
### int
* unlimited precision signed integer
* bool in an int
```python
>>> False - True
-1
>>> False - False
0
>>> True - False
1
>>> True - True
0
```
### float
* IEEE-754 double precision (64-bit)
* 53 bits of binary precision
* 15 to 17 significant decimal digits of precision
* Floating-point numbers are represented in computer hardware as base 2 (binary) fractions.
* The decimal fraction 0.125 has value 1/10 + 2/100 + 5/1000, and the binary fraction 0.001 has value 0/2 + 0/4 + 1/8. These two fractions have identical values, the only real difference being that the first is written in base 10 fractional notation, and the second in base 2
* Unfortunately, most decimal fractions cannot be represented exactly as binary fractions. A consequence is that, in general, the decimal floating-point numbers you enter are only approximated by the binary floating-point numbers actually stored in the machine.
* Example of float data
```python
>>> f=0.9-0.8
>>> f
0.09999999999999998
>>> f=0.2-0.1
>>> f
0.1
>>> f=0.8-0.7
>>> f
0.10000000000000009
>>> float(2**53)
9007199254740992.0
>>> float(2**53+1)
9007199254740992.0
>>> float(2**53+2)
9007199254740994.0
>>> float(2**53+3)
9007199254740996.0
>>> float(2**53+4)
9007199254740996.0
```
### decimal
* standard library module decimal containing the class Decimal
* decimal floating point with configurable (although finite) precision, defaulting to 28 significant decimal digits
* identity is preserved. `x == (x // y) * y + x % y`, so integer division and modulus are consistent
* Example
```python
>>> from decimal import Decimal
>>> Decimal(0.6)-Decimal(0.5)
Decimal('0.09999999999999997779553950750')
>>> Decimal('0.6')-Decimal('0.5')
Decimal('0.1')
>>> Decimal(0.2)-Decimal(0.1)
Decimal('0.1000000000000000055511151231')
>>> Decimal(0.8)-Decimal(0.7)
Decimal('0.1000000000000000888178419700')
>>> Decimal('0.8')-Decimal('0.7')
Decimal('0.1')
## Change the precision
>>> import decimal
>>> decimal.getcontext().prec=4
>>> Decimal(-7) / Decimal(3)
Decimal('-2.333')
## Integer and Decimal handle // and % differently for negative numbers
>>> Decimal(-7) % Decimal(3)
Decimal('-1') ## Decimal truncates towards zero, so the remainder keeps the sign of the dividend
>>> Decimal(-7) // Decimal(3)
Decimal('-2') ## The next multiple of 3 towards zero is -6
>>> (-7) // (3)
-3 ## The largest multiple of 3 less than -7 is -9
>>> (-7) % (3)
2 ## -7 - (-9) == 2; the int remainder takes the sign of the divisor
```
### fraction
* standard library module fractions containing the class Fraction for rational numbers
* Denominator cannot be zero. e.g. 2 / 3, 2 is numerator, 3 is denominator.
* Example
```python
>>> from fractions import Fraction
>>> f = Fraction("2/3")
>>> f
Fraction(2, 3)
>>> Fraction(0.2)
Fraction(3602879701896397, 18014398509481984)
>>> Fraction(0.5)
Fraction(1, 2)
>>> Fraction(Decimal('0.3'))
Fraction(3, 10)
>>> Fraction(Decimal('0.3')) // Fraction(6, 7)
0
>>> Fraction(Decimal('0.3')) % Fraction(6, 7)
Fraction(3, 10)
>>> Fraction(Decimal('0.3')) - Fraction(6, 7)
Fraction(-39, 70)
>>> Fraction(Decimal('0.3')) + Fraction(6, 7)
Fraction(81, 70)
>>> Fraction(Decimal('0.3')) * Fraction(6, 7)
Fraction(9, 35)
>>> from math import floor, ceil
>>> ceil(Fraction('8/7'))
2
>>> floor(Fraction('8/7'))
1
```
### number base conversions
bin() | oct() | hex() | int(x, base)
------|-------|----------|------
base 2 | base 8 | base 16 | bases 2 to 36
* Example
```python
>>> 0b0101
5
>>> 0o63527
26455
>>> 0o63
51
>>> 0xad2
2770
>>> hex(22)
'0x16'
>>> oct(22)
'0o26'
>>> bin(22)
'0b10110'
```
### complex
* complex construction string argument may have parentheses but must not contain spaces
* `cmath` standard Library module contains complex equivalents of math
* Example
```python
>>> 1+1j
(1+1j)
>>> type(1+1j)
<class 'complex'>
>>> (1+1j) + (1-1j)
(2+0j)
>>> (1+1j) + (2-2j)
(3-1j)
>>> (1+1j) - (2-2j)
(-1+3j)
>>> complex('-2+1j')
(-2+1j)
>>> complex('(-2+1j)')
(-2+1j)
>>> complex(-2, 1)
(-2+1j)
```
### date & time
---
* Gregorian calendar
* `weekday()`
  - 0 Monday
  - 1 Tuesday
  - 2 Wednesday
  - 3 Thursday
  - 4 Friday
  - 5 Saturday
  - 6 Sunday
* `isoweekday()`
  - 1 Monday
  - 2 Tuesday
  - 3 Wednesday
  - 4 Thursday
  - 5 Friday
  - 6 Saturday
  - 7 Sunday
* timedelta
  * Constructor accepts and sums
    - days
    - seconds
    - microseconds
    - milliseconds
    - minutes
    - hours
    - weeks
  * Instances store only
    - days
    - seconds
    - microseconds
* Example
```python
import datetime
## the examples below use module-qualified names: datetime.date, datetime.time, datetime.datetime, datetime.timedelta
### date
>>> datetime.date(2000,month=2, day=10)
datetime.date(2000, 2, 10)
>>> datetime.date.today()
datetime.date(2014, 3, 14)
>>> datetime.date.fromtimestamp(99999999)
datetime.date(1973, 3, 3)
>>> datetime.date.fromordinal(9999)
datetime.date(28, 5, 17)
>>> datetime.date.max
datetime.date(9999, 12, 31)
>>> datetime.date.min
datetime.date(1, 1, 1)
>>> datetime.date.today().weekday()
4
>>> datetime.date.today().isoweekday()
5
>>> d = datetime.date.fromtimestamp(99999999)
>>> d.strftime('%A %d %B %b')
'Saturday 03 March Mar'
>>> d.strftime('%A %d %B %b %Y')
'Saturday 03 March Mar 1973'
>>> "The date is {:%A %d %B %b %y}".format(d)
'The date is Saturday 03 March Mar 73'
>>> "{date:%A} {date.day} {date:%B} {date:%Y}".format(date=d)
'Saturday 3 March 1973'
### time
>>> t=datetime.time(23,59,1,7451)
>>> t.isoformat()
'23:59:01.007451'
>>> t.strftime('%Hh%Mm%Ss')
'23h59m01s'
>>> datetime.time.max
datetime.time(23, 59, 59, 999999)
>>> datetime.time.min
datetime.time(0, 0)
### datetime
>>> datetime.datetime(2001,6,7,8,15,25,895)
datetime.datetime(2001, 6, 7, 8, 15, 25, 895)
>>> dt= datetime.datetime(2001,6,7,8,15,25,895)
>>> dt.isoformat()
'2001-06-07T08:15:25.000895'
### timedelta
>>> td = datetime.timedelta(weeks=2, days=1, hours=1, minutes=1, microseconds=2, milliseconds= 1)
>>> td
datetime.timedelta(15, 3660, 1002)
```<file_sep>+++
title = "F# Overview"
description = "F# Overview"
weight = 1
+++
## F`#`
> F# is an open-source, cross-platform, interoperable programming language for writing succinct, robust and performant code. Your focus remains on your problem domain, rather than the details of programming.
### Organizing F# Code
The following table shows reference articles related to organizing your F# code.
Title | Description
----------|------------
Namespaces | Learn about namespace support in F#. A namespace lets you organize code into areas of related functionality by enabling you to attach a name to a grouping of program elements.
Modules | Learn about modules. An F# module is like a namespace and can also include values and functions. Grouping code in modules helps keep related code together and helps avoid name conflicts in your program.
open Declarations | Learn about how open works. An open declaration specifies a module, namespace, or type whose elements you can reference without using a fully qualified name.
Signatures | Learn about signatures and signature files. A signature file contains information about the public signatures of a set of F# program elements, such as types, namespaces, and modules. It can be used to specify the accessibility of these program elements.
Access Control | Learn about access control in F#. Access control means declaring what clients are able to use certain program elements, such as types, methods, functions, and so on.
XML Documentation | Learn about support for generating documentation files for XML doc comments, also known as triple slash comments. You can produce documentation from code comments in F# as in other .NET languages.
### Literals and Strings
The following table shows reference articles that describe literals and strings in F#.
Title | Description
----------|------------
Literals | Learn about the syntax for literal values in F# and how to specify type information for F# literals.
Strings | Learn about strings in F#. The string type represents immutable text, as a sequence of Unicode characters. string is an alias for System.String in .NET.
Interpolated strings | Learn about interpolated strings, a special form of string that allows you to embed F# expressions directly inside them.
### Values and Functions
The following table shows reference articles that describe language concepts related to values, let-bindings, and functions.
Title | Description
----------|------------
Values | Learn about values, which are immutable quantities that have a specific type; values can be integral or floating point numbers, characters or text, lists, sequences, arrays, tuples, discriminated unions, records, class types, or function values.
Functions | Functions are the fundamental unit of program execution in any programming language. An F# function has a name, can have parameters and take arguments, and has a body. F# also supports functional programming constructs such as treating functions as values, using unnamed functions in expressions, composition of functions to form new functions, curried functions, and the implicit definition of functions by way of the partial application of function arguments.
Function Expressions | Learn how to use the F# 'fun' keyword to define a lambda expression, which is an anonymous function.
### Loops and Conditionals
The following table lists articles that describe F# loops and conditionals.
Title | Description
----------|------------
Conditional Expressions: if...then...else | Learn about the if...then...else expression, which runs different branches of code and also evaluates to a different value depending on the Boolean expression given.
Loops: for...in Expression | Learn about the for...in expression, a looping construct that is used to iterate over the matches of a pattern in an enumerable collection such as a range expression, sequence, list, array, or other construct that supports enumeration.
Loops: for...to Expression | Learn about the for...to expression, which is used to iterate in a loop over a range of values of a loop variable.
Loops: while...do Expression | Learn about the while...do expression, which is used to perform iterative execution (looping) while a specified test condition is true.
### Pattern Matching
The following table shows reference articles that describe language concepts.
Title | Description
----------|------------
Pattern Matching | Learn about patterns, which are rules for transforming input data and are used throughout F#. You can compare data with a pattern, decompose data into constituent parts, or extract information from data in various ways.
Match Expressions | Learn about the match expression, which provides branching control that is based on the comparison of an expression with a set of patterns.
Active Patterns | Learn about active patterns. Active patterns enable you to define named partitions that subdivide input data. You can use active patterns to decompose data in a customized manner for each partition.
### Exception Handling
The following table shows reference articles that describe language concepts related to exception handling.
Title | Description
----------|------------
Exception Handling | Contains information about exception handling support in F#.
The try...with Expression | Learn about how to use the try...with expression for exception handling.
The try...finally Expression | Learn about how the F# try...finally expression enables you to execute clean-up code even if a block of code throws an exception.
The use Keyword | Learn about the keywords use and using, which can control the initialization and release of resources.
Assertions | Learn about the assert expression, which is a debugging feature that you can use to test an expression. Upon failure in Debug mode, an assertion generates a system error dialog box.
### Types and Type Inference
The following table shows reference articles that describe how types and type inference work in F#.
Title | Description
----------|------------
Types | Learn about the types that are used in F# and how F# types are named and described.
Basic Types | Learn about the fundamental types that are used in F#. It also provides the corresponding .NET types and the minimum and maximum values for each type.
Unit Type | Learn about the unit type, which is a type that indicates the absence of a specific value; the unit type has only a single value, which acts as a placeholder when no other value exists or is needed.
Type Abbreviations | Learn about type abbreviations, which are alternate names for types.
Type Inference | Learn about how the F# compiler infers the types of values, variables, parameters, and return values.
Casting and Conversions | Learn about support for type conversions in F#.
Generics | Learn about generic constructs in F#.
Automatic Generalization | Learn about how F# automatically generalizes the arguments and types of functions so that they work with multiple types when possible.
Constraints | Learn about constraints that apply to generic type parameters to specify the requirements for a type argument in a generic type or function.
Flexible Types | Learn about flexible types. A flexible type annotation is an indication that a parameter, variable, or value has a type that is compatible with type specified, where compatibility is determined by position in an object-oriented hierarchy of classes or interfaces.
Units of Measure | Learn about units of measure. Floating point values in F# can have associated units of measure, which are typically used to indicate length, volume, mass, and so on.
Byrefs | Learn about byref and byref-like types in F#, which are used for low-level programming.
### Tuples, Lists, Collections, Options
The following table shows reference articles that describe types supported by F#.
Title | Description
----------|------------
Tuples | Learn about tuples, which are groupings of unnamed but ordered values of possibly different types.
Collections | An overview of the F# functional collection types, including types for arrays, lists, sequences (seq), maps, and sets.
Lists | Learn about lists. A list in F# is an ordered, immutable series of elements all of the same type.
Options | Learn about the option type. An option in F# is used when a value may or may not exist. An option has an underlying type and may either hold a value of that type or it may not have a value.
Arrays | Learn about arrays. Arrays are fixed-size, zero-based, mutable sequences of consecutive data elements, all of the same type.
Sequences | Learn about sequences. A sequence is a logical series of elements all of one type. Individual sequence elements are only computed if necessary, so the representation may be smaller than a literal element count indicates.
Sequence Expressions | Learn about sequence expressions, which let you generate sequences of data on-demand.
Reference Cells | Learn about reference cells, which are storage locations that enable you to create mutable variables with reference semantics.
### Records and Discriminated Unions
The following table shows reference articles that describe record and discriminated union type definitions supported by F#.
Title | Description
----------|------------
Records | Learn about records. Records represent simple aggregates of named values, optionally with members.
Anonymous Records | Learn how to construct and use anonymous records, a language feature that helps with the manipulation of data.
Discriminated Unions | Learn about discriminated unions, which provide support for values that may be one of a variety of named cases, each with possibly different values and types.
Structs | Learn about structs, which are compact object types that can be more efficient than a class for types that have a small amount of data and simple behavior.
Enumerations | Enumerations are types that have a defined set of named values. You can use them in place of literals to make code more readable and maintainable.
### Object Programming
The following table shows reference articles that describe F# object programming.
Title | Description
----------|------------
Classes | Learn about classes, which are types that represent objects that can have properties, methods, and events.
Interfaces | Learn about interfaces, which specify sets of related members that other classes implement.
Abstract Classes | Learn about abstract classes, which are classes that leave some or all members unimplemented, so that implementations can be provided by derived classes.
Type Extensions | Learn about type extensions, which let you add new members to a previously defined object type.
Delegates | Learn about delegates, which represent a function call as an object.
Inheritance | Learn about inheritance, which is used to model the "is-a" relationship, or subtyping, in object-oriented programming.
Members | Learn about members of F# object types.
Parameters and Arguments | Learn about language support for defining parameters and passing arguments to functions, methods, and properties. It includes information about how to pass by reference.
Operator Overloading | Learn about how to overload arithmetic operators in a class or record type, and at the global level.
Object Expressions | Learn about object expressions, which are expressions that create new instances of a dynamically created, anonymous object type that is based on an existing base type, interface, or set of interfaces.
### Async, Tasks and Lazy
The following table lists topics that describe F# async, task and lazy expressions.
Title | Description
----------|------------
Async Expressions | Learn about async expressions, which let you write asynchronous code in a way that is very close to the way you would naturally write synchronous code.
Task Expressions | Learn about task expressions, which are an alternative way of writing asynchronous code used when interoperating with .NET code that consumes or produces .NET tasks.
Lazy Expressions | Learn about lazy expressions, which are computations that are not evaluated immediately, but are instead evaluated when the result is actually needed.
### Computation expressions and Queries
The following table lists topics that describe F# computation expressions and queries.
Title | Description
----------|------------
Computation Expressions | Learn about computation expressions in F#, which provide a convenient syntax for writing computations that can be sequenced and combined using control flow constructs and bindings. They can be used to manage data, control, and side effects in functional programs.
Query Expressions | Learn about query expressions, a language feature that implements LINQ for F# and enables you to write queries against a data source or enumerable collection.
### Attributes, Reflection, Quotations and Formatting
The following table lists articles that describe F# reflective features, including attributes, quotations, nameof, and plain text formatting.
Title | Description
----------|------------
Attributes | Learn how F# Attributes enable metadata to be applied to a programming construct.
nameof | Learn about the nameof operator, a metaprogramming feature that allows you to produce the name of any symbol in your source code.
Caller Information | Learn about how to use Caller Info Argument Attributes to obtain caller information from a method.
Source Line, File, and Path Identifiers | Learn about the identifiers `__LINE__`, `__SOURCE_DIRECTORY__`, and `__SOURCE_FILE__`, which are built-in values that enable you to access the source line number, directory, and file name in your code.
Code Quotations | Learn about code quotations, a language feature that enables you to generate and work with F# code expressions programmatically.
Plain Text Formatting | Learn how to use sprintf and other plain text formatting in F# applications and scripts.
### Type Providers
The following table lists articles that describe F# type providers.
Title | Description
----------|------------
Type Providers | Learn about type providers and find links to walkthroughs on using the built-in type providers to access databases and web services.
Create a Type Provider | Learn how to create your own F# type providers by examining several simple type providers that illustrate the basic concepts.
<file_sep>+++
title = "Closure & Decorator"
description ="Closure & Decorator"
weight=2
+++
## Closure & Decorator
### LEGB rules
* Local, Enclosing, Global, Built-in: names are resolved in that order
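* Example: a small sketch of the lookup order
```python
x = 'global'

def outer():
    x = 'enclosing'
    def inner():
        x = 'local'
        print(x)    ## local wins
    inner()
    print(x)        ## enclosing

if __name__ == "__main__":
    outer()
    print(x)            ## global
    print(len('abc'))   ## len is found in the built-in scope
## test result:
## local
## enclosing
## global
## 3
```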
### Local function
* Useful for specialized, one-off functions
* Aid in code organization and readability
* Similar to lambdas, but more general
* May contain multiple expressions
* May contain statements
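* Example: a sketch of a one-off local key function (the sort helper is made up for illustration)
```python
def sort_by_last_letter(strings):
    def last_letter(s):
        return s[-1]
    return sorted(strings, key=last_letter)

if __name__ == "__main__":
    print(sort_by_last_letter(['hello', 'from', 'a', 'local', 'function']))
## test result:
## ['a', 'local', 'from', 'function', 'hello']
```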
### Closure
* Closure maintain references to objects from earlier scopes
* LEGB does not apply when making new bindings
* Usage of nonlocal
* Example
```python
def make_timer():
last_called = None
def elapsed():
nonlocal last_called
now = time.time()
if last_called is None:
last_called = now
return None
result = now - last_called
last_called = now
return result
return elapsed
if __name__ == "__main__":
mt = make_timer()
print(mt ())
print('-----------------------------')
print(mt ())
print('-----------------------------')
print(mt ())
```
* Use as function factory
* Example
```python
def raise_to(exp):
def raise_to_exp(x):
return pow(x, exp)
return raise_to_exp
if __name__ == "__main__":
square=raise_to(2)
cube= raise_to(3)
print(square(2))
print(cube(2))
## test result:
## 4
## 8
```
### Decorator
* Replace, enhance, or modify existing functions
* Does not change the original function definition
* Calling code does not need to change
* Decorator mechanism uses the modified function’s original name
* Example
* use function as decorator
```python
def escape_unicode(f):
def wrap(*args, **kwargs):
x = f(*args, **kwargs)
return ascii(x)
return wrap
@escape_unicode
def hello_greek():
return 'γειά σου κόσμος'
if __name__ == "__main__":
print(hello_greek())
## test result:
## '\u03b3\u03b5\u03b9\u03ac \u03c3\u03bf\u03c5 \u03ba\u03cc\u03c3\u03bc\u03bf\u03c2'
```
* Example: multiple decorators including function and instance
```python
import functools

class Trace:
def __init__(self):
self.enabled = True
def __call__(self, f):
@functools.wraps(f)
def wrap(*args, **kwargs):
if self.enabled:
print('Calling {}'.format(f.__name__))
return f(*args, **kwargs)
return wrap
def escape_unicode(f):
@functools.wraps(f)
def wrap(*args, **kwargs):
x = f(*args, **kwargs)
return ascii(x)
return wrap
tracer = Trace()

@tracer
@escape_unicode
def hello_greek():
return 'γειά σου κόσμος'
## test result
## Calling hello_greek
## '\u03b3\u03b5\u03b9\u03ac \u03c3\u03bf\u03c5 \u03ba\u03cc\u03c3\u03bc\u03bf\u03c2'
```
* Use as validator
```python
def check_non_negative(index):
def validator(f):
def wrap(*args):
if args[index] < 0:
raise ValueError(
'Arg {} must be non-negative'.format(index)
)
return f(*args)
return wrap
return validator
@check_non_negative(1)
def create_seq(value, size):
return [value]*size
if __name__ == "__main__":
create_seq('0', -3)
## test result
....
'Arg {} must be non-negative'.format(index)
ValueError: Arg 1 must be non-negative
```
## Properties & Class
### @staticmethod
* No access needed to either class or instance objects.
* Most likely an implementation detail of the class.
* May be able to be moved to become a module-scope function
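* Example: a minimal sketch (the converter class is made up); the method needs neither `self` nor `cls`
```python
class TemperatureConverter:
    @staticmethod
    def c_to_f(celsius):
        ## needs no access to class or instance state
        return celsius * 9 / 5 + 32

if __name__ == "__main__":
    print(TemperatureConverter.c_to_f(100))
## test result:
## 212.0
```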
### @classmethod
* Requires access to the class object to call other class methods or the constructor
* Always use self for the first argument to instance methods.
* Always use cls for the first argument to class methods.
* Use case: use as named constructors
```python
class FileStream(object):
@classmethod
def from_file(cls, filepath, ignore_comments=False, *args, **kwargs):
with open(filepath, 'r') as fileobj:
for obj in cls(fileobj, ignore_comments, *args, **kwargs):
yield obj
@classmethod
def from_socket(cls, socket, ignore_comments=False, *args, **kwargs):
        raise NotImplementedError ## Placeholder until implemented
def __init__(self, iterable, ignore_comments=False, *args, **kwargs):
...
```
### @property
* Encapsulation
* Example
```python
class A:
@property
def prop(self):
return self._prop
@prop.setter
def prop(self, value):
self._prop = value
```
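* Example: a property with validation in the setter (the `Person` class is made up for illustration)
```python
class Person:
    def __init__(self, name):
        self._name = name

    @property
    def name(self):
        return self._name

    @name.setter
    def name(self, value):
        if not value:
            raise ValueError('name cannot be empty')
        self._name = value

if __name__ == "__main__":
    p = Person('Ada')
    print(p.name)    ## attribute syntax, but the getter runs
    p.name = ''      ## the setter runs and raises ValueError: name cannot be empty
```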
<file_sep>+++
title = "MySql: Schema & Metadata"
description="Query schema & permission "
+++
## Information schema
> INFORMATION_SCHEMA provides access to database metadata, information about the MySQL server such as the name of a database or table, the data type of a column, or access privileges. Other terms that are sometimes used for this information are data dictionary and system catalog.
### Check out table size
```sql
SET @target_schema='THE_TARGET_SCHEMA';
SELECT
TABLE_NAME, table_rows, data_length, index_length,
round(((data_length + index_length) / 1024 / 1024 /1024),2) 'Size in GB',
round(((data_length + index_length) / 1024 / 1024 ),2) 'Size in MB'
FROM information_schema.TABLES
WHERE table_schema = @target_schema
ORDER BY data_length DESC
LIMIT 50;
```
### Check out running process
```sql
SET @target_schema='THE_TARGET_SCHEMA';
SELECT * FROM information_schema.PROCESSLIST
WHERE command <> 'Sleep'
AND db = @target_schema
;
-- Another short cut to show all process
SHOW FULL PROCESSLIST;
```
### Get the information of stored proc or function
```sql
SET @target_schema='THE_TARGET_SCHEMA';
SELECT * FROM information_schema.ROUTINES
WHERE routine_schema = @target_schema
;
```
### Optimize table after deletion
```sql
-- Query the table sorted by data free space
SELECT table_name , data_length, data_free
FROM information_schema.tables
WHERE table_schema=@target_schema
AND data_free > 0
ORDER BY data_free DESC;
-- Get table names which need optimization
SELECT table_name
FROM information_schema.tables
WHERE table_schema=@target_schema
AND data_free > 0
ORDER BY data_free DESC;
-- Optimize table
OPTIMIZE TABLE XXXXX;
```
<file_sep>+++
title = "Digital Ocean"
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
title = "Cron Job Note - 2"
description = "Common Cron Job examples - Refresh Cassandra database"
+++
### Refresh the database (NoSQL) - Cassandra
> The sample script is used to back up the data from the production database and refresh the staging or test database with it. It is not intended for restoring data after database corruption.
#### Backup the Cassandra database nightly
* There is a keyspace named `hho_ks` in the Cassandra nodes store the production data.
* Every night the staging Cassandra server will be refreshed with production's snapshot
* This solution is not built on the incremental snapshot.
* The production and staging nodes are running within the same subnet.
* The backup script will be run nightly on production server
* Cron job setting
```bash
00 20 * * * <user> /home/<user>/bin/cass_snapshot.sh >> /home/<user>/bin/refresh.log 2>&1
```
* The script to create a snapshot: `cass_snapshot.sh`
```bash
#!/bin/bash
# the log file sits home/<user>/bin/refresh.log
# SET staging Cassandra IP
CASS_STG_IP=0.0.0.0
echo "$(date): Beginning refresh of staging Cassandra ${CASS_STG_IP}"
START=$(date +%s)
cd /data/cassandra
# Prepare a schema script of keyspace hho_ks
echo "$(date): Prepare a schema script of keyspace hho_ks"
sudo bash -c "cqlsh -e 'DESC KEYSPACE hho_ks' > hho_ks.cql"
# Remove old snapshots folder and create new snapshots folder with some sub-folders
echo "$(date): Remove old snapshots folder and create new snapshots folder with some sub-folders"
sudo rm -rf snapshots
sudo mkdir snapshots
cd snapshots
sudo mkdir -p service interval imported_file market_file meter_config
cd /data/cassandra
# Clear snapshot hho_ks
echo "$(date): Clear snapshot hho_ks"
# find /data/cassandra/data/hho_ks/ -name snapshots -type d
nodetool clearsnapshot hho_ks > /dev/null 2>&1
# find /data/cassandra/data/hho_ks/ -name snapshots -type d
# Create new snapshot (cut out the snapshot id into a variable)
echo "$(date): Create new snapshot"
SNAP_ID=$(nodetool snapshot hho_ks | cut -c 66-78)
# echo SNAP_ID=$SNAP_ID
# Copy snapshot data to new folder snapshot
# ~ 7mins
echo "$(date): Copy snapshot data to new snapshot folder"
for d in $(ls /data/cassandra/snapshots); do \
sudo cp -R /data/cassandra/data/hho_ks/$d*/snapshots/$SNAP_ID/. /data/cassandra/snapshots/$d/ ; \
done
# Create a tarball of snapshots
echo "$(date): Create a tarball of snapshots"
sudo rm -rf snapshots.tar.gz
sudo tar -zcvf snapshots.tar.gz snapshots/ > /dev/null 2>&1 # ~30mins
# Copy tarball and schema script to staging Cassandra
echo "$(date): Copy tarball and schema script to staging Cassandra ${CASS_STG_IP}"
scp snapshots.tar.gz <user>@${CASS_STG_IP}:/home/<user>/ # ~5mins
scp hho_ks.cql <user>@${CASS_STG_IP}:/home/<user>/
# Refresh snapshots on staging Cassandra
echo "$(date): SSH to staging Cassandra ${CASS_STG_IP}"
sudo ssh <user>@${CASS_STG_IP} 'bash -s' < /home/<user>/bin/cass_refresh.sh
echo "$(date): Completed refresh of staging Cassandra ${CASS_STG_IP}"
END=$(date +%s)
echo "Refresh duration: $(( $END - $START ))s"
```
* Refresh the staging Cassandra node
* The script to refresh the staging node: `cass_refresh.sh`
```bash
#!/bin/bash
# Unzip tarball
echo "$(date): Unzip tarball"
cd
rm -rf snapshots
tar -zxvf snapshots.tar.gz > /dev/null 2>&1 # ~5mins
# Attempt to drop keyspace hho_ks
# It will likely throw a java.lang.RuntimeException
echo "$(date): Drop keyspace hho_ks"
cqlsh -e "drop keyspace hho_ks" > /dev/null 2>&1
sleep 1m
# Start/restart Cassandra service
echo "$(date): Restart Cassandra service"
sudo systemctl start cassandra.service
sudo systemctl restart cassandra.service
# Wait for 5 minutes to ensure the Cassandra service is running
echo "$(date): Wait for 5 minutes to ensure the Cassandra service is running"
sleep 5m
# Check Cassandra service status
# sudo systemctl status cassandra
# Attempt to drop keyspace hho_ks again
# It will likely complain: Cannot drop non existing keyspace 'hho_ks'
cqlsh -e "drop keyspace hho_ks" > /dev/null 2>&1
sleep 1m
# Remove data folder hho_ks
echo "$(date): Remove data folder hho_ks"
sudo rm -r -f /var/lib/cassandra/data/hho_ks
# Recreate keyspace hho_ks
echo "$(date): Recreate keyspace hho_ks"
cqlsh --file="hho_ks.cql" > /dev/null 2>&1
# Copy snapshots to new data folder hho_ks
# ~5mins
echo "$(date): Copy snapshots to new data folder hho_ks"
for d in $(ls snapshots); do \
sudo cp -R snapshots/$d/. /var/lib/cassandra/data/hho_ks/$d*/ ; \
done;
# Refresh keyspace
# ~10mins
echo "$(date): Refresh keyspace"
for d in $(ls snapshots); do \
nodetool refresh -- hho_ks $d ; \
done
```
<file_sep>+++
title = "DigitialOcean: Get Started"
description="User Setup, Security Update & Features"
weight=2
+++
## User Setup
### Create a new admin user
#### Add a new user
```
# Add new user
# set password
adduser <admin_user>
# user to sudo group
usermod -aG sudo <admin_user>
```
#### Set SSH access for new user
```
# Switch session to new user
su - <admin_user>
# navigate to user home
cd
# Prepare ssh directory
mkdir .ssh
chmod 700 ~/.ssh
# Copy root key
sudo cp /root/.ssh/authorized_keys ~/.ssh/authorized_keys
chmod 644 /home/<admin_user>/.ssh/authorized_keys
sudo chown -R <admin_user>:<admin_user> ~/
```
#### Login as new user via SSH
```
ssh -i ~/.ssh/<your_droplet_rsa> <admin_user>@<your_droplet_ip>
```
#### Set root password & disable SSH
```
sudo passwd
# rename key file
sudo mv /root/.ssh/authorized_keys /root/.ssh/disabled_authorized_keys
```
### Security Update
```
# Update only for security
sudo apt-get install unattended-upgrades
# Update security packages
sudo unattended-upgrade -d --dry-run
sudo unattended-upgrade -d
# Update quietly
sudo unattended-upgrade
```
### Tagging & Cloud Firewall
* Tags are custom labels you apply to Droplets that have multiple uses
* Add tags to your droplet. e.g. my-web-server
* DigitalOcean Cloud Firewalls are a free, network-based, stateful firewall service for your DigitalOcean Droplets. They block all traffic that isn’t expressly permitted by a rule. You can define the Droplets protected by a firewall individually or by using tags.
* Always setup Firewall for your droplets
* Set SSH permission for only given IP address
* Set HTTP for port 80
* Set HTTPS for port 443
## Other Features
### Floating IPs
DigitalOcean Floating IPs are publicly-accessible static IP addresses that you can assign to Droplets. A floating IP provides an additional static address you can use to access a Droplet without replacing or changing the Droplet’s original public IP address.
### Block Storage Volumes
DigitalOcean Block Storage is a flexible, convenient way of managing additional storage (in units called volumes) for your Droplets. Volumes are independent resources that you can move between Droplets within the same region. You can increase the size of a volume without powering down the Droplet it's attached to. They're most useful when you need more storage space but don't need the additional processing power or memory that a larger Droplet would provide.
### Load Balancers
DigitalOcean Load Balancers are a fully-managed, highly available load balancing service. Load balancers distribute traffic to groups of Droplets, which decouples the overall health of a backend service from the health of a single server to ensure that your services stay online.
## Known Limits
* Some Droplet network traffic is restricted to help prevent malicious actions, like reflected DDoS attacks
* TCP and UDP traffic on port 11211 inbound from external networks (due to the Memcached amplification attacks in March 2018)
* Multicast traffic.
* Traffic not matching a Droplet's IP address/MAC address.
* SMTP via Floating IPs and IPv6.
* Users can create up to 100 volumes and up to a total of 16 TiB of disk space per region. You can contact our support team to request an increase. You can attach a maximum of 7 volumes to any one node or Droplet, and this limit cannot be changed.
* General Purpose plans are not yet compatible with DigitalOcean Kubernetes or Managed Databases.
* You can't create more than 10 Droplets at the same time using the control panel or the API
### Build Web Host
* The next step is to build a web host in a Droplet.
title = "Python"
description = "Python Notes"
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
title = "AWS"
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
title = "Java"
description = "Java Notes"
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
title = "Debug PHP with Free IDE"
description="Eclipse PDT and Netbeans for PHP development"
+++
## PDT and Netbeans
> *PDT and Netbeans are the two most popular free PHP IDEs. We choose such an IDE for productivity, so code intelligence and debugging are the two key factors that make an IDE worth using. Because neither was created for PHP development originally, there is no built-in server to support PHP web debugging. When we try to debug with them, we can run into some weird problems. Here is how to prepare these IDEs for PHP debugging.*
## AMP package or without AMP
### Use AMP package as smart choice
* If you haven't installed any MySQL, Apache, or PHP on your computer, I recommend choosing an AMP (Apache, MySQL, PHP) package first, especially when the project is small, time is tight, and your client just wants some minor changes. In that scenario, an AMP package is a much better choice. Popular AMP packages include EasyPHP, MAMP, WAMP, and XAMPP. You can pick any of them as you prefer.
* This article is not going to discuss any specific AMP package; you can find detailed instructions on their official websites. If you still have problems, you can take a look at how to do it without an AMP package, but I can't guarantee the solution below will work for your AMP.
### Manage the pieces without an AMP package
* Here we just focus on how to work with the pieces (Apache, PHP) that you download and install one by one. 10 years ago it was easier to choose which one to download and install, because there were not many options, but now there are so many options that it gets confusing.
* The nastiest problem today is the compatibility issue between x86 and x64 applications on Windows. Even when a build is claimed to be x86, you will never know whether the pieces are really compatible until you test them. The reason is that Windows has different versions of the Visual C++ redistributable/compiler; if the pieces are built with different compilers, you cannot be sure they will work together.
* To avoid this problem, it is better to make sure the packages are compatible from the beginning. That is why an AMP package is the easier route: it solves this nasty problem by bundling everything you need together.
* Download compatible packages, especially Apache and PHP. When you download PHP, you need to know whether it is compiled with VC9, VC11, or VC14, and whether it is x86 or x64. After that, download the matching Apache build from [here](https://www.apachelounge.com/download).
## Prerequisites
* Apache path `c:\apache`. Version 2.4.x, VC11, x86
* PHP path `c:\php`, Version 5.6.x, VC11, x86
* Use `localhost:1234` as test website URL
* Project workspace path `c:\php_workspace`
* Website root path `c:\php_workspace\phpsite`, the `index.php` is under this root path
## Prepare PHP for debugging
### Download XDebug and install it
* Go to the xdebug [site](https://xdebug.org). Use the [wizard tool](https://xdebug.org/wizard.php) to find the correct xdebug build to download.
* Type `c:\php\php.exe -i | clip` to copy the php info to memory. And then paste the content to the input area, and click `Analyse my phpinfo() output`. It will show the correct file to download.
* Download the dll file and put it into php folder `c:\php\ext`. Update the `php.ini` file by adding the following lines at the bottom of file.
```ini
[XDebug]
zend_extension="c:/php/ext/php_xdebug-x.x.x-x.x-vcxx.dll"
xdebug.remote_enable=1
xdebug.remote_host=localhost
xdebug.remote_port=9000
xdebug.remote_autostart=1
xdebug.remote_connect_back=1
```
### Setup Apache to load PHP
* Add php module loading inside your apache configuration file.
* On the file `c:\apache\conf\httpd.conf` with nodepad and update as following setting
```apache
### Update apache root
### ServerRoot "c:/Apache24"
ServerRoot "c:/apache"
### Change origin 80 to 1234
### Listen 80
Listen 1234
### Add ServerName
ServerName localhost:1234
### Add PHP directory
PHPIniDir "C:/php"
### Add PHP module and handler
LoadModule php5_module "c:/php/php5apache2_4.dll"
AddHandler application/x-httpd-php .php
<FilesMatch \.php$>
SetHandler application/x-httpd-php
</FilesMatch>
### Change origin doc root htdocs
### DocumentRoot "c:/Apache24/htdocs"
DocumentRoot "c:/php_workspace/phpsite"
<Directory "c:/php_workspace/phpsite">
Options Indexes FollowSymLinks
AllowOverride All
Require all granted
</Directory>
```
## Debug PHP with PDT
**If you have PHP 7 installed, please choose the up to PHP 5.6.x as PHP runtime.**
* Open phpsite as PHP the project with Eclipse PDT
* Setup PHP Web Application for debugging
- Choose menu `Run` > `Debug Configurations` > `PHP Web Application`
- Add new configuration by clicking 
* Configure PHP Web Server
- Choose `Default PHP Web Server` from the dropdown list
- Click the button `configuration`, it prop up a Window dialog.
  - On the tab `Server`, set `localhost:1234` as `Base URL`. It should be the same as the ServerName in your `httpd.conf`
- On the tab `Debugger`, choose `XDebug` from the dropdown list, then other setting as default.
- On the tab `Path Mapping`, add new mapping. Enter `/` as `Path on Server`, Put `c:\php_workspace\phpsite` as `Path in File system`, then leave other setting as default.
- Close the Window dialog.
- Choose the File `c:\php_workspace\phpsite\index.php` as startup page.
- If the `Auto Generated URL` is not `localhost:1234/index.php`, then manually update it.
- After all these done, you can debug your website now.
## Debug PHP with Netbeans
**If you have PHP 7 installed, please choose the up to PHP 5.6.x as PHP runtime.**
* Open phpsite as PHP the project with Netbeans.
* Configure PHP Web Server
- On the `Projects` panel, choose the project `phpsite` , right click and choose `Properties`
- Choose `Sources` within the categories. Check the PHP version is the same as your PHP version.
- Choose `Run Configurations` within the categories, and update the default configuration.
- Choose `Local Web Site` from `Run As` dropdown list.
- Set `localhost:1234` as Project URL
- Click the button `Advanced ...` to update web server
- Add a new path mapping. Enter `/` as `Path on Server`, Put `c:\php_workspace\phpsite` as `Path in File system`, then leave other setting as default.
- Leave other default setting and click button `OK`
- Now you can debug php site with Netbeans
## Use Nginx instead of Apache
* Download `RunHiddenConsole`
- Download [RunHiddenConsole](http://redmine.lighttpd.net/attachments/660/RunHiddenConsole.zip)
- Extract the file `RunHiddenConsole.exe` to folder `c:\bin\`
* Install Nginx 32 bit version.
- We assume the ngnix's path is `c:\nginx\`
* Confirm `php-cgi.exe` is within the PHP folder `c:\php`.
* Setup Nginx FastCGI with PHP
- Back the original `nginx.conf`
- Create a script to launch `nginx` and `php` in sequence.
```dos
@ECHO OFF
ECHO Start PHP FastCGI...
SET PATH=c:\php;%PATH%
c:\bin\RunHiddenConsole.exe c:\php\php-cgi.exe -b 127.0.0.1:41234
ECHO Start Nginx ...
c:\bin\RunHiddenConsole.exe c:\nginx\nginx.exe
```
- Open the `nginx.conf` via notepad
- Replace the `server` block with following setting
```nginx
server {
listen 1234;
server_name localhost;
root c:/php_workspace/phpsite;
#charset koi8-r;
### Static
location / {
index index.php;
### try_files $uri $uri/ @missing;
}
location ~ /\.ht {
deny all;
}
location ~ /\.rewrite {
deny all;
}
### PHP FastCGI
location ~ \.php$ {
root c:/php_workspace/phpsite;
### root html;
fastcgi_pass 127.0.0.1:41234;
fastcgi_index index.php;
fastcgi_param SCRIPT_FILENAME c:/php_workspace/phpsite/$fastcgi_script_name;
include fastcgi_params;
}
}
```
### PHP 5.x and PHP 7.x on Ubuntu 16
```
## Add repo
sudo add-apt-repository -y ppa:ondrej/php
sudo apt-get update
sudo apt-get install php5.6-fpm
sudo apt-get install
```
### Troubleshooting
## Update
Today I again ran into a problem with PHP 7 running despite having disabled the php7.0 Apache module: phpinfo was showing PHP 7 using FastCGI ...
... So if, after you follow the instructions below, you face this situation, you may need to disable the proxy_fcgi Apache module:
sudo a2dismod proxy_fcgi proxy; sudo service apache2 restart
1. Re-Install PHP 5.6
What worked for me was this guide: http://www.lornajane.net/posts/2016/php-7-0-and-5-6-on-ubuntu
Actually it is not required to remove php7.0; you can install php5.6 alongside it (also because you would have a dependency problem with the phpmyadmin package, which requires php7.0).
Assuming libapache2-mod-php is a suitable way to enable PHP in Apache for you, you can proceed in this way:
sudo add-apt-repository ppa:ondrej/php
sudo apt-get update
sudo apt-get install php7.0 php5.6 php5.6-mysql php-gettext php5.6-mbstring \
php-mbstring php7.0-mbstring php-xdebug libapache2-mod-php5.6 libapache2-mod-php7.0
2. Switch PHP version:
From php5.6 to php7.0:
Apache:
sudo a2dismod php5.6 ; sudo a2enmod php7.0 ; sudo service apache2 restart
CLI:
sudo update-alternatives --set php /usr/bin/php7.0
From php7.0 to php5.6:
Apache:
sudo a2dismod php7.0 ; sudo a2enmod php5.6 ; sudo service apache2 restart
CLI:
sudo update-alternatives --set php /usr/bin/php5.6
## Build xdebug for different PHP
### PHP 5.6
```
php5.6 -i | xsel --clipboard
### open url http://xdebug.org/wizard.php
### copy the content and download the correct xdebug tar ball xdebug-2.5.3.tar.gz
tar -xvf xdebug-2.5.3.tar.gz
cd xdebug-2.5.3
phpize5.6
### You will output as below
### ...
### Zend Module Api No: 20131226
### Zend Extension Api No: 220131226
./configure --with-php-config=/usr/bin/php-config5.6
make
sudo cp modules/xdebug.so /usr/lib/php/20131226
```
### Create xdebug.ini with `mods-available`
```
zend_extension="/usr/lib/php/20131226/xdebug.so"
xdebug.remote_enable=1
xdebug.remote_handler=dbgp
xdebug.remote_mode=req
xdebug.remote_host=127.0.0.1
xdebug.remote_port=9000
```
### Create symbolic links
```
sudo ln -s /etc/php/5.6/mods-available/xdebug.ini /etc/php/5.6/cli/conf.d/20-xdebug.ini
sudo ln -s /etc/php/5.6/mods-available/xdebug.ini /etc/php/5.6/fpm/conf.d/20-xdebug.ini
```
### PHP 7.0
```
php7.0 -i | xsel --clipboard
### open url http://xdebug.org/wizard.php
### copy the content and download the correct xdebug tar ball xdebug-2.5.3.tar.gz
tar -xvf xdebug-2.5.3.tar.gz
cd xdebug-2.5.3
phpize7.0
### You will output as below
### ...
### Zend Module Api No: 20151012
### Zend Extension Api No: 320151012
./configure --with-php-config=/usr/bin/php-config7.0
make
sudo cp modules/xdebug.so /usr/lib/php/20151012
```
### Create xdebug.ini with `mods-available`
```
zend_extension="/usr/lib/php/20151012/xdebug.so"
xdebug.remote_enable=1
xdebug.remote_handler=dbgp
xdebug.remote_mode=req
xdebug.remote_host=127.0.0.1
xdebug.remote_port=9000
```
### Create symbolic links
```
sudo ln -s /etc/php/7.0/mods-available/xdebug.ini /etc/php/7.0/cli/conf.d/20-xdebug.ini
sudo ln -s /etc/php/7.0/mods-available/xdebug.ini /etc/php/7.0/fpm/conf.d/20-xdebug.ini
```
<file_sep>+++
title = "AWS: SQS,SNS,SES - 2"
description = "Use Case - SQS, SNS, SES"
weight=7
+++
## Use Case
### Overview
{{<mermaid>}}
graph LR
Sender_Email("<EMAIL>")
Email_Failed
Email_Delivered
SNS_Subscriptions --> Email_Failed
SNS_Subscriptions --> Email_Delivered
Bounce_Notification --> Email_Failed
Complaint_Notification --> Email_Failed
Delivery_Notification --> Email_Delivered
subgraph SQS
subgraph Email_Status_Queue
SNS_Subscriptions
end
end
subgraph SNS
subgraph Topics
Email_Failed
Email_Delivered
end
end
subgraph SES
Sender_Email
subgraph Notifications
Bounce_Notification
Complaint_Notification
Delivery_Notification
end
end
{{</mermaid >}}
### SNS Setup
* Create a topic for failed email, e.g. bounce or spam complaint
- It is named **Email_Failed** in the diagram above
* Create a topic for delivered email
- It is named **Email_Delivered** in the diagram above
### SES Setup
* Create a new domain for sender email, e.g. <EMAIL>
* Set up the notifications (a code sketch follows after this list)
- Bounce Notification maps to **Email_Failed**
- Complaint Notification maps to **Email_Failed**
- Delivery Notification maps to **Email_Delivered**
* Verify the domain - test.com
* Verify the DKIM - *.domainkey.test.com
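Those notification mappings can also be set up programmatically. Below is a minimal sketch using the AWS SDK for Java (v1); the identity and topic ARNs are placeholders, and credentials/region are assumed to come from the default provider chain.
```java
import com.amazonaws.services.simpleemail.AmazonSimpleEmailService;
import com.amazonaws.services.simpleemail.AmazonSimpleEmailServiceClientBuilder;
import com.amazonaws.services.simpleemail.model.NotificationType;
import com.amazonaws.services.simpleemail.model.SetIdentityNotificationTopicRequest;
public class SesNotificationSetup {
    public static void main(String[] args) {
        // Placeholder values -- replace with your verified identity and the real topic ARNs
        String identity = "test.com";
        String emailFailedArn = "arn:aws:sns:us-west-2:111111111111:Email_Failed";
        String emailDeliveredArn = "arn:aws:sns:us-west-2:111111111111:Email_Delivered";
        // Region and credentials come from the default provider chain
        AmazonSimpleEmailService ses =
                AmazonSimpleEmailServiceClientBuilder.standard().build();
        // Bounce and Complaint notifications map to the Email_Failed topic
        ses.setIdentityNotificationTopic(new SetIdentityNotificationTopicRequest()
                .withIdentity(identity)
                .withNotificationType(NotificationType.Bounce)
                .withSnsTopic(emailFailedArn));
        ses.setIdentityNotificationTopic(new SetIdentityNotificationTopicRequest()
                .withIdentity(identity)
                .withNotificationType(NotificationType.Complaint)
                .withSnsTopic(emailFailedArn));
        // Delivery notifications map to the Email_Delivered topic
        ses.setIdentityNotificationTopic(new SetIdentityNotificationTopicRequest()
                .withIdentity(identity)
                .withNotificationType(NotificationType.Delivery)
                .withSnsTopic(emailDeliveredArn));
    }
}
```
The same mapping can of course be done by hand in the SES console; the code is just a repeatable version of the three bullet points above.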
### SQS Setup
* Create a new queue named **Email_Status_Queue**
* Add SNS subscriptions **Email_Failed** and **Email_Delivered** to the queue (see the sketch below)
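A minimal sketch of these two steps with the AWS SDK for Java (v1). The topic ARNs are placeholders; `Topics.subscribeQueue` both creates the subscription and adjusts the queue policy so the topics are allowed to deliver messages to the queue.
```java
import com.amazonaws.services.sns.AmazonSNS;
import com.amazonaws.services.sns.AmazonSNSClientBuilder;
import com.amazonaws.services.sns.util.Topics;
import com.amazonaws.services.sqs.AmazonSQS;
import com.amazonaws.services.sqs.AmazonSQSClientBuilder;
public class EmailStatusQueueSetup {
    public static void main(String[] args) {
        // Placeholder ARNs -- replace with the real topic ARNs created in the SNS setup
        String emailFailedArn = "arn:aws:sns:us-west-2:111111111111:Email_Failed";
        String emailDeliveredArn = "arn:aws:sns:us-west-2:111111111111:Email_Delivered";
        AmazonSQS sqs = AmazonSQSClientBuilder.defaultClient();
        AmazonSNS sns = AmazonSNSClientBuilder.defaultClient();
        // Create the queue (returns the existing URL if it was already created)
        String queueUrl = sqs.createQueue("Email_Status_Queue").getQueueUrl();
        // Subscribe both topics to the queue and grant them permission to send to it
        Topics.subscribeQueue(sns, sqs, emailFailedArn, queueUrl);
        Topics.subscribeQueue(sns, sqs, emailDeliveredArn, queueUrl);
    }
}
```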
### Integration
* Sample sender code
```java
package email.sample;
// import ....
public class SesSample {
static final String FROM = "<EMAIL>";
static final String TO = "<EMAIL>";
static final String CONFIGSET = "ConfigSet";
// The subject line for the email.
static final String SUBJECT = "SES test";
// The email body for recipients with non-HTML email clients.
static final String TEXTBODY = "This email was sent through Amazon SES ";
public static void main(String[] args) throws IOException {
try {
AmazonSimpleEmailService client =
AmazonSimpleEmailServiceClientBuilder.standard()
// Replace the AWS Region
.withRegion(Regions.US_WEST_2).build();
SendEmailRequest request = new SendEmailRequest()
.withDestination(
new Destination().withToAddresses(TO))
.withMessage(new Message()
.withBody(new Body()
.withText(new Content()
.withCharset("UTF-8").withData(TEXTBODY)))
.withSubject(new Content()
.withCharset("UTF-8").withData(SUBJECT)))
.withSource(FROM);
client.sendEmail(request);
System.out.println("Email sent!");
} catch (Exception ex) {
System.out.println("Error message: " + ex.getMessage());
}
}
}
```
* Sample SQS consumer code
```java
// .....
public class SqsConsumer {
public void receive(Object message) throws Exception {
if (message instanceof CamelMessage) {
String body = ((CamelMessage) message).getBodyAs(String.class, camelContext());
JsonNode envelope = Json.parse(body);
if (envelope.has("Message")) {
JsonNode notification = Json.parse(envelope.get("Message").asText());
String notificationType = notification.get("notificationType").asText();
log.debug("Processing email notification: " + notificationType);
switch (notification.get("notificationType").asText()) {
case "Received":
received.tell(new EmailActorProtocol.EmailReceived(notification), self());
break;
case "Bounce":
response.tell(new EmailActorProtocol.EmailBounced(notification), self());
break;
case "Delivery":
response.tell(new EmailActorProtocol.EmailDelivered(notification), self());
break;
case "Complaint":
response.tell(new EmailActorProtocol.EmailComplaintReceived(notification), self());
break;
default:
throw new RuntimeException(String.format("Notification type %s not supported", notificationType));
}
}
}
}
}
// EmailActorProtocol
// public class EmailActorProtocol {
// public interface EmailResponse {
// }
// @Data
// static public class EmailReceived {
// private final JsonNode body;
// }
// @Data
// static public class EmailDelivered implements EmailResponse {
// private final JsonNode body;
// }
// @Data
// static public class EmailBounced implements EmailResponse {
// private final JsonNode body;
// }
// @Data
// static public class EmailComplaintReceived implements EmailResponse {
// private final JsonNode body;
// }
// static public class GetHealth {
// }
// @Data
// static public class Health {
// private final boolean healthy;
// }
// }
```
<file_sep># Blog
[](https://gitter.im/blog-hugo/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
* Use any `markdown` editor to edit the blog or post under the folder `content`
* Commit the markdown files to the repo `blog-hugo` by running the script commit.bat. The commit.bat script will only commit the markdown files. To commit other files, please use git bash or git extension.
* Deploy posts to harryho.github.io by running the script deploy.bat. After that, you can check it in the browser.
#### __Hugo version upto 0.54__
> The hugo binaries can be found under folder `archived/hugo`
The theme Docdock has been forked as customized repository
```
git submodule add --force https://github.com/harryho/hugo-theme-docdock.git themes/docdock
```
Update submodule
```
git submodule init
git submodule update
cd themes/<your_theme>
git checkout hho-blog # only hho-blog branch should be used
git pull
```
Run the site locally
```
hugo server
# with script
# Linux / Mac
./start.sh
# Windows
./start.cmd
```
Edit the site with draft
```bash
# Run the start script with param d
./start.sh d
```
<file_sep>+++
date = "2018-07-04T14:59:31+11:00"
title = "Ubuntu Desktop 18 LTS note"
description = "Post-installation for Ubuntu 18 desktop"
+++
### Prelude
> This article mainly records the things to do after installing Ubuntu Desktop 18.04.
### Purpose
All post-installation actions are meant to make the Ubuntu Desktop a wonderful toolkit for developers.
### Prerequisite
* Install all essentials
```bash
sudo apt install -y git curl
sudo apt-get install -y apt-transport-https ca-certificates gnupg-agent
sudo apt install -y software-properties-common
```
### Install & Setup Zsh
* Install Zsh
```bash
sudo apt install -y zsh
```
* Setup Zsh
> I prefer [Prezto Zsh](https://github.com/sorin-ionescu/prezto.git), which is the minimal version of Oh-My-Zsh. In my opinion, Oh-My-Zsh is kind of slow and sort of overblown.
- Create a script `setup_prezto.zsh`
- Save the code below to the script and run `zsh ./setup_prezto.zsh && source ~/.zshrc`
```bash
git clone --recursive https://github.com/sorin-ionescu/prezto.git \
"${ZDOTDIR:-$HOME}/.zprezto"
setopt EXTENDED_GLOB
for rcfile in "${ZDOTDIR:-$HOME}"/.zprezto/runcoms/^README.md(.N); do
ln -s "$rcfile" "${ZDOTDIR:-$HOME}/.${rcfile:t}"
done
if [ -d ~/.zprezto ];then
cp ubt18/z* ~/.zprezto/runcoms/
fi
[[ ! -d ~/.zprezto-contrib ]] && mkdir -p ~/.zprezto-contrib;
sudo chown -R $USER:$USER ~/.zprezto
sudo chown -R $USER:$USER ~/.zprezto-contrib
chsh -s /bin/zsh
echo "Setup finished."
echo "Please reboot or restart terminal."
```
### Install & Setup Vim with useful plugins
* Install Vim
```
sudo apt install -y vim
```
* Setup Vim with some amazing plugins
> There are many different Vim plugins available online. There is a place called [Vim Awesome](https://vimawesome.com/), where you can find anything you want.
> My favorite option is the [Junegunn's Vim plugins](https://github.com/junegunn/vim-plug). I like the simplicity of this solution.
- Prepare the vimrc as below
```
set nu
colorscheme delek
:imap jj <Esc>
:imap jk <Esc>
:imap kj <Esc>
:imap ii <Esc>
" Specify a directory for plugins
" - For Neovim: ~/.local/share/nvim/plugged
" - Avoid using standard Vim directory names like 'plugin'
call plug#begin('~/.vim/plugged')
" Make sure you use single quotes
" Shorthand notation; fetches https://github.com/junegunn/vim-easy-align
Plug 'junegunn/vim-easy-align'
" Any valid git URL is allowed
Plug 'https://github.com/junegunn/vim-github-dashboard.git'
" Multiple Plug commands can be written in a single line using | separators
Plug 'SirVer/ultisnips' | Plug 'honza/vim-snippets'
" On-demand loading
Plug 'scrooloose/nerdtree', { 'on': 'NERDTreeToggle' }
Plug 'tpope/vim-fireplace', { 'for': 'clojure' }
" Using a non-master branch
Plug 'rdnetto/YCM-Generator', { 'branch': 'stable' }
" Using a tagged release; wildcard allowed (requires git 1.9.2 or above)
Plug 'fatih/vim-go', { 'tag': '*' }
" Plugin options
Plug 'nsf/gocode', { 'tag': 'v.20150303', 'rtp': 'vim' }
" Plugin outside ~/.vim/plugged with post-update hook
Plug 'junegunn/fzf', { 'dir': '~/.fzf', 'do': './install --all' }
Plug 'Shougo/vimproc.vim', { 'do': 'make' }
function! BuildYCM(info)
" info is a dictionary with 3 fields
" - name: name of the plugin
" - status: 'installed', 'updated', or 'unchanged'
" - force: set on PlugInstall! or PlugUpdate!
if a:info.status == 'installed' || a:info.force
!./install.py
endif
endfunction
Plug 'Valloric/YouCompleteMe', { 'do': function('BuildYCM') }
" Plug 'Valloric/YouCompleteMe', { 'do': './install.py' }
Plug 'fatih/vim-go', { 'do': ':GoInstallBinaries' }
" Unmanaged plugin (manually installed and updated)
Plug '~/my-prototype-plugin'
" Initialize plugin system
call plug#end()
```
### Install MySql
- Here I just install the default MySql 5.7. If you want to install the new version or MariaDB, please check out the official website.
```bash
# Install MySql server and client
sudo apt install mysql-server mysql-client
# Check if the MySql service active and running
sudo systemctl status mysql.service
# Enable or Restart MySql service
sudo systemctl enable mysql.service
sudo systemctl restart mysql.service
# Create root account
sudo mysql_secure_installation
# After the password for root has been created
sudo mysql -u root -p
# Update the password via the mysql command prompt
mysql>ALTER USER 'root'@'localhost' IDENTIFIED WITH mysql_native_password BY '<PASSWORD>';
# Creating another admin account is highly recommended in a production environment
mysql>GRANT ALL PRIVILEGES ON *.* TO 'admin'@'localhost' IDENTIFIED BY 'your_strong_password';
```
### Install Java
* Install [AdoptOpenJDK](https://adoptopenjdk.net/installation.html#x64_linux-jdk) for Linux
```
# Download tarball you need
# Unzip the tarball
tar xzf OpenJDK11U-jdk_x64_linux_hotspot_x.y.z.tar.gz
# Move the extracted JDK directory (its actual name may differ, e.g. jdk-11.x.y+z) into /usr/lib/jvm
sudo mv OpenJDK11U-jdk_x64_linux_hotspot_x.y.z \
/usr/lib/jvm/OpenJDK11U-jdk_x64_linux_hotspot_x.y.z
sudo apt install update-java-alternatives
# Check alternative JDK
update-java-alternatives -l
adoptopenjdk-11-hotspot-amd64 1111 /usr/lib/jvm/adoptopenjdk-11-hotspot-amd64
adoptopenjdk-14-hotspot-amd64 1141 /usr/lib/jvm/adoptopenjdk-14-hotspot-amd64
adoptopenjdk-8-hotspot-amd64 1081 /usr/lib/jvm/adoptopenjdk-8-hotspot-amd64
# Switch JDK
update-java-alternatives -s adoptopenjdk-xx-hotspot-amd64
```
### Other useful tools
- Office
- I prefer [WPS Office](http://wps-community.org). Rename the template files, because the template names contain Chinese characters and they may cause problems later.
- Dictionary
- Offline Dictionary
```
## Install dict client & server (dictd)
sudo apt install -y dict
sudo apt install -y dictd
## Install dictionary libraries
sudo apt-get install dict-gcide
sudo apt-get install dict-wn
sudo apt-get install dict-devil
sudo apt-get install dict-moby-thesaurus
```
- Media
- VLC
```
sudo apt install vlc
```
- Spotify
```bash
sudo snap install spotify
```
<file_sep>+++
title = "Java Note - 2: Concurrency"
description="Common good practice for Java Concurrency programming"
+++
## Thread
### Join
* The join method allows one thread to wait for the completion of another. join responds to an interrupt by exiting with an InterruptedException.
* Demo code of thread join
```java
public class JoinDemo implements Runnable {
private Random rand = new Random(System.currentTimeMillis());
public void run() {
//simulate some CPU expensive task
for (int i = 0; i < 100000000; i++) {
rand.nextInt();
}
System.out.println("[" + Thread.currentThread().getName() + "] finished .");
}
public static void main(String[] args) throws InterruptedException {
Thread[] threads = new Thread[5];
for (int i = 0; i < threads.length; i++) {
threads[i] = new Thread(new JoinDemo(), "joinThread " + i);
threads[i].start();
}
for (int i = 0; i < threads.length; i++) {
threads[i].join();
}
System.out.println("["
+ Thread.currentThread().getName()
+ "] All -threads have finished.");
}
}
```
### Common problems of multithreaded programs
* When there are many threads running, the exact sequence in which they are executed depends not only on the thread
configuration, like priority, but also on the available CPU resources and the way the scheduler chooses the next thread to execute.
Although the behavior of the scheduler is completely deterministic, it is hard to predict which thread executes
at a given point in time. This makes access to shared resources critical, as it is hard to predict which thread will be the first
to try to access them.
* The sample code below, without synchronization, shows what the problem is. If you run it, you may get output different from mine here. This is the common thread-safety issue in multi-threaded programs.
```java
public class NotSyncCounter implements Runnable {
private static int counter = 0;
public void run() {
while (counter < 10) {
System.out.println("["
+ Thread.currentThread().getName()
+ "] - before: " + counter);
counter++;
System.out.println("["
+ Thread.currentThread().getName()
+ "] - after: " + counter);
}
}
public static void main(String[] args) throws InterruptedException {
Thread[] threads = new Thread[5];
for (int i = 0; i < threads.length; i++) {
threads[i] = new Thread(new NotSyncCounter(), " - thread-" + i);
threads[i].start();
}
for (int i = 0; i < threads.length; i++) {
threads[i].join();
}
}
}
//Possible output:
// [ - thread-2] - before: 0
// [ - thread-1] - before: 0
// [ - thread-4] - before: 0
// [ - thread-3] - before: 0
// [ - thread-0] - before: 0
// [ - thread-3] - after: 4
// [ - thread-3] - before: 5
// [ - thread-4] - after: 3
// [ - thread-1] - after: 2
// [ - thread-1] - before: 6
// [ - thread-1] - after: 7
// [ - thread-2] - after: 1
// [ - thread-1] - before: 7
// [ - thread-4] - before: 6
// [ - thread-4] - after: 9
// [ - thread-4] - before: 9
// [ - thread-3] - after: 6
// [ - thread-0] - after: 5
// [ - thread-4] - after: 10
// [ - thread-1] - after: 8
// [ - thread-2] - before: 7
// [ - thread-2] - after: 11
```
* To solve the problem, Java provides the `synchronized` keyword for handling multi-threaded programs.
* Demo code of `synchronized` to solve the problem on above sample.
```java
public class SyncCounter implements Runnable {
private static int counter = 0;
public void run() {
while (counter < 10) {
synchronized (SyncCounter.class) {
System.out.println("["
+ Thread.currentThread().getName()
+ "] - before: " + counter);
counter++;
System.out.println("["
+ Thread.currentThread().getName()
+ "] - after: " + counter);
}
}
}
public static void main(String[] args) throws InterruptedException {
Thread[] threads = new Thread[5];
for (int i = 0; i < threads.length; i++) {
threads[i] = new Thread(new SyncCounter(), " - thread-" + i);
threads[i].start();
}
for (int i = 0; i < threads.length; i++) {
threads[i].join();
}
}
}
```
## Deadlock
In general, the following requirements for a deadlock can be identified (a minimal sketch illustrating them follows the list):
* Mutual exclusion: There is a resource which can be accessed only by one thread at any point in time.
* Resource holding: While having locked one resource, the thread tries to acquire another lock on some other exclusive resource.
* No preemption: There is no mechanism, which frees the resource if one threads holds the lock for a specific period of time.
* Circular wait: During runtime a constellation occurs in which two (or more) threads are each waiting on the other thread to free a resource that it has locked.
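A minimal sketch that makes these conditions, in particular the circular wait, concrete: two threads acquire the same two locks in opposite order, so each ends up waiting forever for the lock the other one holds.
```java
public class DeadlockDemo {
    private static final Object lockA = new Object();
    private static final Object lockB = new Object();
    public static void main(String[] args) {
        Thread t1 = new Thread(new Runnable() {
            public void run() {
                synchronized (lockA) {
                    sleep(100); // give the other thread time to grab lockB
                    synchronized (lockB) {
                        System.out.println("Thread 1 acquired both locks");
                    }
                }
            }
        });
        Thread t2 = new Thread(new Runnable() {
            public void run() {
                synchronized (lockB) {
                    sleep(100); // give the other thread time to grab lockA
                    synchronized (lockA) {
                        System.out.println("Thread 2 acquired both locks");
                    }
                }
            }
        });
        t1.start();
        t2.start();
        // Both worker threads deadlock; the JVM never exits because neither can finish.
    }
    private static void sleep(long millis) {
        try {
            Thread.sleep(millis);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}
```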
* Monitor with wait and notify
```java
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
public class SyncWaitNotfiyDemo {
private static final Queue <Integer>queue = new ConcurrentLinkedQueue<Integer>();
public Integer getNextInt() {
Integer retVal = null;
synchronized (queue) {
try {
while (queue.isEmpty()) {
queue.wait();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
retVal = queue.poll();
}
return retVal;
}
public synchronized void putInt(Integer value) {
synchronized (queue) {
queue.add(value);
queue.notify();
}
}
public static void main(String[] args) throws InterruptedException {
final SyncWaitNotfiyDemo queue = new SyncWaitNotfiyDemo();
Thread thread1 = new Thread(new Runnable() {
public void run() {
for (int i = 0; i < 10; i++) {
queue.putInt(i);
}
}
});
Thread thread2 = new Thread(new Runnable() {
public void run() {
for (int i = 0; i < 10; i++) {
Integer nextInt = queue.getNextInt();
System.out.println("Next int: " + nextInt);
}
}
});
thread1.start();
thread2.start();
thread1.join();
thread2.join();
}
}
```
## Useful concurrent collections
### ConcurrentHashMap
> `ConcurrentHashMap` is undoubtedly the most popular collection class introduced in Java 5 and most of us are already using it. It provides a concurrent alternative to Hashtable or synchronized Map classes, with the aim of supporting a higher level of concurrency by implementing fine-grained locking. Multiple readers can access the Map concurrently while a portion of the Map gets locked for write operations, depending upon the concurrency level of the Map. It also provides better scalability than its synchronized counterparts. Iterators of `ConcurrentHashMap` are fail-safe and don't throw ConcurrentModificationException, thus eliminating another requirement of locking during iteration, which results in further scalability and performance.
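As a small illustration of the points above, here is a minimal sketch (assuming Java 8+ for merge()): several threads update the same map without any external locking, and iterating over it never throws ConcurrentModificationException.
```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class ConcurrentHashMapDemo {
    public static void main(String[] args) throws InterruptedException {
        final ConcurrentHashMap<String, Integer> hits = new ConcurrentHashMap<>();
        // Several writer threads update the same map without any external locking
        Thread[] writers = new Thread[4];
        for (int i = 0; i < writers.length; i++) {
            writers[i] = new Thread(() -> {
                for (int j = 0; j < 1000; j++) {
                    // merge() is an atomic "insert or add" for a single key (Java 8+)
                    hits.merge("page-" + (j % 10), 1, Integer::sum);
                }
            });
            writers[i].start();
        }
        for (Thread t : writers) {
            t.join();
        }
        // Iterators are weakly consistent: no ConcurrentModificationException is thrown
        for (Map.Entry<String, Integer> e : hits.entrySet()) {
            System.out.println(e.getKey() + " = " + e.getValue());
        }
        // With 4 writers x 1000 increments over 10 keys, every value should end up as 400
    }
}
```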
### CopyOnWriteArrayList and CopyOnWriteArraySet
> `CopyOnWriteArrayList` is a concurrent alternative to a synchronized List. It provides better concurrency than a synchronized List by allowing multiple concurrent readers and replacing the whole list on a write operation. Yes, write operations are costly on `CopyOnWriteArrayList`, but it performs better when there are multiple readers and iteration is required more often than writing. Since the `CopyOnWriteArrayList` iterator also doesn't throw ConcurrentModificationException, it eliminates the need to lock the collection during iteration. Remember that both `ConcurrentHashMap` and `CopyOnWriteArrayList` don't provide the same level of locking as synchronized collections and achieve thread-safety through their locking and mutability strategies. So they perform better if the requirements suit their nature. Similarly, `CopyOnWriteArraySet` is a concurrent replacement for a synchronized Set.
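A minimal sketch of the read-mostly use case: the iterator works on the snapshot taken when iteration starts, so a concurrent writer never causes a ConcurrentModificationException (its additions are simply not visible to the ongoing iteration).
```java
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
public class CopyOnWriteDemo {
    public static void main(String[] args) throws InterruptedException {
        final List<String> listeners = new CopyOnWriteArrayList<>();
        listeners.add("listener-1");
        listeners.add("listener-2");
        // A writer thread keeps adding elements while the main thread iterates
        Thread writer = new Thread(() -> {
            for (int i = 3; i <= 5; i++) {
                listeners.add("listener-" + i);
            }
        });
        writer.start();
        // The iterator uses the snapshot taken when iteration started,
        // so the concurrent writes never cause a ConcurrentModificationException
        for (String listener : listeners) {
            System.out.println("notifying " + listener);
        }
        writer.join();
        System.out.println("final size: " + listeners.size());
    }
}
```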
### BlockingQueue and `Deque`
> `BlockingQueue` makes it easy to implement the producer-consumer design pattern by providing built-in blocking support for the put() and take() methods. put() will block if the queue is full, while take() will block if the queue is empty. The Java 5 API provides two concrete implementations of `BlockingQueue` in the form of `ArrayBlockingQueue` and `LinkedBlockingQueue`, both of which implement FIFO ordering of elements. ArrayBlockingQueue is backed by an array and is bounded in nature, while `LinkedBlockingQueue` is optionally bounded. Consider using `BlockingQueue` to solve the producer-consumer problem in Java instead of writing your own wait-notify code. Java 5 also provides `PriorityBlockingQueue`, another implementation of `BlockingQueue` which is ordered by priority and useful if you want to process elements in an order other than FIFO.
> The `Deque` interface was added in Java 6 and extends the Queue interface to support insertion and removal from both ends of the queue, referred to as head and tail. Java 6 also provides concurrent implementations of `Deque` like ArrayDeque and LinkedBlockingDeque. A `Deque` can be used efficiently to increase parallelism in a program by allowing a set of worker threads to help each other, taking some of the workload from another thread by utilizing the double-ended consumption property of `Deque`. So if every thread has its own task queue and consumes from the head, a helper thread can also share some of the workload by consuming from the tail.
### ConcurrentSkipListMap and ConcurrentSkipListSet
> Just like `ConcurrentHashMap` provides a concurrent alternative to a synchronized HashMap, ConcurrentSkipListMap and ConcurrentSkipListSet provide concurrent alternatives to the synchronized versions of SortedMap and SortedSet. For example, instead of using TreeMap or TreeSet wrapped inside a synchronized collection, you can consider using ConcurrentSkipListMap or ConcurrentSkipListSet from the java.util.concurrent package. They also implement NavigableMap and NavigableSet to add additional navigation methods.
### Synchronizer
## Counting Semaphore
* A counting Semaphore in Java maintains a specified number of passes or permits. In order to access a shared resource, the current thread must acquire a permit. If the permits are already exhausted by other threads, it can wait until a permit becomes available when a different thread releases one. This concurrency utility can be very useful to implement the producer-consumer design pattern or to implement bounded pools of resources like a thread pool, DB connection pool, etc.
```java
import java.util.concurrent.Semaphore;
public class SemaphoreDemo {
Semaphore binary = new Semaphore(1);
public static void main(String args[]) {
final SemaphoreDemo test = new SemaphoreDemo();
new Thread(){
@Override
public void run(){
test.mutualExclusion();
}
}.start();
new Thread(){
@Override
public void run(){
test.mutualExclusion();
}
}.start();
}
private void mutualExclusion() {
try {
binary.acquire();
//mutual exclusive region
System.out.println(Thread.currentThread().getName()
+ " inside mutual exclusive region");
Thread.sleep(1000);
} catch (InterruptedException ie) {
ie.printStackTrace();
} finally {
binary.release();
System.out.println(Thread.currentThread().getName()
+ " outside of mutual exclusive region");
}
}
}
// Output:
// Thread-0 inside mutual exclusive region
// Thread-0 outside of mutual exclusive region
// Thread-1 inside mutual exclusive region
// Thread-1 outside of mutual exclusive region
```
## CountDownLatch
* `CountDownLatch` in Java is a kind of synchronizer which allows one thread to wait for one or more threads before it starts processing. You can also implement the same functionality using the wait and notify mechanism in Java, but it requires a lot of code and getting it right on the first attempt is tricky; with `CountDownLatch` it can be done in just a few lines. `CountDownLatch` also allows flexibility on the number of threads for which the main thread should wait: it can wait for one thread or n threads without much change to the code.
* The difficulty is knowing where to use `CountDownLatch` properly. First, let us figure out how `CountDownLatch` works. Usually the main thread of the application, which calls CountDownLatch.await(), will wait until the count reaches zero or it is interrupted by another thread. All other threads are required to count down by calling CountDownLatch.countDown() once they are completed. One disadvantage of `CountDownLatch` is that it is not reusable once its count reaches zero.
* Sample program requires 3 services namely CacheService, AlertService and ValidationService to be started and ready before application can handle any request.
```java
import java.util.Date;
import java.util.concurrent.CountDownLatch;
import java.util.logging.Level;
import java.util.logging.Logger;
public class CountDownLatchDemo {
public static void main(String args[]) {
final CountDownLatch latch = new CountDownLatch(3);
Thread cacheService = new Thread(new Service("CacheService", 1000, latch));
Thread alertService = new Thread(new Service("AlertService", 1000, latch));
Thread validationService = new Thread(new Service("ValidationService", 1000, latch));
cacheService.start(); //separate thread will initialize CacheService
alertService.start(); //another thread for AlertService initialization
validationService.start();
//count is 3 since we have 3 Threads (Services)
try{
latch.await(); //main thread is waiting on CountDownLatch to finish
System.out.println("All services are up, Application is starting now");
}catch(InterruptedException ie){
ie.printStackTrace();
}
}
}
/**
* Service class which will be executed by Thread using CountDownLatch synchronizer.
*/
class Service implements Runnable{
private final String name;
private final int timeToStart;
private final CountDownLatch latch;
public Service(String name, int timeToStart, CountDownLatch latch){
this.name = name;
this.timeToStart = timeToStart;
this.latch = latch;
}
@Override
public void run() {
try {
Thread.sleep(timeToStart);
} catch (InterruptedException ex) {
Logger.getLogger(Service.class.getName()).log(Level.SEVERE, null, ex);
}
System.out.println( name + " is Up");
latch.countDown(); //reduce count of CountDownLatch by 1
}
}
```
## CyclicBarrier
* `CyclicBarrier` is similar to `CountDownLatch`, which we saw above, and allows multiple threads to wait for each other (a barrier) before proceeding. The difference between `CountDownLatch` and `CyclicBarrier` is also a very popular multi-threading interview question in Java. `CyclicBarrier` is a natural requirement for a concurrent program because it can be used to perform the final part of a task once the individual tasks are completed.
* The demo of `CyclicBarrier` below initializes the `CyclicBarrier` with 3 parties, meaning that in order to cross the barrier, 3 threads need to call the await() method. Each thread calls await() after a short duration, but none of them proceed until all 3 threads have reached the barrier; once they do, the barrier gets broken and each thread continues its execution from that point.
```java
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.logging.Level;
import java.util.logging.Logger;
public class CyclicBarrierDemo {
//Runnable task for each thread
private static class Task implements Runnable {
private CyclicBarrier barrier;
public Task(CyclicBarrier barrier) {
this.barrier = barrier;
}
@Override
public void run() {
try {
System.out.println(Thread.currentThread().getName() + " is waiting on barrier");
barrier.await();
System.out.println(Thread.currentThread().getName() + " has crossed the barrier");
} catch (InterruptedException ex) {
Logger.getLogger(CyclicBarrierDemo.class.getName()).log(Level.SEVERE, null, ex);
} catch (BrokenBarrierException ex) {
Logger.getLogger(CyclicBarrierDemo.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
public static void main(String args[]) {
//creating CyclicBarrier with 3 parties i.e. 3 Threads needs to call await()
final CyclicBarrier cb = new CyclicBarrier(3, new Runnable(){
@Override
public void run(){
//This task will be executed once all thread reaches barrier
System.out.println("All parties are arrived at barrier, lets play");
}
});
//starting each of thread
Thread t1 = new Thread(new Task(cb), "Thread 1");
Thread t2 = new Thread(new Task(cb), "Thread 2");
Thread t3 = new Thread(new Task(cb), "Thread 3");
t1.start();
t2.start();
t3.start();
}
}
// Output:
// Thread 1 is waiting on barrier
// Thread 3 is waiting on barrier
// Thread 2 is waiting on barrier
// All parties have arrived at barrier, lets play
// Thread 3 has crossed the barrier
// Thread 1 has crossed the barrier
// Thread 2 has crossed the barrier
```
## Producer / Consumer pattern
* The producer-consumer design pattern is a classic concurrency or threading pattern which reduces coupling between
producer and consumer by separating the identification of work from the execution of work. In the producer-consumer design pattern, a shared queue is used to control the flow, and this separation allows you to code the producer and consumer separately.
* It is everywhere in real life and depicts coordination and collaboration. For example, one person is preparing food (producer) while another one is serving food (consumer); both use a shared table for putting down and picking up food plates. The producer (the person preparing food) will wait if the table is full, and the consumer (the person serving food) will wait if the table is empty. The table is the shared object here. In the Java library, the Executor framework itself implements the producer-consumer design pattern by separating the responsibility for the addition of tasks from their execution.
```java
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.logging.Level;
import java.util.logging.Logger;
public class ProducerConsumerPattern {
public static void main(String args[]){
//Creating shared object
BlockingQueue sharedQueue = new LinkedBlockingQueue();
//Creating Producer and Consumer Thread
Thread prodThread = new Thread(new Producer(sharedQueue));
Thread consThread = new Thread(new Consumer(sharedQueue));
//Starting producer and Consumer thread
prodThread.start();
consThread.start();
}
}
//Producer Class in java
class Producer implements Runnable {
private final BlockingQueue sharedQueue;
public Producer(BlockingQueue sharedQueue) {
this.sharedQueue = sharedQueue;
}
@Override
public void run() {
for(int i=0; i<10; i++){
try {
System.out.println("Produced: " + i);
sharedQueue.put(i);
} catch (InterruptedException ex) {
Logger.getLogger(Producer.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
}
//Consumer Class in Java
class Consumer implements Runnable{
private final BlockingQueue sharedQueue;
public Consumer (BlockingQueue sharedQueue) {
this.sharedQueue = sharedQueue;
}
@Override
public void run() {
while(true){
try {
System.out.println("Consumed: "+ sharedQueue.take());
} catch (InterruptedException ex) {
Logger.getLogger(Consumer.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
}
// Output:
// Produced: 0
// Produced: 1
// Consumed: 0
// Produced: 2
// Consumed: 1
// Produced: 3
// Consumed: 2
// Produced: 4
// Consumed: 3
// Produced: 5
// Consumed: 4
// Produced: 6
// Consumed: 5
// Produced: 7
// Consumed: 6
// Produced: 8
// Consumed: 7
// Produced: 9
// Consumed: 8
// Consumed: 9
```
## Executor -- Thread Pool
* Java 1.5 introduced thread pools in the form of the Executor framework, a full-featured built-in thread pool framework which allows a Java programmer to decouple the submission of a task from its execution. The Executor framework also provides different kinds of thread pools, e.g. `SingleThreadExecutor`, which creates just one worker thread, or `CachedThreadPool`, which creates worker threads as and when necessary.
* Demo of thread pool
```java
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
public class ThreadPoolDemo {
public static void main(String args[]) {
ExecutorService service = Executors.newFixedThreadPool(10);
for (int i =0; i<100; i++){
service.submit(new Task(i));
}
}
}
final class Task implements Runnable{
private int taskId;
public Task(int id){
this.taskId = id;
}
@Override
public void run() {
System.out.println("Task ID : "
+ this.taskId +" performed by "
+ Thread.currentThread().getName());
}
}
// Output:
// Task ID : 0 performed by pool-1-thread-1
// Task ID : 7 performed by pool-1-thread-8
// Task ID : 8 performed by pool-1-thread-9
// Task ID : 6 performed by pool-1-thread-7
// Task ID : 4 performed by pool-1-thread-5
// Task ID : 5 performed by pool-1-thread-6
// Task ID : 3 performed by pool-1-thread-4
// Task ID : 1 performed by pool-1-thread-2
// ...
```
## Submit(Runnable)
> The submit(Runnable) method also takes a Runnable implementation, but returns a Future object. This Future object can be used to check if the Runnable has finished executing.
## Submit(Callable)
> The submit(Callable) method is similar to the submit(Runnable) method except for the type of parameter it takes. The Callable instance is very similar to a Runnable except that its call() method can return a result.
## InvokeAny()
> The invokeAny() method takes a collection of Callable objects, or subinterfaces of Callable. If one of the tasks completes (or throws an exception), the rest of the Callables are cancelled.
## InvokeAll()
> The invokeAll() method invokes all of the Callable objects and returns a list of Future objects. Keep in mind that a task might finish due to an exception, so it may not have "succeeded". There is no way on a Future to tell the difference.
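The demo below only exercises submit() and invokeAny(), so here is a minimal invokeAll() sketch with the same kind of Callable tasks (written with Java 8 lambdas):
```java
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
public class InvokeAllDemo {
    public static void main(String[] args) throws InterruptedException, ExecutionException {
        ExecutorService executorService = Executors.newFixedThreadPool(3);
        List<Callable<String>> callables = Arrays.asList(
                () -> "Task 1",
                () -> "Task 2",
                () -> "Task 3");
        // invokeAll blocks until every task has completed (or failed),
        // then returns one Future per submitted Callable, in the same order
        List<Future<String>> futures = executorService.invokeAll(callables);
        for (Future<String> future : futures) {
            // get() rethrows an ExecutionException if the corresponding task failed
            System.out.println("result = " + future.get());
        }
        executorService.shutdown();
    }
}
```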
## Demo of submit, InvokeAny
```java
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import java.util.concurrent.ExecutionException;
import java.util.HashSet;
import java.util.Set;
public class SubmitInvokeDemo {
public static void main(String args[]) throws InterruptedException, ExecutionException {
ExecutorService executorService = Executors.newSingleThreadExecutor();
Future future = executorService.submit(new Runnable() {
public void run() {
System.out.println("Asynchronous task");
}
});
future.get(); //returns null if the task has finished correctly.
Set<Callable<String>> callables = new HashSet<Callable<String>>();
callables.add(new Callable<String>() {
public String call() throws Exception {
return "Task 1";
}
});
callables.add(new Callable<String>() {
public String call() throws Exception {
return "Task 2";
}
});
callables.add(new Callable<String>() {
public String call() throws Exception {
return "Task 3";
}
});
String result = executorService.invokeAny(callables);
System.out.println("result = " + result);
executorService.shutdown();
}
}
```
### Join and Fork
* Here is an introduction to the Fork/Join framework, which has been part of the JDK since version 1.7.
## Join and Fork with Executor Service
* The demo code submits our tasks to the ExecutorService and then uses the returned `Future` instances to wait for the results. With a normal `ExecutorService` you have to block the current thread while waiting for a result. If we provided only as many threads to the pool as we have CPUs available, the program would run out of free threads and hang indefinitely.
```java
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import java.util.concurrent.ExecutionException;
import java.util.Random;
public class FindMinTask implements Callable<Integer> {
private int[] numbers;
private int startIndex;
private int endIndex;
private ExecutorService executorService;
public FindMinTask(
ExecutorService executorService,
int[] numbers, int startIndex, int endIndex) {
this.executorService = executorService;
this.numbers = numbers;
this.startIndex = startIndex;
this.endIndex = endIndex;
}
public Integer call() throws Exception {
int sliceLength = (endIndex - startIndex) + 1;
if (sliceLength > 2) {
FindMinTask lowerFindMin = new FindMinTask(
executorService, numbers, startIndex,
startIndex + (sliceLength / 2) - 1);
Future<Integer> futureLowerFindMin = executorService.submit(lowerFindMin);
FindMinTask upperFindMin = new FindMinTask(
executorService, numbers, startIndex + (sliceLength / 2),
endIndex);
Future<Integer> futureUpperFindMin = executorService.submit(upperFindMin);
return Math.min(futureLowerFindMin.get(), futureUpperFindMin.get());
} else {
return Math.min(numbers[startIndex], numbers[endIndex]);
}
}
public static void main(String[] args)
throws InterruptedException, ExecutionException {
int[] numbers = new int[100];
Random random = new Random(System.currentTimeMillis());
for (int i = 0; i < numbers.length; i++) {
numbers[i] = random.nextInt(100);
}
ExecutorService executorService = Executors.newFixedThreadPool(64);
Future<Integer> futureResult = executorService
.submit(new FindMinTask(
executorService,
numbers, 0, numbers.length - 1));
System.out.println(futureResult.get());
executorService.shutdown();
}
}
```
## Join and Fork with ForkJoinPool
* The `ForkJoinPool` implements the already mentioned work-stealing strategy, i.e. every time a running thread has to wait for some result, the thread removes the current task from the work queue and executes some other task ready to run. This way the current thread is not blocked and can be used to execute other tasks. Once the result for the originally suspended task has been computed, the task gets executed again and the join() method returns the result. This is an important difference between `ForkJoinPool` and `ExecutorService`.
* Demo of ForkJoinPool
```java
import java.awt.*;
import java.awt.image.*;
import java.io.*;
import java.util.concurrent.ForkJoinPool;
import javax.imageio.*;
import java.util.concurrent.RecursiveAction;
public class GrayscaleImageAction extends RecursiveAction {
private static final long serialVersionUID = 1L;
private int row;
private BufferedImage bufferedImage;
public GrayscaleImageAction(int row, BufferedImage bufferedImage) {
this.row = row;
this.bufferedImage = bufferedImage;
}
@Override
protected void compute() {
for (int column = 0; column < bufferedImage.getWidth(); column++) {
int rgb = bufferedImage.getRGB(column, row);
int r = (rgb >> 16) & 0xFF;
int g = (rgb >> 8) & 0xFF;
int b = (rgb & 0xFF);
int gray = (int) (0.2126 * (float) r + 0.7152 * (float) g + 0.0722 * (float) b);
gray = (gray << 16) + (gray << 8) + gray;
bufferedImage.setRGB(column, row, gray);
}
}
public static void main(String[] args) throws IOException {
ForkJoinPool pool = new ForkJoinPool(
Runtime.getRuntime().availableProcessors());
BufferedImage bufferedImage = ImageIO.read(new File(args[0]));
for (int row = 0; row < bufferedImage.getHeight(); row++) {
GrayscaleImageAction action = new GrayscaleImageAction(
row, bufferedImage);
pool.execute(action);
}
pool.shutdown();
ImageIO.write(bufferedImage, "jpg", new File(args[1]));
}
}
```<file_sep>+++
title="Packaging"
description="Package, Visibility, Pitfalls"
weight=8
+++
### Package
* Package is a way to structure code: a program is constructed as a “package” (often abbreviated as pkg), which may use facilities from other packages. Every go-file belongs to one (and only one) package (like a library or namespace in other languages). Many different .go files can belong to one package, so the filename(s) and package name are generally not the same.
* The package to which the code-file belongs must be indicated on the first line, e.g.: package main . A standalone executable belongs to package main. Each Go application contains one package called main. An application can consist of different packages, but even if you use only package main, you don’t have to stuff all code in 1 big file: you can make a number of smaller files each having package main as the 1st code line. If you compile a source file with a package name other than main, like pack1, the object file is stored in pack1.a; a package name is written in lowercase letters.
#### Standard library
* The Go installation contains a number of ready-to-use packages, which form the standard library.
* To build a program, the packages, and the files within them, must be compiled in the correct order. Package dependencies determine the order in which to build packages.
#### Import
* A Go program is created by linking together a set of packages through the import keyword.
### VISIBILITY RULE
* When the identifier ( of a constant, variable, type, function, struct field, ...) starts with an uppercase letter, like Group1, then the ‘object’ with this identifier is visible in code outside the package (thus available to client-programs, ‘importers’ of the package), it is said to be exported (like public in OO languages). Identifiers which start with a lowercase letter are not visible outside the package, but they are visible and usable in the whole package (like private).
#### package alias
* A package can, if this is useful (for shortening, name conflicts, ...), also be given another name (an alias), like: import fm “fmt” .
<file_sep>+++
title = "C Lecture - 3"
description = "Exercise 41~ 48"
+++
Author: <NAME>
All content comes from Zed's [Lecture Repository](https://github.com/zedshaw/learn-c-the-hard-way-lectures.git) and [Libraries Repository](https://github.com/zedshaw/liblcthw). All credit goes to Zed.
### Exercise 41 Project devpkg
.\ex41\devpkg
.\ex41\devpkg\commands.c
```c
#include <apr_uri.h>
#include <apr_fnmatch.h>
#include <unistd.h>
#include "commands.h"
#include "dbg.h"
#include "bstrlib.h"
#include "db.h"
#include "shell.h"
int Command_depends(apr_pool_t * p, const char *path)
{
FILE *in = NULL;
bstring line = NULL;
in = fopen(path, "r");
check(in != NULL, "Failed to open downloaded depends: %s", path);
for (line = bgets((bNgetc) fgetc, in, '\n');
line != NULL;
line = bgets((bNgetc) fgetc, in, '\n'))
{
btrimws(line);
log_info("Processing depends: %s", bdata(line));
int rc = Command_install(p, bdata(line), NULL, NULL, NULL);
check(rc == 0, "Failed to install: %s", bdata(line));
bdestroy(line);
}
fclose(in);
return 0;
error:
if (line) bdestroy(line);
if (in) fclose(in);
return -1;
}
int Command_fetch(apr_pool_t * p, const char *url, int fetch_only)
{
apr_uri_t info = {.port = 0 };
int rc = 0;
const char *depends_file = NULL;
apr_status_t rv = apr_uri_parse(p, url, &info);
check(rv == APR_SUCCESS, "Failed to parse URL: %s", url);
if (apr_fnmatch(GIT_PAT, info.path, 0) == APR_SUCCESS) {
rc = Shell_exec(GIT_SH, "URL", url, NULL);
check(rc == 0, "git failed.");
} else if (apr_fnmatch(DEPEND_PAT, info.path, 0) == APR_SUCCESS) {
check(!fetch_only, "No point in fetching a DEPENDS file.");
if (info.scheme) {
depends_file = DEPENDS_PATH;
rc = Shell_exec(CURL_SH, "URL", url, "TARGET", depends_file,
NULL);
check(rc == 0, "Curl failed.");
} else {
depends_file = info.path;
}
// recursively process the devpkg list
log_info("Building according to DEPENDS: %s", url);
rv = Command_depends(p, depends_file);
check(rv == 0, "Failed to process the DEPENDS: %s", url);
// this indicates that nothing needs to be done
return 0;
} else if (apr_fnmatch(TAR_GZ_PAT, info.path, 0) == APR_SUCCESS) {
if (info.scheme) {
rc = Shell_exec(CURL_SH,
"URL", url, "TARGET", TAR_GZ_SRC, NULL);
check(rc == 0, "Failed to curl source: %s", url);
}
rv = apr_dir_make_recursive(BUILD_DIR,
APR_UREAD | APR_UWRITE |
APR_UEXECUTE, p);
check(rv == APR_SUCCESS, "Failed to make directory %s",
BUILD_DIR);
rc = Shell_exec(TAR_SH, "FILE", TAR_GZ_SRC, NULL);
check(rc == 0, "Failed to untar %s", TAR_GZ_SRC);
} else if (apr_fnmatch(TAR_BZ2_PAT, info.path, 0) == APR_SUCCESS) {
if (info.scheme) {
rc = Shell_exec(CURL_SH, "URL", url, "TARGET", TAR_BZ2_SRC,
NULL);
check(rc == 0, "Curl failed.");
}
apr_status_t rc = apr_dir_make_recursive(BUILD_DIR,
APR_UREAD | APR_UWRITE
| APR_UEXECUTE, p);
check(rc == 0, "Failed to make directory %s", BUILD_DIR);
rc = Shell_exec(TAR_SH, "FILE", TAR_BZ2_SRC, NULL);
check(rc == 0, "Failed to untar %s", TAR_BZ2_SRC);
} else {
sentinel("Don't now how to handle %s", url);
}
// indicates that an install needs to actually run
return 1;
error:
return -1;
}
int Command_build(apr_pool_t * p, const char *url,
const char *configure_opts, const char *make_opts,
const char *install_opts)
{
int rc = 0;
check(access(BUILD_DIR, X_OK | R_OK | W_OK) == 0,
"Build directory doesn't exist: %s", BUILD_DIR);
// actually do an install
if (access(CONFIG_SCRIPT, X_OK) == 0) {
log_info("Has a configure script, running it.");
rc = Shell_exec(CONFIGURE_SH, "OPTS", configure_opts, NULL);
check(rc == 0, "Failed to configure.");
}
rc = Shell_exec(MAKE_SH, "OPTS", make_opts, NULL);
check(rc == 0, "Failed to build.");
rc = Shell_exec(INSTALL_SH,
"TARGET", install_opts ? install_opts : "install",
NULL);
check(rc == 0, "Failed to install.");
rc = Shell_exec(CLEANUP_SH, NULL);
check(rc == 0, "Failed to cleanup after build.");
rc = DB_update(url);
check(rc == 0, "Failed to add this package to the database.");
return 0;
error:
return -1;
}
int Command_install(apr_pool_t * p, const char *url,
const char *configure_opts, const char *make_opts,
const char *install_opts)
{
int rc = 0;
check(Shell_exec(CLEANUP_SH, NULL) == 0,
"Failed to cleanup before building.");
rc = DB_find(url);
check(rc != -1, "Error checking the install database.");
if (rc == 1) {
log_info("Package %s already installed.", url);
return 0;
}
rc = Command_fetch(p, url, 0);
if (rc == 1) {
rc = Command_build(p, url, configure_opts, make_opts,
install_opts);
check(rc == 0, "Failed to build: %s", url);
} else if (rc == 0) {
// no install needed
log_info("Depends successfully installed: %s", url);
} else {
// had an error
sentinel("Install failed: %s", url);
}
Shell_exec(CLEANUP_SH, NULL);
return 0;
error:
Shell_exec(CLEANUP_SH, NULL);
return -1;
}
```
.\ex41\devpkg\commands.h
```c
#ifndef _commands_h
#define _commands_h
#include <apr_pools.h>
#define DEPENDS_PATH "/tmp/DEPENDS"
#define TAR_GZ_SRC "/tmp/pkg-src.tar.gz"
#define TAR_BZ2_SRC "/tmp/pkg-src.tar.bz2"
#define BUILD_DIR "/tmp/pkg-build"
#define GIT_PAT "*.git"
#define DEPEND_PAT "*DEPENDS"
#define TAR_GZ_PAT "*.tar.gz"
#define TAR_BZ2_PAT "*.tar.bz2"
#define CONFIG_SCRIPT "/tmp/pkg-build/configure"
enum CommandType {
COMMAND_NONE, COMMAND_INSTALL, COMMAND_LIST, COMMAND_FETCH,
COMMAND_INIT, COMMAND_BUILD
};
int Command_fetch(apr_pool_t * p, const char *url, int fetch_only);
int Command_install(apr_pool_t * p, const char *url,
const char *configure_opts, const char *make_opts,
const char *install_opts);
int Command_depends(apr_pool_t * p, const char *path);
int Command_build(apr_pool_t * p, const char *url,
const char *configure_opts, const char *make_opts,
const char *install_opts);
#endif
```
.\ex41\devpkg\db.c
```c
#include <unistd.h>
#include <apr_errno.h>
#include <apr_file_io.h>
#include "db.h"
#include "bstrlib.h"
#include "dbg.h"
static FILE *DB_open(const char *path, const char *mode)
{
return fopen(path, mode);
}
static void DB_close(FILE * db)
{
fclose(db);
}
static bstring DB_load()
{
FILE *db = NULL;
bstring data = NULL;
db = DB_open(DB_FILE, "r");
check(db, "Failed to open database: %s", DB_FILE);
data = bread((bNread) fread, db);
check(data, "Failed to read from db file: %s", DB_FILE);
DB_close(db);
return data;
error:
if (db)
DB_close(db);
if (data)
bdestroy(data);
return NULL;
}
int DB_update(const char *url)
{
if (DB_find(url)) {
log_info("Already recorded as installed: %s", url);
}
FILE *db = DB_open(DB_FILE, "a+");
check(db, "Failed to open DB file: %s", DB_FILE);
bstring line = bfromcstr(url);
bconchar(line, '\n');
int rc = fwrite(line->data, blength(line), 1, db);
check(rc == 1, "Failed to append to the db.");
return 0;
error:
if (db)
DB_close(db);
return -1;
}
int DB_find(const char *url)
{
bstring data = NULL;
bstring line = bfromcstr(url);
int res = -1;
data = DB_load();
check(data, "Failed to load: %s", DB_FILE);
if (binstr(data, 0, line) == BSTR_ERR) {
res = 0;
} else {
res = 1;
}
error: // fallthrough
if (data)
bdestroy(data);
if (line)
bdestroy(line);
return res;
}
int DB_init()
{
apr_pool_t *p = NULL;
apr_pool_initialize();
apr_pool_create(&p, NULL);
if (access(DB_DIR, W_OK | X_OK) == -1) {
apr_status_t rc = apr_dir_make_recursive(DB_DIR,
APR_UREAD | APR_UWRITE
| APR_UEXECUTE |
APR_GREAD | APR_GWRITE
| APR_GEXECUTE, p);
check(rc == APR_SUCCESS, "Failed to make database dir: %s",
DB_DIR);
}
if (access(DB_FILE, W_OK) == -1) {
FILE *db = DB_open(DB_FILE, "w");
check(db, "Cannot open database: %s", DB_FILE);
DB_close(db);
}
apr_pool_destroy(p);
return 0;
error:
apr_pool_destroy(p);
return -1;
}
int DB_list()
{
bstring data = DB_load();
check(data, "Failed to read load: %s", DB_FILE);
printf("%s", bdata(data));
bdestroy(data);
return 0;
error:
return -1;
}
```
.\ex41\devpkg\db.h
```c
#ifndef _db_h
#define _db_h
#define DB_FILE "/usr/local/.devpkg/db"
#define DB_DIR "/usr/local/.devpkg"
int DB_init();
int DB_list();
int DB_update(const char *url);
int DB_find(const char *url);
#endif
```
.\ex41\devpkg\dbg.h
```c
#ifndef __dbg_h__
#define __dbg_h__
#include <stdio.h>
#include <errno.h>
#include <string.h>
#ifdef NDEBUG
#define debug(M, ...)
#else
#define debug(M, ...) fprintf(stderr, "DEBUG %s:%d: " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#endif
#define clean_errno() (errno == 0 ? "None" : strerror(errno))
#define log_err(M, ...) fprintf(stderr,\
"[ERROR] (%s:%d: errno: %s) " M "\n", __FILE__, __LINE__,\
clean_errno(), ##__VA_ARGS__)
#define log_warn(M, ...) fprintf(stderr,\
"[WARN] (%s:%d: errno: %s) " M "\n",\
__FILE__, __LINE__, clean_errno(), ##__VA_ARGS__)
#define log_info(M, ...) fprintf(stderr, "[INFO] (%s:%d) " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#define check(A, M, ...) if(!(A)) {\
log_err(M, ##__VA_ARGS__); errno=0; goto error; }
#define sentinel(M, ...) { log_err(M, ##__VA_ARGS__);\
errno=0; goto error; }
#define check_mem(A) check((A), "Out of memory.")
#define check_debug(A, M, ...) if(!(A)) { debug(M, ##__VA_ARGS__);\
errno=0; goto error; }
#endif
```
.\ex41\devpkg\devpkg.c
```c
#include <stdio.h>
#include <apr_general.h>
#include <apr_getopt.h>
#include <apr_strings.h>
#include <apr_lib.h>
#include "dbg.h"
#include "db.h"
#include "commands.h"
int main(int argc, const char *argv[])
{
apr_pool_t *p = NULL;
apr_pool_initialize();
apr_pool_create(&p, NULL);
apr_getopt_t *opt;
apr_status_t rv;
char ch = '\0';
const char *optarg = NULL;
const char *config_opts = NULL;
const char *install_opts = NULL;
const char *make_opts = NULL;
const char *url = NULL;
enum CommandType request = COMMAND_NONE;
rv = apr_getopt_init(&opt, p, argc, argv);
while (apr_getopt(opt, "I:Lc:m:i:d:SF:B:", &ch, &optarg) ==
APR_SUCCESS) {
switch (ch) {
case 'I':
request = COMMAND_INSTALL;
url = optarg;
break;
case 'L':
request = COMMAND_LIST;
break;
case 'c':
config_opts = optarg;
break;
case 'm':
make_opts = optarg;
break;
case 'i':
install_opts = optarg;
break;
case 'S':
request = COMMAND_INIT;
break;
case 'F':
request = COMMAND_FETCH;
url = optarg;
break;
case 'B':
request = COMMAND_BUILD;
url = optarg;
break;
}
}
switch (request) {
case COMMAND_INSTALL:
check(url, "You must at least give a URL.");
Command_install(p, url, config_opts, make_opts, install_opts);
break;
case COMMAND_LIST:
DB_list();
break;
case COMMAND_FETCH:
check(url != NULL, "You must give a URL.");
Command_fetch(p, url, 1);
log_info("Downloaded to %s and in /tmp/", BUILD_DIR);
break;
case COMMAND_BUILD:
check(url, "You must at least give a URL.");
Command_build(p, url, config_opts, make_opts, install_opts);
break;
case COMMAND_INIT:
rv = DB_init();
check(rv == 0, "Failed to make the database.");
break;
default:
sentinel("Invalid command given.");
}
return 0;
error:
return 1;
}
```
.\ex41\devpkg\shell.c
```c
#include "shell.h"
#include "dbg.h"
#include <stdarg.h>
int Shell_exec(Shell template, ...)
{
apr_pool_t *p = NULL;
int rc = -1;
apr_status_t rv = APR_SUCCESS;
va_list argp;
const char *key = NULL;
const char *arg = NULL;
int i = 0;
rv = apr_pool_create(&p, NULL);
check(rv == APR_SUCCESS, "Failed to create pool.");
va_start(argp, template);
for (key = va_arg(argp, const char *);
key != NULL; key = va_arg(argp, const char *)) {
arg = va_arg(argp, const char *);
for (i = 0; template.args[i] != NULL; i++) {
if (strcmp(template.args[i], key) == 0) {
template.args[i] = arg;
break; // found it
}
}
}
rc = Shell_run(p, &template);
apr_pool_destroy(p);
va_end(argp);
return rc;
error:
if (p) {
apr_pool_destroy(p);
}
return rc;
}
int Shell_run(apr_pool_t * p, Shell * cmd)
{
apr_procattr_t *attr;
apr_status_t rv;
apr_proc_t newproc;
rv = apr_procattr_create(&attr, p);
check(rv == APR_SUCCESS, "Failed to create proc attr.");
rv = apr_procattr_io_set(attr, APR_NO_PIPE, APR_NO_PIPE,
APR_NO_PIPE);
check(rv == APR_SUCCESS, "Failed to set IO of command.");
rv = apr_procattr_dir_set(attr, cmd->dir);
check(rv == APR_SUCCESS, "Failed to set root to %s", cmd->dir);
rv = apr_procattr_cmdtype_set(attr, APR_PROGRAM_PATH);
check(rv == APR_SUCCESS, "Failed to set cmd type.");
rv = apr_proc_create(&newproc, cmd->exe, cmd->args, NULL, attr, p);
check(rv == APR_SUCCESS, "Failed to run command.");
rv = apr_proc_wait(&newproc, &cmd->exit_code, &cmd->exit_why,
APR_WAIT);
check(rv == APR_CHILD_DONE, "Failed to wait.");
check(cmd->exit_code == 0, "%s exited badly.", cmd->exe);
check(cmd->exit_why == APR_PROC_EXIT, "%s was killed or crashed",
cmd->exe);
return 0;
error:
return -1;
}
Shell CLEANUP_SH = {
.exe = "rm",
.dir = "/tmp",
.args = {"rm", "-rf", "/tmp/pkg-build", "/tmp/pkg-src.tar.gz",
"/tmp/pkg-src.tar.bz2", "/tmp/DEPENDS", NULL}
};
Shell GIT_SH = {
.dir = "/tmp",
.exe = "git",
.args = {"git", "clone", "URL", "pkg-build", NULL}
};
Shell TAR_SH = {
.dir = "/tmp/pkg-build",
.exe = "tar",
.args = {"tar", "-xzf", "FILE", "--strip-components", "1", NULL}
};
Shell CURL_SH = {
.dir = "/tmp",
.exe = "curl",
.args = {"curl", "-L", "-o", "TARGET", "URL", NULL}
};
Shell CONFIGURE_SH = {
.exe = "./configure",
.dir = "/tmp/pkg-build",
.args = {"configure", "OPTS", NULL}
,
};
Shell MAKE_SH = {
.exe = "make",
.dir = "/tmp/pkg-build",
.args = {"make", "OPTS", NULL}
};
Shell INSTALL_SH = {
.exe = "sudo",
.dir = "/tmp/pkg-build",
.args = {"sudo", "make", "TARGET", NULL}
};
```
.\ex41\devpkg\shell.h
```c
#ifndef _shell_h
#define _shell_h
#define MAX_COMMAND_ARGS 100
#include <apr_thread_proc.h>
typedef struct Shell {
const char *dir;
const char *exe;
apr_procattr_t *attr;
apr_proc_t proc;
apr_exit_why_e exit_why;
int exit_code;
const char *args[MAX_COMMAND_ARGS];
} Shell;
int Shell_run(apr_pool_t * p, Shell * cmd);
int Shell_exec(Shell cmd, ...);
extern Shell CLEANUP_SH;
extern Shell GIT_SH;
extern Shell TAR_SH;
extern Shell CURL_SH;
extern Shell CONFIGURE_SH;
extern Shell MAKE_SH;
extern Shell INSTALL_SH;
#endif
```
.\ex41\ex41.1.sh
```bash
set -e
## go somewhere safe
cd /tmp
## get the source to base APR 1.5.2
curl -L -O http://archive.apache.org/dist/apr/apr-1.5.2.tar.gz
## extract it and go into the source
tar -xzvf apr-1.5.2.tar.gz
cd apr-1.5.2
## you need this on OSX Yosemite
touch libtoolT
## configure, make, make install
./configure
make
sudo make install
## reset and cleanup
cd /tmp
rm -rf apr-1.5.2 apr-1.5.2.tar.gz
## do the same with apr-util
curl -L -O http://archive.apache.org/dist/apr/apr-util-1.5.4.tar.gz
## extract
tar -xzvf apr-util-1.5.4.tar.gz
cd apr-util-1.5.4
## you need this on OSX Yosemite
touch libtoolT
## configure, make, make install
./configure --with-apr=/usr/local/apr
## you need that extra parameter to configure because
## apr-util can't really find it because...who knows.
make
sudo make install
#cleanup
cd /tmp
rm -rf apr-util-1.5.4* apr-1.5.2*
```
.\ex41\ex41.2.sh
```bash
mkdir devpkg
cd devpkg
touch README Makefile
```
The Plan
Create a handy little tool called devpkg.
This will be a *lot* of work, so this video is more complete.
Demonstration
I'll demonstrate how devpkg works so you get a better idea.
Read the book's description as well for more details.
The Apache Portable Runtime
Review of the APR and installing it.
The Analysis
Walk through the code, where everything is, and what to watch out for.
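One thing to watch for in shell.c is how Shell_exec() fills in a command template: the Shell struct is passed by value, and each capitalized placeholder in its .args array gets swapped for the value that follows that name in the vararg list. Here's a minimal sketch of calling it (the function name and URL below are just stand-ins, and APR has to be initialized first):
```c
#include <apr_general.h>
#include "shell.h"
#include "dbg.h"

// Run GIT_SH with its "URL" placeholder replaced by a real repository URL.
// Shell_exec receives a copy of the template, so GIT_SH itself stays intact.
int clone_example(void)
{
    apr_initialize();

    // the URL here is only a placeholder for the demo
    int rc = Shell_exec(GIT_SH, "URL",
                        "https://example.com/some-repo.git", NULL);
    check(rc == 0, "git clone failed.");

    apr_terminate();
    return 0;

error:
    apr_terminate();
    return -1;
}
```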
Getting My Code
If you get stuck you can check out the learn-c-the-hard-way-lectures project:
And look in ex41/devpkg for the code.
Extra Credit
* Compare your code to my code available online. Starting with 100%,
remove 1% for each line you got wrong.
* Take the notes.txt file that you previously created and implement your improvements to the code and functionality
of ``devpkg``.
* Write an alternative version of ``devpkg`` using your other
favorite language or the one you think can do this the best. Compare
the two, then improve your *C* version of ``devpkg`` based on what
you've learned.
### Exercise 42 Stacks and Queues
The Plan
Create a Stack and Queue data structure from just the unit tests.
PAUSE!
WARNING! Stop the video now and try to solve this yourself!
I'll show you how I did it after you try it (or you can cheat).
Code Review
.\ex42\queue_tests.c
```c
#include "minunit.h"
#include <lcthw/queue.h>
#include <assert.h>
static Queue *queue = NULL;
char *tests[] = { "test1 data", "test2 data", "test3 data" };
#define NUM_TESTS 3
char *test_create()
{
queue = Queue_create();
mu_assert(queue != NULL, "Failed to create queue.");
return NULL;
}
char *test_destroy()
{
mu_assert(queue != NULL, "Failed to make queue #2");
Queue_destroy(queue);
return NULL;
}
char *test_send_recv()
{
int i = 0;
for (i = 0; i < NUM_TESTS; i++) {
Queue_send(queue, tests[i]);
mu_assert(Queue_peek(queue) == tests[0], "Wrong next value.");
}
mu_assert(Queue_count(queue) == NUM_TESTS, "Wrong count on send.");
QUEUE_FOREACH(queue, cur) {
debug("VAL: %s", (char *)cur->value);
}
for (i = 0; i < NUM_TESTS; i++) {
char *val = Queue_recv(queue);
mu_assert(val == tests[i], "Wrong value on recv.");
}
mu_assert(Queue_count(queue) == 0, "Wrong count after recv.");
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_create);
mu_run_test(test_send_recv);
mu_run_test(test_destroy);
return NULL;
}
RUN_TESTS(all_tests);
```
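For reference, here is one possible queue.h that passes the test above. It's nothing more than a thin wrapper over the List from the earlier exercises, assuming the liblcthw API with List_push, List_shift, List_first, List_count, and LIST_FOREACH(list, first, next, cur):
```c
#ifndef lcthw_Queue_h
#define lcthw_Queue_h

#include <lcthw/list.h>

// a Queue is just a List used in FIFO order
typedef List Queue;

#define Queue_create  List_create
#define Queue_destroy List_destroy
#define Queue_send    List_push     // append to the tail
#define Queue_recv    List_shift    // remove from the head
#define Queue_peek    List_first    // look at the head without removing it
#define Queue_count   List_count
#define QUEUE_FOREACH(Q, C) LIST_FOREACH(Q, first, next, C)

#endif
```
If your List API differs, adjust the macro targets accordingly.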
.\ex42\stack_tests.c
```c
#include "minunit.h"
#include <lcthw/stack.h>
#include <assert.h>
static Stack *stack = NULL;
char *tests[] = { "test1 data", "test2 data", "test3 data" };
#define NUM_TESTS 3
char *test_create()
{
stack = Stack_create();
mu_assert(stack != NULL, "Failed to create stack.");
return NULL;
}
char *test_destroy()
{
mu_assert(stack != NULL, "Failed to make stack #2");
Stack_destroy(stack);
return NULL;
}
char *test_push_pop()
{
int i = 0;
for (i = 0; i < NUM_TESTS; i++) {
Stack_push(stack, tests[i]);
mu_assert(Stack_peek(stack) == tests[i], "Wrong next value.");
}
mu_assert(Stack_count(stack) == NUM_TESTS, "Wrong count on push.");
STACK_FOREACH(stack, cur) {
debug("VAL: %s", (char *)cur->value);
}
for (i = NUM_TESTS - 1; i >= 0; i--) {
char *val = Stack_pop(stack);
mu_assert(val == tests[i], "Wrong value on pop.");
}
mu_assert(Stack_count(stack) == 0, "Wrong count after pop.");
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_create);
mu_run_test(test_push_pop);
mu_run_test(test_destroy);
return NULL;
}
RUN_TESTS(all_tests);
```
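And a matching stack.h that satisfies the stack tests, again assuming the liblcthw List, where List_push and List_pop work on the tail and List_last peeks at it:
```c
#ifndef lcthw_Stack_h
#define lcthw_Stack_h

#include <lcthw/list.h>

// a Stack is just a List used in LIFO order
typedef List Stack;

#define Stack_create  List_create
#define Stack_destroy List_destroy
#define Stack_push    List_push     // push onto the tail
#define Stack_pop     List_pop      // pop from the tail
#define Stack_peek    List_last     // look at the tail without removing it
#define Stack_count   List_count
#define STACK_FOREACH(S, C) LIST_FOREACH(S, first, next, C)

#endif
```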
Extra Credit
* Implement ``Stack`` using ``DArray`` instead of ``List``, but without changing the unit test. That means you'll have to create your own ``STACK_FOREACH``.
### Exercise 43 A Simple Statistics Engine
.\ex43\stats.h
```c
#ifndef lcthw_stats_h
#define lcthw_stats_h
typedef struct Stats {
double sum;
double sumsq;
unsigned long n;
double min;
double max;
} Stats;
Stats *Stats_recreate(double sum, double sumsq, unsigned long n,
double min, double max);
Stats *Stats_create();
double Stats_mean(Stats * st);
double Stats_stddev(Stats * st);
void Stats_sample(Stats * st, double s);
void Stats_dump(Stats * st);
#endif
```
.\ex43\stats.c
```c
#include <math.h>
#include <lcthw/stats.h>
#include <stdlib.h>
#include <lcthw/dbg.h>
Stats *Stats_recreate(double sum, double sumsq, unsigned long n,
double min, double max)
{
Stats *st = malloc(sizeof(Stats));
check_mem(st);
st->sum = sum;
st->sumsq = sumsq;
st->n = n;
st->min = min;
st->max = max;
return st;
error:
return NULL;
}
Stats *Stats_create()
{
return Stats_recreate(0.0, 0.0, 0L, 0.0, 0.0);
}
double Stats_mean(Stats * st)
{
return st->sum / st->n;
}
double Stats_stddev(Stats * st)
{
return sqrt((st->sumsq - (st->sum * st->sum / st->n)) /
(st->n - 1));
}
void Stats_sample(Stats * st, double s)
{
st->sum += s;
st->sumsq += s * s;
if (st->n == 0) {
st->min = s;
st->max = s;
} else {
if (st->min > s)
st->min = s;
if (st->max < s)
st->max = s;
}
st->n += 1;
}
void Stats_dump(Stats * st)
{
fprintf(stderr,
"sum: %f, sumsq: %f, n: %ld, "
"min: %f, max: %f, mean: %f, stddev: %f",
st->sum, st->sumsq, st->n, st->min, st->max, Stats_mean(st),
Stats_stddev(st));
}
```
.\ex43\stats_tests.c
```c
#include "minunit.h"
#include <lcthw/stats.h>
#include <math.h>
const int NUM_SAMPLES = 10;
double samples[] = {
6.1061334, 9.6783204, 1.2747090, 8.2395131, 0.3333483,
6.9755066, 1.0626275, 7.6587523, 4.9382973, 9.5788115
};
Stats expect = {
.sumsq = 425.1641,
.sum = 55.84602,
.min = 0.333,
.max = 9.678,
.n = 10,
};
double expect_mean = 5.584602;
double expect_stddev = 3.547868;
#define EQ(X,Y,N) (round((X) * pow(10, N)) == round((Y) * pow(10, N)))
char *test_operations()
{
int i = 0;
Stats *st = Stats_create();
mu_assert(st != NULL, "Failed to create stats.");
for (i = 0; i < NUM_SAMPLES; i++) {
Stats_sample(st, samples[i]);
}
Stats_dump(st);
mu_assert(EQ(st->sumsq, expect.sumsq, 3), "sumsq not valid");
mu_assert(EQ(st->sum, expect.sum, 3), "sum not valid");
mu_assert(EQ(st->min, expect.min, 3), "min not valid");
mu_assert(EQ(st->max, expect.max, 3), "max not valid");
    mu_assert(EQ(st->n, expect.n, 3), "n not valid");
mu_assert(EQ(expect_mean, Stats_mean(st), 3), "mean not valid");
mu_assert(EQ(expect_stddev, Stats_stddev(st), 3),
"stddev not valid");
return NULL;
}
char *test_recreate()
{
Stats *st = Stats_recreate(
expect.sum, expect.sumsq, expect.n, expect.min, expect.max);
mu_assert(st->sum == expect.sum, "sum not equal");
mu_assert(st->sumsq == expect.sumsq, "sumsq not equal");
mu_assert(st->n == expect.n, "n not equal");
mu_assert(st->min == expect.min, "min not equal");
mu_assert(st->max == expect.max, "max not equal");
mu_assert(EQ(expect_mean, Stats_mean(st), 3), "mean not valid");
mu_assert(EQ(expect_stddev, Stats_stddev(st), 3),
"stddev not valid");
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_operations);
mu_run_test(test_recreate);
return NULL;
}
RUN_TESTS(all_tests);
```
The Plan
* A fun and handy little statistics engine for simple analysis.
* Comparing it to the same in R.
Comparing Test vs. R
I'll use R to show you how this works vs. normal calculations using all data.
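If you don't have R handy, the same sanity check can be done in C: compute the mean and sample standard deviation directly over the whole sample array and compare them with the rolling results. This is only a sketch; the function name is mine, and samples and n would come from wherever your data lives:
```c
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <lcthw/stats.h>

// Two-pass calculation over all of the data, to compare against the
// rolling sum/sumsq version kept by Stats_sample.
void compare_with_direct(double *samples, int n)
{
    int i = 0;
    double sum = 0.0;
    for (i = 0; i < n; i++) sum += samples[i];
    double mean = sum / n;

    double ss = 0.0;
    for (i = 0; i < n; i++) ss += (samples[i] - mean) * (samples[i] - mean);
    double stddev = sqrt(ss / (n - 1));    // sample (n - 1) standard deviation

    Stats *st = Stats_create();
    for (i = 0; i < n; i++) Stats_sample(st, samples[i]);

    printf("direct:  mean=%f stddev=%f\n", mean, stddev);
    printf("rolling: mean=%f stddev=%f\n", Stats_mean(st), Stats_stddev(st));

    free(st);
}
```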
Breaking It
The easiest way to break this is to feed it bad data once; after that, the whole
stream is broken.
Extra Credit
* Convert the ``Stats_stddev`` and ``Stats_mean`` to ``static inline`` functions in the ``stats.h`` file instead of in the ``stats.c`` file.
* Use this code to write a performance test of the ``string_algos_test.c``.
Make it optional, and have it run the base test as a series of samples, and then report
the results.
* Write a version of this in another programming language you know. Confirm that this
version is correct based on what I have here.
Extra Credit
* Write a little program that can take a file full of numbers and spit these statistics
out for them.
* Make the program accept a table of data that has headers on one line, followed by rows
of numbers separated by any number of spaces. Your program
should then print out these statistics for each column by the header name.
### Exercise 44 Ring Buffer
The Plan
Learn about a handy data structure for I/O processing:
Ring Buffers
The Code
.\ex44\netclient.c
```c
#undef NDEBUG
#include <stdlib.h>
#include <sys/select.h>
#include <stdio.h>
#include <lcthw/ringbuffer.h>
#include <lcthw/dbg.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <sys/uio.h>
#include <arpa/inet.h>
#include <netdb.h>
#include <unistd.h>
#include <fcntl.h>
struct tagbstring NL = bsStatic("\n");
struct tagbstring CRLF = bsStatic("\r\n");
int nonblock(int fd)
{
int flags = fcntl(fd, F_GETFL, 0);
check(flags >= 0, "Invalid flags on nonblock.");
int rc = fcntl(fd, F_SETFL, flags | O_NONBLOCK);
check(rc == 0, "Can't set nonblocking.");
return 0;
error:
return -1;
}
int client_connect(char *host, char *port)
{
int rc = 0;
struct addrinfo *addr = NULL;
rc = getaddrinfo(host, port, NULL, &addr);
check(rc == 0, "Failed to lookup %s:%s", host, port);
int sock = socket(AF_INET, SOCK_STREAM, 0);
check(sock >= 0, "Cannot create a socket.");
rc = connect(sock, addr->ai_addr, addr->ai_addrlen);
check(rc == 0, "Connect failed.");
rc = nonblock(sock);
check(rc == 0, "Can't set nonblocking.");
freeaddrinfo(addr);
return sock;
error:
freeaddrinfo(addr);
return -1;
}
int read_some(RingBuffer * buffer, int fd, int is_socket)
{
int rc = 0;
if (RingBuffer_available_data(buffer) == 0) {
buffer->start = buffer->end = 0;
}
if (is_socket) {
rc = recv(fd, RingBuffer_starts_at(buffer),
RingBuffer_available_space(buffer), 0);
} else {
rc = read(fd, RingBuffer_starts_at(buffer),
RingBuffer_available_space(buffer));
}
check(rc >= 0, "Failed to read from fd: %d", fd);
RingBuffer_commit_write(buffer, rc);
return rc;
error:
return -1;
}
int write_some(RingBuffer * buffer, int fd, int is_socket)
{
int rc = 0;
bstring data = RingBuffer_get_all(buffer);
check(data != NULL, "Failed to get from the buffer.");
check(bfindreplace(data, &NL, &CRLF, 0) == BSTR_OK,
"Failed to replace NL.");
if (is_socket) {
rc = send(fd, bdata(data), blength(data), 0);
} else {
rc = write(fd, bdata(data), blength(data));
}
check(rc == blength(data), "Failed to write everything to fd: %d.",
fd);
bdestroy(data);
return rc;
error:
return -1;
}
int main(int argc, char *argv[])
{
fd_set allreads;
fd_set readmask;
int socket = 0;
int rc = 0;
RingBuffer *in_rb = RingBuffer_create(1024 * 10);
RingBuffer *sock_rb = RingBuffer_create(1024 * 10);
check(argc == 3, "USAGE: netclient host port");
socket = client_connect(argv[1], argv[2]);
check(socket >= 0, "connect to %s:%s failed.", argv[1], argv[2]);
FD_ZERO(&allreads);
FD_SET(socket, &allreads);
FD_SET(0, &allreads);
while (1) {
readmask = allreads;
rc = select(socket + 1, &readmask, NULL, NULL, NULL);
check(rc >= 0, "select failed.");
if (FD_ISSET(0, &readmask)) {
rc = read_some(in_rb, 0, 0);
check_debug(rc != -1, "Failed to read from stdin.");
}
if (FD_ISSET(socket, &readmask)) {
rc = read_some(sock_rb, socket, 0);
check_debug(rc != -1, "Failed to read from socket.");
}
while (!RingBuffer_empty(sock_rb)) {
rc = write_some(sock_rb, 1, 0);
check_debug(rc != -1, "Failed to write to stdout.");
}
while (!RingBuffer_empty(in_rb)) {
rc = write_some(in_rb, socket, 1);
check_debug(rc != -1, "Failed to write to socket.");
}
}
return 0;
error:
return -1;
}
```
It's basically a DArray with dynamic start and end settings.
You can *also* use a Queue of bstrings to do almost the same thing.
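If you want to see the shape of that structure without opening liblcthw, here's a stripped-down sketch using the same field and macro names the code above relies on. This is not the real liblcthw implementation: it never wraps (which is why netclient.c resets start and end to 0 whenever the buffer drains), and it omits RingBuffer_get_all.
```c
#include <stdlib.h>

typedef struct {
    char *buffer;   // backing storage
    int length;     // total capacity
    int start;      // where the next read happens
    int end;        // where the next write happens
} RingBuffer;

// unread data lives in [start, end); free space is everything after end
#define RingBuffer_available_data(B)  ((B)->end - (B)->start)
#define RingBuffer_available_space(B) ((B)->length - (B)->end)
#define RingBuffer_empty(B)           (RingBuffer_available_data(B) == 0)
#define RingBuffer_starts_at(B)       ((B)->buffer + (B)->start)
#define RingBuffer_commit_write(B, A) ((B)->end += (A))
#define RingBuffer_commit_read(B, A)  ((B)->start += (A))

static inline RingBuffer *RingBuffer_create(int length)
{
    RingBuffer *rb = calloc(1, sizeof(RingBuffer));
    if (rb == NULL) return NULL;

    rb->buffer = calloc(1, length);
    if (rb->buffer == NULL) {
        free(rb);
        return NULL;
    }

    rb->length = length;
    return rb;
}

static inline void RingBuffer_destroy(RingBuffer *rb)
{
    if (rb) {
        free(rb->buffer);
        free(rb);
    }
}
```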
Code Review
The Analysis
* Watch a ring buffer work in the debugger.
* Draw it visually to explore it.
* The purpose is to efficiently add and remove data when the amount added and removed is random.
Pause!
I will next review the unit test I wrote, so if you want to attempt
solving it yourself, pause now.
The Unit Test
Here's my version of the unit test.
Breaking It
* The biggest mistake you'll make with a ring buffer is off-by-one errors.
* This is why the RingBuffer_commit_ and other macros exist.
* Another common mistake is to use it between threads, but that's a whole other book.
Extra Credit
* Create an alternative implementation of ``RingBuffer`` that uses
the POSIX trick and a unit test for it.
* Add a performance comparison test to this unit test that compares the
two versions by fuzzing them with random data and random read/write operations.
Make sure that you set up this fuzzing so that the same operations are done
to each version, and you can compare them between runs.
### Exercise 45 A Simple TCP/IP Client
The Plan
* Learn to use the *select* method and a RingBuffer to write a simple command line network client.
How select Works
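Stripped of the ring buffers, the pattern is: put every descriptor you care about into an fd_set, hand it to select(), and let it block until at least one of them is readable. A bare-bones sketch of that loop (the descriptors are hypothetical and error handling is mostly elided):
```c
#include <sys/select.h>
#include <unistd.h>

// Watch stdin (fd 0) and one connected socket until either is readable.
void select_loop(int sock)
{
    fd_set allreads;
    FD_ZERO(&allreads);
    FD_SET(0, &allreads);        // stdin
    FD_SET(sock, &allreads);     // the network socket

    for (;;) {
        // select() modifies the set it's given, so work on a copy
        fd_set readmask = allreads;
        int rc = select(sock + 1, &readmask, NULL, NULL, NULL);
        if (rc < 0) break;       // error from select

        if (FD_ISSET(0, &readmask)) {
            // stdin is readable: a read() here won't block
        }

        if (FD_ISSET(sock, &readmask)) {
            // the socket is readable: a recv() here won't block
        }
    }
}
```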
Code Review
Improving It
These read and write functions are useful enough that I could move them into RingBuffer.
Extra Credit
* As I mentioned, there are quite a few functions you may not know, so
look them up. In fact, look them all up even if you think you know
them.
* Go back through and add various defensive programming checks to
the functions to improve them.
* Use the *getopt* function to allow the user
the option *not* to translate *\n* to *\r\n*. This
is only needed on protocols that require it for line endings, like HTTP.
Sometimes you don't want the translation, so give the user the option.
### Exercise 46 Ternary Search Tree
The Plan
Learn about my favorite data structure ever:
Ternary Search Tree
The Code
.\ex46\tstree.h
```c
#ifndef _lcthw_TSTree_h
#define _lcthw_TSTree_h
#include <stdlib.h>
#include <lcthw/darray.h>
typedef struct TSTree {
char splitchar;
struct TSTree *low;
struct TSTree *equal;
struct TSTree *high;
void *value;
} TSTree;
void *TSTree_search(TSTree * root, const char *key, size_t len);
void *TSTree_search_prefix(TSTree * root, const char *key, size_t len);
typedef void (*TSTree_traverse_cb) (void *value, void *data);
TSTree *TSTree_insert(TSTree * node, const char *key, size_t len,
void *value);
void TSTree_traverse(TSTree * node, TSTree_traverse_cb cb, void *data);
void TSTree_destroy(TSTree * root);
#endif
```
.\ex46\tstree.c
```c
#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
#include <lcthw/dbg.h>
#include <lcthw/tstree.h>
static inline TSTree *TSTree_insert_base(TSTree * root, TSTree * node,
const char *key, size_t len,
void *value)
{
if (node == NULL) {
node = (TSTree *) calloc(1, sizeof(TSTree));
if (root == NULL) {
root = node;
}
node->splitchar = *key;
}
if (*key < node->splitchar) {
node->low = TSTree_insert_base(
root, node->low, key, len, value);
} else if (*key == node->splitchar) {
if (len > 1) {
node->equal = TSTree_insert_base(
root, node->equal, key + 1, len - 1, value);
} else {
assert(node->value == NULL && "Duplicate insert into tst.");
node->value = value;
}
} else {
node->high = TSTree_insert_base(
root, node->high, key, len, value);
}
return node;
}
TSTree *TSTree_insert(TSTree * node, const char *key, size_t len,
void *value)
{
return TSTree_insert_base(node, node, key, len, value);
}
void *TSTree_search(TSTree * root, const char *key, size_t len)
{
TSTree *node = root;
size_t i = 0;
while (i < len && node) {
if (key[i] < node->splitchar) {
node = node->low;
} else if (key[i] == node->splitchar) {
i++;
if (i < len)
node = node->equal;
} else {
node = node->high;
}
}
if (node) {
return node->value;
} else {
return NULL;
}
}
void *TSTree_search_prefix(TSTree * root, const char *key, size_t len)
{
if (len == 0)
return NULL;
TSTree *node = root;
TSTree *last = NULL;
size_t i = 0;
while (i < len && node) {
if (key[i] < node->splitchar) {
node = node->low;
} else if (key[i] == node->splitchar) {
i++;
if (i < len) {
if (node->value)
last = node;
node = node->equal;
}
} else {
node = node->high;
}
}
node = node ? node : last;
// traverse until we find the first value in the equal chain
// this is then the first node with this prefix
while (node && !node->value) {
node = node->equal;
}
return node ? node->value : NULL;
}
void TSTree_traverse(TSTree * node, TSTree_traverse_cb cb, void *data)
{
if (!node)
return;
if (node->low)
TSTree_traverse(node->low, cb, data);
if (node->equal) {
TSTree_traverse(node->equal, cb, data);
}
if (node->high)
TSTree_traverse(node->high, cb, data);
if (node->value)
cb(node->value, data);
}
void TSTree_destroy(TSTree * node)
{
if (node == NULL)
return;
if (node->low)
TSTree_destroy(node->low);
if (node->equal) {
TSTree_destroy(node->equal);
}
if (node->high)
TSTree_destroy(node->high);
free(node);
}
```
.\ex46\tstree_tests.c
```c
#include "minunit.h"
#include <lcthw/tstree.h>
#include <string.h>
#include <assert.h>
#include <lcthw/bstrlib.h>
TSTree *node = NULL;
char *valueA = "VALUEA";
char *valueB = "VALUEB";
char *value2 = "VALUE2";
char *value4 = "VALUE4";
char *reverse = "VALUER";
int traverse_count = 0;
struct tagbstring test1 = bsStatic("TEST");
struct tagbstring test2 = bsStatic("TEST2");
struct tagbstring test3 = bsStatic("TSET");
struct tagbstring test4 = bsStatic("T");
char *test_insert()
{
node = TSTree_insert(node, bdata(&test1), blength(&test1), valueA);
mu_assert(node != NULL, "Failed to insert into tst.");
node = TSTree_insert(node, bdata(&test2), blength(&test2), value2);
mu_assert(node != NULL,
"Failed to insert into tst with second name.");
node = TSTree_insert(node, bdata(&test3), blength(&test3), reverse);
mu_assert(node != NULL,
"Failed to insert into tst with reverse name.");
node = TSTree_insert(node, bdata(&test4), blength(&test4), value4);
mu_assert(node != NULL,
"Failed to insert into tst with second name.");
return NULL;
}
char *test_search_exact()
{
// tst returns the last one inserted
void *res = TSTree_search(node, bdata(&test1), blength(&test1));
mu_assert(res == valueA,
"Got the wrong value back, should get A not B.");
// tst does not find if not exact
res = TSTree_search(node, "TESTNO", strlen("TESTNO"));
mu_assert(res == NULL, "Should not find anything.");
return NULL;
}
char *test_search_prefix()
{
void *res = TSTree_search_prefix(
node, bdata(&test1), blength(&test1));
debug("result: %p, expected: %p", res, valueA);
mu_assert(res == valueA, "Got wrong valueA by prefix.");
res = TSTree_search_prefix(node, bdata(&test1), 1);
debug("result: %p, expected: %p", res, valueA);
mu_assert(res == value4, "Got wrong value4 for prefix of 1.");
res = TSTree_search_prefix(node, "TE", strlen("TE"));
mu_assert(res != NULL, "Should find for short prefix.");
res = TSTree_search_prefix(node, "TE--", strlen("TE--"));
mu_assert(res != NULL, "Should find for partial prefix.");
return NULL;
}
void TSTree_traverse_test_cb(void *value, void *data)
{
assert(value != NULL && "Should not get NULL value.");
assert(data == valueA && "Expecting valueA as the data.");
traverse_count++;
}
char *test_traverse()
{
traverse_count = 0;
TSTree_traverse(node, TSTree_traverse_test_cb, valueA);
debug("traverse count is: %d", traverse_count);
mu_assert(traverse_count == 4, "Didn't find 4 keys.");
return NULL;
}
char *test_destroy()
{
TSTree_destroy(node);
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_insert);
mu_run_test(test_search_exact);
mu_run_test(test_search_prefix);
mu_run_test(test_traverse);
mu_run_test(test_destroy);
return NULL;
}
RUN_TESTS(all_tests);
```
Similar to a Binary Search Tree, but it has 3 branches per node based on
the characters in strings.
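Before digging into the properties, here's a tiny usage sketch of the API above with plain C string keys (the function name and the value labels are just for the demo):
```c
#include <string.h>
#include <lcthw/tstree.h>

// insert a few keys, then do an exact lookup and a prefix lookup
int tstree_demo(void)
{
    TSTree *t = NULL;

    t = TSTree_insert(t, "cat", strlen("cat"), "feline");
    t = TSTree_insert(t, "car", strlen("car"), "vehicle");
    t = TSTree_insert(t, "cart", strlen("cart"), "wagon");

    // exact match walks low/equal/high by comparing one character at a time
    char *exact = TSTree_search(t, "car", strlen("car"));        // "vehicle"

    // prefix match stops where the prefix ends and returns a value under it
    char *prefix = TSTree_search_prefix(t, "ca", strlen("ca"));  // one of the three

    int ok = (exact != NULL) && (prefix != NULL);

    TSTree_destroy(t);
    return ok;
}
```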
Advantages
* Find any string comparing at most N characters.
* Detect *missing* strings as fast, usually faster.
* Find all strings that start with, or contain, any substring as fast.
* Find all similar known strings quickly.
Disadvantages
* Delete is a pain, as in most trees.
* Uses lots of memory to store keys, so bad for sets of large keys.
* Kind of weird for most programmers.
Improving It
* You could allow duplicates by using a *DArray* instead of the
*value*.
* As I mentioned earlier, deleting is hard, but you could simulate it by setting
the values to *NULL* so that they are effectively gone.
* There are no ways to collect all of the possible matching values. I'll have
you implement that in an extra credit.
* There are other algorithms that are more complex but have slightly
better properties. Take a look at suffix array, suffix tree, and
radix tree structures.
Extra Credit
* Implement a *TSTree_collect* that returns a *DArray* containing
all of the keys that match the given prefix.
* Implement *TSTree_search_suffix* and a *TSTree_insert_suffix*
so you can do suffix searches and inserts.
* Use the debugger to see how this structure is used in memory
compared to the *BSTree* and *Hashmap*.
### Exercise 47 A Fast URL Router
The Plan
Use the *TSTree* to do something useful:
Route URLs
.\ex47\ex47_urls.txt
```
/test.tst TestHandler
/ IndexHandler
/test/this/out/index.html PageHandler
/index.html PageHandler
/and/then/i/have/things/to/test.html PageHandler
```
Code Review
.\ex47\urlor.c
```c
#include <lcthw/tstree.h>
#include <lcthw/bstrlib.h>
TSTree *add_route_data(TSTree * routes, bstring line)
{
struct bstrList *data = bsplit(line, ' ');
check(data->qty == 2, "Line '%s' does not have 2 columns",
bdata(line));
routes = TSTree_insert(routes,
bdata(data->entry[0]),
blength(data->entry[0]),
bstrcpy(data->entry[1]));
bstrListDestroy(data);
return routes;
error:
return NULL;
}
TSTree *load_routes(const char *file)
{
TSTree *routes = NULL;
bstring line = NULL;
FILE *routes_map = NULL;
routes_map = fopen(file, "r");
check(routes_map != NULL, "Failed to open routes: %s", file);
while ((line = bgets((bNgetc) fgetc, routes_map, '\n')) != NULL) {
check(btrimws(line) == BSTR_OK, "Failed to trim line.");
routes = add_route_data(routes, line);
check(routes != NULL, "Failed to add route.");
bdestroy(line);
}
fclose(routes_map);
return routes;
error:
if (routes_map) fclose(routes_map);
if (line) bdestroy(line);
return NULL;
}
bstring match_url(TSTree * routes, bstring url)
{
bstring route = TSTree_search(routes, bdata(url), blength(url));
if (route == NULL) {
printf("No exact match found, trying prefix.\n");
route = TSTree_search_prefix(routes, bdata(url), blength(url));
}
return route;
}
bstring read_line(const char *prompt)
{
printf("%s", prompt);
bstring result = bgets((bNgetc) fgetc, stdin, '\n');
check_debug(result != NULL, "stdin closed.");
check(btrimws(result) == BSTR_OK, "Failed to trim.");
return result;
error:
return NULL;
}
void bdestroy_cb(void *value, void *ignored)
{
(void)ignored;
bdestroy((bstring) value);
}
void destroy_routes(TSTree * routes)
{
TSTree_traverse(routes, bdestroy_cb, NULL);
TSTree_destroy(routes);
}
int main(int argc, char *argv[])
{
bstring url = NULL;
bstring route = NULL;
TSTree *routes = NULL;
check(argc == 2, "USAGE: urlor <urlfile>");
routes = load_routes(argv[1]);
check(routes != NULL, "Your route file has an error.");
while (1) {
url = read_line("URL> ");
check_debug(url != NULL, "goodbye.");
route = match_url(routes, url);
if (route) {
printf("MATCH: %s == %s\n", bdata(url), bdata(route));
} else {
printf("FAIL: %s\n", bdata(url));
}
bdestroy(url);
}
destroy_routes(routes);
return 0;
error:
destroy_routes(routes);
return 1;
}
```
The Analysis
Watch me play with it, and then I'll tell you how it's working.
Improving It
* Collect all possible matches, then choose the longest one as the winner (sketched below).
* Use the TSTree to find prefixes, then a regex to choose the winner.
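A cheap way to get the first of those improvements without writing a collect function is to try exact matches on ever-shorter prefixes of the URL and stop at the first hit, which is necessarily the longest registered route. This is only a sketch of the idea, not what urlor.c above does:
```c
#include <lcthw/tstree.h>
#include <lcthw/bstrlib.h>

// Return the route for the longest registered prefix of url, or NULL.
bstring match_longest(TSTree *routes, bstring url)
{
    int len = 0;

    for (len = blength(url); len > 0; len--) {
        bstring route = TSTree_search(routes, bdata(url), len);
        if (route) return route;    // first hit is the longest prefix
    }

    return NULL;
}
```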
Extra Credit
* Instead of just storing the string for the handler, create an actual engine that uses a
*Handler* struct to store the application. The structure would store the URL to which it's attached, the name, and anything else you'd need to make an actual routing system.
Extra Credit
* Instead of mapping URLs to arbitrary names, map them to .so files and use the *dlopen*
system to load handlers on the fly and call the callbacks they contain. Put these callbacks
in your *Handler* struct, and then you have yourself a fully dynamic callback
handler system in C.
### Exercise 48a A Simple Network Server: Project Description
The Plan
Start your first long running project:
statserve
The Purpose
You'll get the project started and get a minimum first hack going.
The Requirements
1. Create a simple network server that accepts a connection on port 7899 from
*netclient* or the *nc* command, and echoes back anything you type.
2. You'll need to learn how to bind a port, listen on the socket, and answer it.
Use your research skills to study how this is done and attempt to implement it
yourself.
The Requirements
3. The more important part of this project is laying out the project directory
from the *c-skeleton*, and making sure you can build everything and get it
working.
4. Don't worry about things like daemons or anything else. Your server just has
to run from the command line and keep running.
The Clues
I will now give you some clues:
* USE liblcthw!
* Remember you did a client already, you just need to make a server.
* Do NOT use select! Use fork() for the server.
* Keep it *simple*. Don't worry about anything other than accepting a connection and closing.
* Stay small, build slowly.
Important References
* Research online for "echo server in C".
* Read man (2) pages for *accept*, *bind*, *listen*, *connect*, *select*, *socket*, and *shutdown*.
Encouragement
This will be *HARD*! Try your best, and take it piece by piece. You can do it, but remember that if you give up, the next video (48b) will show you the code to my solution and how to solve it. You can peek there, then come back when you're stuck.
### Exercise 48b A Simple Network Server:
.\ex48b\c-skeleton
.\ex48b\c-skeleton\src\dbg.h
```c
#ifndef __dbg_h__
#define __dbg_h__
#include <stdio.h>
#include <errno.h>
#include <string.h>
#ifdef NDEBUG
#define debug(M, ...)
#else
#define debug(M, ...) fprintf(stderr, "DEBUG %s:%d: " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#endif
#define clean_errno() (errno == 0 ? "None" : strerror(errno))
#define log_err(M, ...) fprintf(stderr,\
"[ERROR] (%s:%d: errno: %s) " M "\n", __FILE__, __LINE__,\
clean_errno(), ##__VA_ARGS__)
#define log_warn(M, ...) fprintf(stderr,\
"[WARN] (%s:%d: errno: %s) " M "\n",\
__FILE__, __LINE__, clean_errno(), ##__VA_ARGS__)
#define log_info(M, ...) fprintf(stderr, "[INFO] (%s:%d) " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#define check(A, M, ...) if(!(A)) {\
log_err(M, ##__VA_ARGS__); errno=0; goto error; }
#define sentinel(M, ...) { log_err(M, ##__VA_ARGS__);\
errno=0; goto error; }
#define check_mem(A) check((A), "Out of memory.")
#define check_debug(A, M, ...) if(!(A)) { debug(M, ##__VA_ARGS__);\
errno=0; goto error; }
#endif
```
.\ex48b\c-skeleton\src\libex29.c
```c
#include <stdio.h>
#include <ctype.h>
#include "dbg.h"
int print_a_message(const char *msg)
{
printf("A STRING: %s\n", msg);
return 0;
}
int uppercase(const char *msg)
{
int i = 0;
// BUG: \0 termination problems
for(i = 0; msg[i] != '\0'; i++) {
printf("%c", toupper(msg[i]));
}
printf("\n");
return 0;
}
int lowercase(const char *msg)
{
int i = 0;
// BUG: \0 termination problems
for(i = 0; msg[i] != '\0'; i++) {
printf("%c", tolower(msg[i]));
}
printf("\n");
return 0;
}
int fail_on_purpose(const char *msg)
{
return 1;
}
```
.\ex48b\c-skeleton\tests\libex29_tests.c
```c
#include "minunit.h"
#include <dlfcn.h>
typedef int (*lib_function) (const char *data);
char *lib_file = "build/libYOUR_LIBRARY.so";
void *lib = NULL;
int check_function(const char *func_to_run, const char *data,
int expected)
{
lib_function func = dlsym(lib, func_to_run);
check(func != NULL,
"Did not find %s function in the library %s: %s", func_to_run,
lib_file, dlerror());
int rc = func(data);
check(rc == expected, "Function %s return %d for data: %s",
func_to_run, rc, data);
return 1;
error:
return 0;
}
char *test_dlopen()
{
lib = dlopen(lib_file, RTLD_NOW);
mu_assert(lib != NULL, "Failed to open the library to test.");
return NULL;
}
char *test_functions()
{
mu_assert(check_function("print_a_message", "Hello", 0),
"print_a_message failed.");
mu_assert(check_function("uppercase", "Hello", 0),
"uppercase failed.");
mu_assert(check_function("lowercase", "Hello", 0),
"lowercase failed.");
return NULL;
}
char *test_failures()
{
mu_assert(check_function("fail_on_purpose", "Hello", 1),
"fail_on_purpose should fail.");
return NULL;
}
char *test_dlclose()
{
int rc = dlclose(lib);
mu_assert(rc == 0, "Failed to close lib.");
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_dlopen);
mu_run_test(test_functions);
mu_run_test(test_failures);
mu_run_test(test_dlclose);
return NULL;
}
RUN_TESTS(all_tests);
```
.\ex48b\statserve
.\ex48b\statserve\bin\statserve.c
```c
#include <stdio.h>
#include <lcthw/dbg.h>
#include "statserve.h"
#include "net.h"
int main(int argc, char *argv[])
{
check(argc == 3, "USAGE: statserve host port");
const char *host = argv[1];
const char *port = argv[2];
check(echo_server(host, port), "Failed to run the echo server.");
return 0;
error:
return 1;
}
```
.\ex48b\statserve\src\dbg.h
```c
#ifndef __dbg_h__
#define __dbg_h__
#include <stdio.h>
#include <errno.h>
#include <string.h>
#ifdef NDEBUG
#define debug(M, ...)
#else
#define debug(M, ...) fprintf(stderr, "DEBUG %s:%d: " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#endif
#define clean_errno() (errno == 0 ? "None" : strerror(errno))
#define log_err(M, ...) fprintf(stderr,\
"[ERROR] (%s:%d: errno: %s) " M "\n", __FILE__, __LINE__,\
clean_errno(), ##__VA_ARGS__)
#define log_warn(M, ...) fprintf(stderr,\
"[WARN] (%s:%d: errno: %s) " M "\n",\
__FILE__, __LINE__, clean_errno(), ##__VA_ARGS__)
#define log_info(M, ...) fprintf(stderr, "[INFO] (%s:%d) " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#define check(A, M, ...) if(!(A)) {\
log_err(M, ##__VA_ARGS__); errno=0; goto error; }
#define sentinel(M, ...) { log_err(M, ##__VA_ARGS__);\
errno=0; goto error; }
#define check_mem(A) check((A), "Out of memory.")
#define check_debug(A, M, ...) if(!(A)) { debug(M, ##__VA_ARGS__);\
errno=0; goto error; }
#endif
```
.\ex48b\statserve\src\net.c
```c
#include <stdlib.h>
#include <sys/select.h>
#include <stdio.h>
#include <lcthw/ringbuffer.h>
#include <lcthw/dbg.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <sys/uio.h>
#include <arpa/inet.h>
#include <netdb.h>
#include <unistd.h>
#include <fcntl.h>
#include "net.h"
struct tagbstring NL = bsStatic("\n");
struct tagbstring CRLF = bsStatic("\r\n");
int nonblock(int fd)
{
int flags = fcntl(fd, F_GETFL, 0);
check(flags >= 0, "Invalid flags on nonblock.");
int rc = fcntl(fd, F_SETFL, flags | O_NONBLOCK);
check(rc == 0, "Can't set nonblocking.");
return 0;
error:
return -1;
}
int client_connect(char *host, char *port)
{
int rc = 0;
struct addrinfo *addr = NULL;
rc = getaddrinfo(host, port, NULL, &addr);
check(rc == 0, "Failed to lookup %s:%s", host, port);
int sock = socket(AF_INET, SOCK_STREAM, 0);
check(sock >= 0, "Cannot create a socket.");
rc = connect(sock, addr->ai_addr, addr->ai_addrlen);
check(rc == 0, "Connect failed.");
rc = nonblock(sock);
check(rc == 0, "Can't set nonblocking.");
freeaddrinfo(addr);
return sock;
error:
freeaddrinfo(addr);
return -1;
}
int read_some(RingBuffer * buffer, int fd, int is_socket)
{
int rc = 0;
if (RingBuffer_available_data(buffer) == 0) {
buffer->start = buffer->end = 0;
}
if (is_socket) {
rc = recv(fd, RingBuffer_starts_at(buffer),
RingBuffer_available_space(buffer), 0);
} else {
rc = read(fd, RingBuffer_starts_at(buffer),
RingBuffer_available_space(buffer));
}
check(rc >= 0, "Failed to read from fd: %d", fd);
RingBuffer_commit_write(buffer, rc);
return rc;
error:
return -1;
}
int write_some(RingBuffer * buffer, int fd, int is_socket)
{
int rc = 0;
bstring data = RingBuffer_get_all(buffer);
check(data != NULL, "Failed to get from the buffer.");
check(bfindreplace(data, &NL, &CRLF, 0) == BSTR_OK,
"Failed to replace NL.");
if (is_socket) {
rc = send(fd, bdata(data), blength(data), 0);
} else {
rc = write(fd, bdata(data), blength(data));
}
check(rc == blength(data), "Failed to write everything to fd: %d.",
fd);
bdestroy(data);
return rc;
error:
return -1;
}
int attempt_listen(struct addrinfo *info)
{
int sockfd = -1; // default fail
int rc = -1;
int yes = 1;
check(info != NULL, "Invalid addrinfo.");
// create a socket with the addrinfo
sockfd = socket(info->ai_family, info->ai_socktype,
info->ai_protocol);
    check_debug(sockfd != -1, "Failed to create socket. Trying more.");
// set the SO_REUSEADDR option on the socket
rc = setsockopt(sockfd, SOL_SOCKET, SO_REUSEADDR, &yes, sizeof(int));
check_debug(rc == 0, "Failed to set SO_REUSADDR.");
// attempt to bind to it
rc = bind(sockfd, info->ai_addr, info->ai_addrlen);
check_debug(rc == 0, "Failed to find socket.");
// finally listen with a backlog
rc = listen(sockfd, BACKLOG);
check_debug(rc == 0, "Failed to listen to socket.");
return sockfd;
error:
return -1;
}
int server_listen(const char *host, const char *port)
{
int rc = 0;
int sockfd = -1; // default fail value
struct addrinfo *info = NULL;
struct addrinfo *next_p = NULL;
struct addrinfo addr = {
.ai_family = AF_UNSPEC,
.ai_socktype = SOCK_STREAM,
.ai_flags = AI_PASSIVE
};
check(host != NULL, "Invalid host.");
check(port != NULL, "Invalid port.");
// get the address info for host and port
rc = getaddrinfo(NULL, port, &addr, &info);
check(rc == 0, "Failed to get address info for connect.");
// cycle through the available list to find one
for(next_p = info; next_p != NULL; next_p = next_p->ai_next)
{
// attempt to listen to each one
sockfd = attempt_listen(next_p);
if(sockfd != -1) break;
}
// either we found one and were able to listen or nothing.
check(sockfd != -1, "All possible addresses failed.");
error: //fallthrough
if(info) freeaddrinfo(info);
// this gets set by the above to either -1 or valid
return sockfd;
}
```
.\ex48b\statserve\src\net.h
```c
#ifndef _net_h
#define _net_h
#include <lcthw/ringbuffer.h>
#define BACKLOG 10
int nonblock(int fd);
int client_connect(char *host, char *port);
int read_some(RingBuffer * buffer, int fd, int is_socket);
int write_some(RingBuffer * buffer, int fd, int is_socket);
int server_listen(const char *host, const char *port);
#endif
```
.\ex48b\statserve\src\statserve.c
```c
#include <stdio.h>
#include <ctype.h>
#include <lcthw/dbg.h>
#include <unistd.h>
#include <stdlib.h>
#include <signal.h>
#include <sys/wait.h>
#include "net.h"
#include <netdb.h>
const int RB_SIZE = 1024 * 10;
void handle_sigchild(int sig) {
sig = 0; // ignore it
while(waitpid(-1, NULL, WNOHANG) > 0) {
}
}
void client_handler(int client_fd)
{
int rc = 0;
// need a ringbuffer for the input
RingBuffer *sock_rb = RingBuffer_create(RB_SIZE);
// read_some in a loop
while(read_some(sock_rb, client_fd, 1) != -1) {
// write_it back off the ringbuffer
if(write_some(sock_rb, client_fd, 1) == -1) {
debug("Client closed.");
break;
}
}
// close the socket
rc = close(client_fd);
check(rc != -1, "Failed to close the socket.");
error: // fallthrough
if(sock_rb) RingBuffer_destroy(sock_rb);
exit(0); // just exit the child process
}
int echo_server(const char *host, const char *port)
{
int rc = 0;
struct sockaddr_in client_addr;
socklen_t sin_size = sizeof(client_addr);
int server_socket = 0;
int client_fd = 0;
struct sigaction sa = {
.sa_handler = handle_sigchild,
.sa_flags = SA_RESTART | SA_NOCLDSTOP
};
check(host != NULL, "Invalid host.");
check(port != NULL, "Invalid port.");
// create a sigaction that handles SIGCHLD
sigemptyset(&sa.sa_mask);
rc = sigaction(SIGCHLD, &sa, 0);
check(rc != -1, "Failed to setup signal handler for child processes.");
// listen on the given port and host
server_socket = server_listen(host, port);
check(server_socket >= 0, "bind to %s:%s failed.", host, port);
while(1) {
// accept the connection
client_fd = accept(server_socket, (struct sockaddr *)&client_addr, &sin_size);
check(client_fd >= 0, "Failed to accept connection.");
debug("Client connected.");
rc = fork();
if(rc == 0) {
// child process
close(server_socket); // don't need this
// handle the client
client_handler(client_fd);
} else {
// server process
close(client_fd); // don't need this
}
}
error: // fallthrough
return -1;
}
```
.\ex48b\statserve\src\statserve.h
```c
#ifndef _statserve_h
#define _statserve_h
int echo_server(const char *host, const char *port);
#endif
```
.\ex48b\statserve\tests\statserve_tests.c
```c
#include "minunit.h"
#include <dlfcn.h>
#include "statserve.h"
char *test_dummy()
{
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_dummy);
return NULL;
}
RUN_TESTS(all_tests);
```
.\ex48b\c-skeleton
.\ex48b\c-skeleton\src\dbg.h
```c
#ifndef __dbg_h__
#define __dbg_h__
#include <stdio.h>
#include <errno.h>
#include <string.h>
#ifdef NDEBUG
#define debug(M, ...)
#else
#define debug(M, ...) fprintf(stderr, "DEBUG %s:%d: " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#endif
#define clean_errno() (errno == 0 ? "None" : strerror(errno))
#define log_err(M, ...) fprintf(stderr,\
"[ERROR] (%s:%d: errno: %s) " M "\n", __FILE__, __LINE__,\
clean_errno(), ##__VA_ARGS__)
#define log_warn(M, ...) fprintf(stderr,\
"[WARN] (%s:%d: errno: %s) " M "\n",\
__FILE__, __LINE__, clean_errno(), ##__VA_ARGS__)
#define log_info(M, ...) fprintf(stderr, "[INFO] (%s:%d) " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#define check(A, M, ...) if(!(A)) {\
log_err(M, ##__VA_ARGS__); errno=0; goto error; }
#define sentinel(M, ...) { log_err(M, ##__VA_ARGS__);\
errno=0; goto error; }
#define check_mem(A) check((A), "Out of memory.")
#define check_debug(A, M, ...) if(!(A)) { debug(M, ##__VA_ARGS__);\
errno=0; goto error; }
#endif
```
.\ex48b\c-skeleton\src\libex29.c
```c
#include <stdio.h>
#include <ctype.h>
#include "dbg.h"
int print_a_message(const char *msg)
{
printf("A STRING: %s\n", msg);
return 0;
}
int uppercase(const char *msg)
{
int i = 0;
// BUG: \0 termination problems
for(i = 0; msg[i] != '\0'; i++) {
printf("%c", toupper(msg[i]));
}
printf("\n");
return 0;
}
int lowercase(const char *msg)
{
int i = 0;
// BUG: \0 termination problems
for(i = 0; msg[i] != '\0'; i++) {
printf("%c", tolower(msg[i]));
}
printf("\n");
return 0;
}
int fail_on_purpose(const char *msg)
{
return 1;
}
```
.\ex48b\c-skeleton\tests\libex29_tests.c
```c
#include "minunit.h"
#include <dlfcn.h>
typedef int (*lib_function) (const char *data);
char *lib_file = "build/libYOUR_LIBRARY.so";
void *lib = NULL;
int check_function(const char *func_to_run, const char *data,
int expected)
{
lib_function func = dlsym(lib, func_to_run);
check(func != NULL,
"Did not find %s function in the library %s: %s", func_to_run,
lib_file, dlerror());
int rc = func(data);
check(rc == expected, "Function %s return %d for data: %s",
func_to_run, rc, data);
return 1;
error:
return 0;
}
char *test_dlopen()
{
lib = dlopen(lib_file, RTLD_NOW);
mu_assert(lib != NULL, "Failed to open the library to test.");
return NULL;
}
char *test_functions()
{
mu_assert(check_function("print_a_message", "Hello", 0),
"print_a_message failed.");
mu_assert(check_function("uppercase", "Hello", 0),
"uppercase failed.");
mu_assert(check_function("lowercase", "Hello", 0),
"lowercase failed.");
return NULL;
}
char *test_failures()
{
mu_assert(check_function("fail_on_purpose", "Hello", 1),
"fail_on_purpose should fail.");
return NULL;
}
char *test_dlclose()
{
int rc = dlclose(lib);
mu_assert(rc == 0, "Failed to close lib.");
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_dlopen);
mu_run_test(test_functions);
mu_run_test(test_failures);
mu_run_test(test_dlclose);
return NULL;
}
RUN_TESTS(all_tests);
```
Solution
The Plan
Show you how I solved the *statserve* project.
The Purpose
Watch me solve the first project quickly, then review the code.
The Setup
First I need to install liblcthw since I'll be using that.
Then I make the project skeleton and get something, anything going.
The Server
Then I just get it accepting a connection.
The Echo
Then I decided to just make it echo back what I type.
The Final Code
<file_sep>+++
title = "Java Note - 3: Path and Files"
description="Some good part of Java 7 - Path and Files API"
+++
## Prerequisites
>*Java 7+*
## Good stuff from not shiny Java 7
If you are planning to refactor your code, please give it a second thought: it is time to dump the old approach and try these new features. When Java 7 was released, I was, like most developers, somewhat disappointed that lambda and jigsaw did not make it in, but when I tried the new Path and Files APIs, I found them a great improvement. The enhancement of the new IO is really useful and saves a lot of effort for Java developers.
To be honest, before Java 7, coding file manipulation in Java was a headache. By "headache" I don't mean it was difficult; compared with other programming languages, you simply had to spend much more effort on boilerplate, and it was all tedious work. That is why I sometimes preferred cmd on Windows or bash on Linux to complete the task instead of handling file manipulation in Java. Now I think I can refactor my old file manipulation code and make it much more elegant.
### Better file visitor implementation
Following is a simple customized file visitor which has been part of my old utilities.
```java
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
public class CustomFileVisitor extends SimpleFileVisitor<Path> {
@Override
public FileVisitResult postVisitDirectory(Path dir , IOException arg1) throws IOException {
System.out.println( "post visit dir : "+ dir );
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
System.out.println( "pre visit dir : "+ dir );
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attr)
throws IOException {
if ( attr.isSymbolicLink() )
{ System.out.println( " symbolic link : "+ file );
}else if ( attr.isRegularFile() ){
System.out.println( " regular file : "+ file );
}
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult visitFileFailed(Path file, IOException exc)
throws IOException {
System.err.println( exc.getMessage());
return FileVisitResult.CONTINUE;
}
}
```
Using this customized visitor is easy. Just three lines of code and you can test it yourself.
```java
CustomFileVisitor fileVisitor = new CustomFileVisitor();
Path path = Paths.get("TestDir");
Files.walkFileTree(path, fileVisitor);
```
### ARM
Automatic resource management is another attractive feature of Java 7 and Project Coin. As the name implies, the JVM now takes care of closing external resources and frees the programmer from worrying about resource management, especially for people like me who miss the `using` statement in C#. Sometimes I wonder why Java is so stubborn about not learning good features from C#. As we know, C# came after Java and copied most of its concepts in the early days, but it really pushed object-oriented programming to a new level and inspired the Java world with many good features. I really hope someday I can code in Java as simply as in C#. Wise men learn by other men's mistakes; fools by their own.
In the past, Java programmers had to close any external resource, such as a file, printer or other device, once program execution completed. Normally we close the resources we opened at the beginning of the program, and we also have to decide how to release them if the program finishes normally and how to close them if it finishes abnormally. Following is a comparison of the old and new styles.
*Snippet of old style*
```java
FileInputStream exchangeCurrencyReader= null;
FileOutputStream exchangeCurrencyWriter = null;
try {
exchangeCurrencyReader = new FileInputStream("AUDvsUSD.txt");
exchangeCurrencyWriter = new FileOutputStream("AUDvsUSD_copy.txt");
int var;
while ((var = exchangeCurrencyReader.read()) != -1)
exchangeCurrencyWriter.write(var);
}
finally {
if (exchangeCurrencyReader!= null)
exchangeCurrencyReader.close();
if (exchangeCurrencyWriter!= null)
exchangeCurrencyWriter.close();
}
```
*Code in Java 7*
```java
try ( FileInputStream exchangeCurrencyReader = new FileInputStream("AUDvsUSD.txt");
FileOutputStream exchangeCurrencyWriter = new FileOutputStream("AUDvsUSD_copy.txt")){
int var;
while((var = exchangeCurrencyReader.read()) != -1 )
exchangeCurrencyWriter.write(var);
}
```
In the code above we declare two file streams: we read from one file and write to another. After the whole process, both streams are closed automatically, whether the code executed properly or not. Both exchangeCurrencyReader.close() and exchangeCurrencyWriter.close() are called automatically, which is the best part of ARM. We should not miss this good part of Java 7.
### New file change monitor service
After doing some homework on the new features of Java 7, I tried the file watch service from Java 7 to replace an old file monitor program. It is great and quite simple to use, and I have pushed the update to production.
Most Java-based systems need such a monitoring function in some form: a separate process or thread runs the service, and a callback handler is triggered by it. Every time a file is amended, the service triggers the callback handler to complete some tasks.
Following is a sample of the file watching service. I removed the callback part, which is specific to the business.
```java
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardWatchEventKinds;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.util.HashMap;
import java.util.Map;
public class FileWatchService {
public static void watchFileUpdate() {
try (WatchService service = FileSystems.getDefault().newWatchService()) {
Map<WatchKey, Path> eventMap = new HashMap<>();
Path dir = Paths.get("TestDir");
eventMap.put(dir.register(service, StandardWatchEventKinds.ENTRY_MODIFY),dir);
WatchKey key;
do {
key = service.take();
Path eventPath = eventMap.get(key);
for (WatchEvent<?> event : key.pollEvents()) {
WatchEvent.Kind<?> kind = event.kind();
Path path = (Path) event.context();
System.out.println(eventPath + " : " + kind + " : " + path);
}
} while (key.reset());
} catch (Exception e) {
e.printStackTrace();
}
}
public static void main (String [] args ){
watchFileUpdate();
}
}
```
The sample above registers one kind of event. If you check the API doc, there are another two kinds: StandardWatchEventKinds.ENTRY\_CREATE and StandardWatchEventKinds.ENTRY\_DELETE. Together these events cover almost all business requirements.
<file_sep>+++
title = "F# Task"
description = "F# Computation - Task expressions"
weight = 13
+++
## Tasks expressions
Asynchronous code is normally authored using async expressions. Using task expressions is preferred when interoperating extensively with .NET libraries that create or consume .NET tasks. Task expressions can also improve performance and the debugging experience. However, task expressions come with some limitations.
`task { expression }`
The task is started immediately after this code is executed and runs on the current thread until its first asynchronous operation is performed (for example, an asynchronous sleep, asynchronous I/O, or other primitive asynchronous operation). The type of the expression is `Task<'T>`, where `'T` is the type returned by the expression when the return keyword is used.
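As a small, hedged illustration (the function and its name are invented for this note), a task expression that performs an asynchronous sleep and then returns a string looks like this:
```fsharp
open System.Threading.Tasks

// A Task<string> that yields at the asynchronous delay and then returns a value.
let greet (name: string) : Task<string> =
    task {
        do! Task.Delay 10
        return $"Hello, {name}!"
    }

// (greet "world").Result evaluates to "Hello, world!"
```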
### Binding by using let!
In a task expression, some expressions and operations are synchronous, and some are asynchronous. When you await the result of an asynchronous operation, instead of an ordinary let binding, you use let!. The effect of let! is to enable execution to continue on other computations or threads as the computation is being performed. After the right side of the let! binding returns, the rest of the task resumes execution.
The following code shows the difference between let and let!. The line of code that uses let just creates a task as an object that you can await later by using, for example, task.Wait() or task.Result. The line of code that uses let! starts the task and awaits its result.
```fsharp
// let just stores the result as a task.
let (result1 : Task<int>) = stream.ReadAsync(buffer, offset, count, cancellationToken)
// let! completes the asynchronous operation and returns the data.
let! (result2 : int) = stream.ReadAsync(buffer, offset, count, cancellationToken)
```
F# `task { }` expressions can await the following kinds of asynchronous operations:
- .NET tasks, Task<TResult> and the non-generic Task.
- .NET value tasks, ValueTask<TResult> and the non-generic ValueTask.
- F# async computations Async<T>.
- Any object following the "GetAwaiter" pattern specified in F# RFC FS-1097.
### return expressions
Within task expressions, `return` expr is used to return the result of a task.
### return! expressions
Within task expressions, `return!` expr is used to return the result of another task. It is equivalent to using let! and then immediately returning the result.
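A minimal sketch showing both keywords (the two helper functions are invented for this illustration):
```fsharp
open System.Threading.Tasks

// return wraps a plain value into the task's result.
let lengthTask (s: string) : Task<int> =
    task { return s.Length }

// return! forwards the result of another task directly.
let forwardedLength (s: string) : Task<int> =
    task { return! lengthTask s }
```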
### Control flow
Task expressions can include the control-flow constructs `for .. in .. do, while .. do, try .. with .., try .. finally .., if .. then .. else, and if .. then ...` These may in turn include further task constructs, except for the with and finally handlers, which execute synchronously. If you need an asynchronous `try .. finally ..`, use a use binding in combination with an object of type `IAsyncDisposable`.
### `use` and `use!` bindings
Within task expressions, use bindings can bind to values of type `IDisposable` or `IAsyncDisposable`. For the latter, the disposal cleanup operation is executed asynchronously.
In addition to let!, you can use `use!` to perform asynchronous bindings. The difference between let! and `use!` is the same as the difference between let and use. For use!, the object is disposed of at the close of the current scope. Note that in F# 6, `use!` does not allow a value to be initialized to null, even though use does.
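For example, a minimal sketch of a use binding inside a task (assuming path points to an existing text file; the function name is made up):
```fsharp
open System.IO
open System.Threading.Tasks

// The StreamReader is disposed when the task body finishes, even if it fails.
let readFirstLine (path: string) : Task<string> =
    task {
        use reader = new StreamReader(path)
        let! line = reader.ReadLineAsync()
        return line
    }
```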
### Value Tasks
Value tasks are structs used to avoid allocations in task-based programming. A value task is an ephemeral value that's turned into a real task by using `.AsTask()`.
To create a value task from a task expression, use `|> ValueTask<ReturnType>` or `|> ValueTask`. For example:
```fsharp
let makeTask() =
task { return 1 }
makeTask() |> ValueTask<int>
```
### Adding cancellation tokens and cancellation checks
Unlike F# async expressions, task expressions do not implicitly pass a cancellation token and don't implicitly perform cancellation checks. If your code requires a cancellation token, you should specify the cancellation token as a parameter.
```fsharp
open System.Threading
let someTaskCode (cancellationToken: CancellationToken) =
task {
cancellationToken.ThrowIfCancellationRequested()
printfn $"continuing..."
}
```
If you intend to correctly make your code cancelable, carefully check that you pass the cancellation token through to all .NET library operations that support cancellation. For example, Stream.ReadAsync has multiple overloads, one of which accepts a cancellation token. If you do not use this overload, that specific asynchronous read operation will not be cancelable.
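A minimal sketch of threading the token through (the function name is made up; the point is using the Stream.ReadAsync overload that accepts a CancellationToken):
```fsharp
open System.IO
open System.Threading
open System.Threading.Tasks

// Forward the caller's token so the read itself becomes cancelable.
let readChunk (stream: Stream) (buffer: byte[]) (cancellationToken: CancellationToken) : Task<int> =
    task {
        let! bytesRead = stream.ReadAsync(buffer, 0, buffer.Length, cancellationToken)
        return bytesRead
    }
```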
### Background tasks
By default, .NET tasks are scheduled using `SynchronizationContext.Current` if present. This allows tasks to serve as cooperative, interleaved agents executing on a user interface thread without blocking the UI. If not present, task continuations are scheduled to the .NET thread pool.
In practice, it's often desirable that library code that generates tasks ignores the synchronization context and instead always switches to the .NET thread pool, if necessary. You can achieve this using `backgroundTask { }`:
`backgroundTask { expression }`
A background task ignores any `SynchronizationContext.Current` in the following sense: if started on a thread with non-null `SynchronizationContext.Current`, it switches to a background thread in the thread pool using Task.Run. If started on a thread with null `SynchronizationContext.Current`, it executes on that same thread.
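As a hedged sketch (the helper is invented for this note), a library-style function that always does its waiting on the thread pool could look like:
```fsharp
open System.Threading.Tasks

// Even if called from a UI thread, the delay and the code after it run on the thread pool.
let computeInBackground () : Task<int> =
    backgroundTask {
        do! Task.Delay 100
        return 21 * 2
    }
```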
### Note
In practice, this means that calls to `ConfigureAwait(false)` are not typically needed in F# task code. Instead, tasks that are intended to run in the background should be authored using `backgroundTask { ... }`. Any outer task binding to a background task will resynchronize to the `SynchronizationContext.Current` on completion of the background task.
### Limitations of tasks regarding tailcalls
Unlike F# async expressions, task expressions do not support tailcalls. That is, when `return!` is executed, the current task is registered as awaiting the task whose result is being returned. This means that recursive functions and methods implemented using task expressions may create unbounded chains of tasks, and these may use unbounded stack or heap. For example, consider the following code:
```fsharp
let rec taskLoopBad (count: int) : Task<string> =
task {
if count = 0 then
return "done!"
else
printfn $"looping..., count = {count}"
return! taskLoopBad (count-1)
}
let t = taskLoopBad 10000000
t.Wait()
```
This coding style should not be used with task expressions—it will create a chain of 10000000 tasks and cause a `StackOverflowException`. If an asynchronous operation is added on each loop invocation, the code will use an essentially unbounded heap. Consider switching this code to use an explicit loop, for example:
```fsharp
let taskLoopGood (count: int) : Task<string> =
task {
for i in 1 .. count do
printfn $"looping... count = {i}"
return "done!"
}
let t = taskLoopGood 10000000
t.Wait()
```
If asynchronous tailcalls are required, use an F# async expression, which does support tailcalls. For example:
```fsharp
let rec asyncLoopGood (count: int) =
async {
if count = 0 then
return "done!"
else
printfn $"looping..., count = {count}"
return! asyncLoopGood (count-1)
}
let t = asyncLoopGood 1000000 |> Async.StartAsTask
t.Wait()
```
<file_sep>+++
title = "Adv Bash - 3"
description = "Reference Cards - Files"
+++
### Operators: Files
| Operator | Tests Whether | Operator | Tests Whether |
| :-- | ------ | ----- | ------ |
| -e | File exists | -s | File is not zero size |
| -f | File is a regular file | | |
| -d | File is a directory | -r | File has read permission |
| -h | File is a symbolic link | -w | File has write permission |
| -L | File is a symbolic link | -x | File has execute permission |
| -b | File is a block device | | |
| -c | File is a character device | -g | sgid flag set |
| -p | File is a pipe | -u | suid flag set |
| -S | File is a socket | -k | "sticky bit" set |
| -t | File is associated with a terminal | | |
| -N | File modified since it was last read | F1 -nt F2 | File F1 is newer than F2 * |
| -O | You own the file | F1 -ot F2 | File F1 is older than F2 * |
| -G | Group id of file same as yours | F1 -ef F2 | Files F1 and F2 are hard links to the same file * |
| ! | NOT (inverts sense of above tests) | | |
### Sample
```
function is_symbolic_link(){
echo "Is the file $1 a symbolic link?";
if [ -f "$1" ] && [ -h "$1" ];
then
echo 'true' ;
else
echo 'false';
fi;
}
function is_file_empty () {
echo "Is the file $1 empty?";
if [ -f "$1" ] && [ ! -s "$1" ];
then
echo 'true' ;
else
echo 'false';
fi;
}
```
<file_sep>+++
title = "AWS: ECS - 1"
description = "Getting started with Fargate"
draft="true"
+++
## ECS
Amazon Elastic Container Service (Amazon ECS) is a highly scalable, fast, container management service that makes it easy to run, stop, and manage Docker containers on a cluster. You can host your cluster on a serverless infrastructure that is managed by Amazon ECS by launching your services or tasks using the Fargate launch type. For more control you can host your tasks on a cluster of Amazon Elastic Compute Cloud (Amazon EC2) instances that you manage by using the EC2 launch type.
Amazon ECS lets you launch and stop container-based applications with simple API calls, allows you to get the state of your cluster from a centralized service, and gives you access to many familiar Amazon EC2 features.
Amazon ECS can be used to create a consistent deployment and build experience, manage, and scale batch and Extract-Transform-Load (ETL) workloads, and build sophisticated application architectures on a microservices model.
### Installation
* MacOS
sudo curl -o /usr/local/bin/ecs-cli https://amazon-ecs-cli.s3.amazonaws.com/ecs-cli-darwin-amd64-latest
* Linux
sudo curl -o /usr/local/bin/ecs-cli https://amazon-ecs-cli.s3.amazonaws.com/ecs-cli-linux-amd64-latest
### Launch with Fargate
* Create a cluster configuration
ecs-cli configure --cluster pg-far \
--default-launch-type FARGATE \
--config-name pg-far --region ap-southeast-2
* Create a CLI profile
ecs-cli configure profile \
--access-key AWS_ACCESS_KEY_ID \
--secret-key AWS_SECRET_ACCESS_KEY \
--profile-name pg-far-profile
* Create a Cluster
ecs-cli up --cluster-config pg-far \
--ecs-profile pg-far-profile
* Configure the Security Group.
# VPC_ID is from command above
aws ec2 describe-security-groups \
--filters Name=vpc-id,Values=<VPC_ID> \
--region ap-southeast-2
* Add a rule to allow inbound access on port 80.
aws ec2 authorize-security-group-ingress \
--group-id <security_group_id> \
--protocol tcp --port 80 \
--cidr 0.0.0.0/0 --region ap-southeast-2
* Create a Compose File - docker-compose.yml
```yaml
version: '3'
services:
web:
image: amazon/amazon-ecs-sample
ports:
- "80:80"
logging:
driver: awslogs
options:
awslogs-group: pg-far
awslogs-region: ap-southeast-2
awslogs-stream-prefix: web
```
* Create a parameters specific file - ecs-params.yml
```yaml
version: 1
task_definition:
task_execution_role: ecsTaskExecutionRole
ecs_network_mode: awsvpc
task_size:
mem_limit: 0.5GB
cpu_limit: 256
run_params:
network_configuration:
awsvpc_configuration:
subnets:
- "subnet ID 1"
- "subnet ID 2"
security_groups:
- "security group ID"
assign_public_ip: ENABLED
```
* Deploy the Compose File to a Cluster
ecs-cli compose --project-name pg-far \
service up --create-log-groups \
--cluster-config pg-far \
--ecs-profile pg-far-profile
* View the container
ecs-cli compose --project-name pg-far \
service ps --cluster-config pg-far \
--ecs-profile pg-far-profile
* View the log
ecs-cli logs --task-id 0c2862e6e39e4eff92ca3e4f843c5b9a \
--follow --cluster-config pg-far \
--ecs-profile pg-far-profile
<file_sep>+++
title = "F# Computations 1"
description = "F# Computation expressions "
weight = 10
+++
## Computation expressions
Computation expressions in F# provide a convenient syntax for writing computations that can be sequenced and combined using control flow constructs and bindings. Depending on the kind of computation expression, they can be thought of as a way to express monads, monoids, monad transformers, and applicative functors. However, unlike other languages (such as do-notation in Haskell), they are not tied to a single abstraction, and do not rely on macros or other forms of metaprogramming to accomplish a convenient and context-sensitive syntax.
### Syntax overview
```fsharp
builder-expr { cexper }
expr { let! ... }
expr { do! ... }
expr { yield ... }
expr { yield! ... }
expr { return ... }
expr { return! ... }
expr { match! ... }
```
#### let!
The let! keyword binds the result of a call to another computation expression to a name. let! is defined by the Bind(x, f) member on the builder type.
#### do!
The do! keyword is for calling a computation expression that returns a unit-like type (defined by the Zero member on the builder)
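For a concrete feel of how let! maps onto Bind and return onto Return, here is a minimal sketch of a hypothetical option-based builder (not part of FSharp.Core):
```fsharp
// A tiny builder: let! calls Bind, return calls Return.
type MaybeBuilder() =
    member _.Bind(opt, f) =
        match opt with
        | Some x -> f x
        | None -> None
    member _.Return(x) = Some x

let maybe = MaybeBuilder()

// The whole block yields None as soon as any let! sees None.
let addOptions (a: int option) (b: int option) =
    maybe {
        let! x = a
        let! y = b
        return x + y
    }

// addOptions (Some 1) (Some 2) = Some 3
// addOptions (Some 1) None     = None
```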
### Built-in computation expressions
The F# core library defines four built-in computation expressions: Sequence Expressions, Async expressions, Task expressions, and Query Expressions.
### Custom computation expression
Every computation expression is backed by a builder type. The builder type defines the operations that are available for the computation expression. The following table shows the methods you can define on a builder to create a custom computation expression.
Method | Typical signature(s) | Description
---|---|---
Bind | M<'T> * ('T -> M<'U>) -> M<'U> | Called for let! and do! in computation expressions.
Delay | (unit -> M<'T>) -> Delayed<'T> | Wraps a computation expression as a function. Delayed<'T> can be any type, commonly M<'T> or unit -> M<'T> are used. The default implementation returns a M<'T>.
Return | 'T -> M<'T> | Called for return in computation expressions.
ReturnFrom | M<'T> -> M<'T> | Called for return! in computation expressions.
Run | Delayed<'T> -> M<'T> or M<'T> -> 'T | Executes a computation expression.
Combine | M<'T> * Delayed<'T> -> M<'T> or M<unit> * M<'T> -> M<'T> | Called for sequencing in computation expressions.
For | seq<'T> * ('T -> M<'U>) -> M<'U> or seq<'T> * ('T -> M<'U>) -> seq<M<'U>> | Called for for...do expressions in computation expressions.
TryFinally | Delayed<'T> * (unit -> unit) -> M<'T> | Called for try...finally expressions in computation expressions.
TryWith | Delayed<'T> * (exn -> M<'T>) -> M<'T> | Called for try...with expressions in computation expressions.
Using | 'T * ('T -> M<'U>) -> M<'U> when 'T :> IDisposable | Called for use bindings in computation expressions.
While | (unit -> bool) * Delayed<'T> -> M<'T>or (unit -> bool) * Delayed<unit> -> M<unit> | Called for while...do expressions in computation expressions.
Yield | 'T -> M<'T> | Called for yield expressions in computation expressions.
YieldFrom | M<'T> -> M<'T> | Called for yield! expressions in computation expressions.
Zero | unit -> M<'T> | Called for empty else branches of if...then expressions in computation expressions.
Quote | Quotations.Expr<'T> -> Quotations.Expr<'T> | Indicates that the computation expression is passed to the Run member as a quotation. It translates all instances of a computation into a quotation.
The following code example shows a computation expression that encapsulates a computation as a series of steps that can be evaluated one step at a time. A discriminated union type, OkOrException, encodes the error state of the expression as evaluated so far. This code demonstrates several typical patterns that you can use in your computation expressions, such as boilerplate implementations of some of the builder methods.
```fsharp
/// Represents computations that can be run step by step
type Eventually<'T> =
| Done of 'T
| NotYetDone of (unit -> Eventually<'T>)
module Eventually =
/// Bind a computation using 'func'.
let rec bind func expr =
match expr with
| Done value -> func value
| NotYetDone work -> NotYetDone (fun () -> bind func (work()))
/// Return the final value
let result value = Done value
/// The catch for the computations. Stitch try/with throughout
/// the computation, and return the overall result as an OkOrException.
let rec catch expr =
match expr with
| Done value -> result (Ok value)
| NotYetDone work ->
NotYetDone (fun () ->
let res = try Ok(work()) with | exn -> Error exn
match res with
| Ok cont -> catch cont // note, a tailcall
| Error exn -> result (Error exn))
/// The delay operator.
let delay func = NotYetDone (fun () -> func())
/// The stepping action for the computations.
let step expr =
match expr with
| Done _ -> expr
| NotYetDone func -> func ()
/// The tryFinally operator.
/// This is boilerplate in terms of "result", "catch", and "bind".
let tryFinally expr compensation =
catch (expr)
|> bind (fun res ->
compensation();
match res with
| Ok value -> result value
| Error exn -> raise exn)
/// The tryWith operator.
/// This is boilerplate in terms of "result", "catch", and "bind".
let tryWith exn handler =
catch exn
|> bind (function Ok value -> result value | Error exn -> handler exn)
/// The whileLoop operator.
/// This is boilerplate in terms of "result" and "bind".
let rec whileLoop pred body =
if pred() then body |> bind (fun _ -> whileLoop pred body)
else result ()
/// The sequential composition operator.
/// This is boilerplate in terms of "result" and "bind".
let combine expr1 expr2 =
expr1 |> bind (fun () -> expr2)
/// The using operator.
/// This is boilerplate in terms of "tryFinally" and "Dispose".
let using (resource: #System.IDisposable) func =
tryFinally (func resource) (fun () -> resource.Dispose())
/// The forLoop operator.
/// This is boilerplate in terms of "catch", "result", and "bind".
let forLoop (collection:seq<_>) func =
let ie = collection.GetEnumerator()
tryFinally
(whileLoop
(fun () -> ie.MoveNext())
(delay (fun () -> let value = ie.Current in func value)))
(fun () -> ie.Dispose())
/// The builder class.
type EventuallyBuilder() =
member x.Bind(comp, func) = Eventually.bind func comp
member x.Return(value) = Eventually.result value
member x.ReturnFrom(value) = value
member x.Combine(expr1, expr2) = Eventually.combine expr1 expr2
member x.Delay(func) = Eventually.delay func
member x.Zero() = Eventually.result ()
member x.TryWith(expr, handler) = Eventually.tryWith expr handler
member x.TryFinally(expr, compensation) = Eventually.tryFinally expr compensation
member x.For(coll:seq<_>, func) = Eventually.forLoop coll func
member x.Using(resource, expr) = Eventually.using resource expr
let eventually = new EventuallyBuilder()
let comp =
eventually {
for x in 1..2 do
printfn $" x = %d{x}"
return 3 + 4
}
/// Try the remaining lines in F# interactive to see how this
/// computation expression works in practice.
let step x = Eventually.step x
// returns "NotYetDone <closure>"
comp |> step
// prints "x = 1"
// returns "NotYetDone <closure>"
comp |> step |> step
// prints "x = 1"
// prints "x = 2"
// returns "Done 7"
comp |> step |> step |> step |> step
```
### Translation of expression
Expression|Translation
-----|------
{ let binding in cexpr }|let binding in {\| cexpr \|}
{ let! pattern = expr in cexpr }|builder.Bind(expr, (fun pattern -> {\| cexpr \|}))
{ do! expr in cexpr }|builder.Bind(expr, (fun () -> {\| cexpr \|}))
{ yield expr }|builder.Yield(expr)
{ yield! expr }|builder.YieldFrom(expr)
{ return expr }|builder.Return(expr)
{ return! expr }|builder.ReturnFrom(expr)
{ use pattern = expr in cexpr }|builder.Using(expr, (fun pattern -> {\| cexpr \|}))
{ use! value = expr in cexpr }|builder.Bind(expr, (fun value -> builder.Using(value, (fun value -> { cexpr }))))
{ if expr then cexpr0 }|if expr then { cexpr0 } else builder.Zero()
{ if expr then cexpr0 else cexpr1 }|if expr then { cexpr0 } else { cexpr1 }
{ match expr with \| pattern_i -> cexpr_i }|match expr with \| pattern_i -> { cexpr_i }
{ for pattern in expr do cexpr }|builder.For(enumeration, (fun pattern -> { cexpr }))
{ for identifier = expr1 to expr2 do cexpr }|builder.For(enumeration, (fun identifier -> { cexpr }))
{ while expr do cexpr }|builder.While(fun () -> expr, builder.Delay({ cexpr }))
{ try cexpr with \| pattern_i -> expr_i }|builder.TryWith(builder.Delay({ cexpr }), (fun value -> match value with \| pattern_i -> expr_i \| exn -> System.Runtime.ExceptionServices.ExceptionDispatchInfo.Capture(exn).Throw())))
{ try cexpr finally expr }|builder.TryFinally(builder.Delay( { cexpr }), (fun () -> expr))
{ cexpr1; cexpr2 }|builder.Combine({ cexpr1 }, { cexpr2 })
{ other-expr; cexpr }|expr; { cexpr }
{ other-expr }|expr; builder.Zero()
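To make the table concrete, here is a rough desugaring of a small expression using the eventually builder defined above (the outer Delay wrapping that the compiler also inserts here is omitted):
```fsharp
// This computation expression ...
let small =
    eventually {
        let! a = Eventually.result 1
        return a + 1
    }

// ... corresponds approximately to these direct builder calls:
let smallDesugared =
    eventually.Bind(Eventually.result 1, fun a -> eventually.Return(a + 1))
```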
<file_sep>+++
title = "Exception & Assertion"
description = "Exception & Assertion"
weight=7
+++
## Exception & Assertion
* Avoid bad practices in Python exception handling.
* Always specify an exception type with except, but don't be too general.
* Don't use assertions for checking arguments
* Examples
* lookup exception
```python
def lookups():
s = [1, 4, 6]
try:
item = s[5]
except LookupError:
print("Handled IndexError")
d = dict(a=65, b=66, c=67)
try:
value = d['x']
except LookupError:
print("Handled KeyError")
if __name__ == '__main__':
lookups()
## test result
## Handled IndexError
## Handled KeyError
```
* unicode exception
```python
def unicode_exception():
try:
b'\x81'.decode('utf-8')
except UnicodeError as e:
print(e)
print("encoding:", e.encoding)
print("reason:", e.reason)
print("object:", e.object)
print("start:", e.start)
print("end", e.end)
if __name__ == '__main__':
unicode_exception()
## test result
## 'utf-8' codec can't decode byte 0x81 in position 0: invalid start byte
## encoding: utf-8
## reason: invalid start byte
## object: b'\x81'
## start: 0
## end 1
```
* customized exception with parameters
```python
import math
import sys
import io
class TriangleError(Exception):
def __init__(self, text, sides):
super().__init__(text)
self._sides = tuple(sides)
@property
def sides(self):
return self._sides
def __str__(self):
return "'{}' for sides {}".format(self.args[0], self._sides)
def __repr__(self):
return "TriangleError({!r}, {!r}".format(self.args[0], self._sides)
def triangle_area(a, b, c):
sides = sorted((a, b, c))
if sides[2] > sides[0] + sides[1]:
raise TriangleError("Illegal triangle", sides)
p = (a + b + c) / 2
a = math.sqrt(p * (p - a) * (p - b) * (p - c))
return a
def triangle_exception():
try:
a = triangle_area(3, 4, 10)
print(a)
except TriangleError as e:
try:
print(e, file=sys.stdin)
except io.UnsupportedOperation as f:
print(e)
print(f)
print(f.__context__ is e)
if __name__ == '__main__':
triangle_exception()
## test result
## 'Illegal triangle' for sides (3, 4, 10)
## not writable
## True
```
* chaining & trackback
```python
import math
import traceback
class InclinationError(Exception):
pass
def inclination(dx, dy):
try:
return math.degrees(math.atan(dy / dx))
except ZeroDivisionError as e:
raise InclinationError("Slope cannot be vertical") from e
def traceback_inclination():
try:
inclination(0, 5)
except InclinationError as e:
print(e.__traceback__)
traceback.print_tb(e.__traceback__)
s = traceback.format_tb(e.__traceback__)
print(s)
if __name__ == '__main__':
traceback_inclination()
print("Done.")
## test result
#<traceback object at 0x000000BE3B4F5108>
## File "/path/to/your_project/__main__.py", line 190, in traceback_inclination
## inclination(0, 5)
## File "/path/to/your_project/__main__.py", line 185, in inclination
## raise InclinationError("Slope cannot be vertical") from e
#[' File "/path/to/your_project/__main__.py", line 190,
## in traceback_inclination\n
## inclination(0,## 5)\n',
## ' File "/path/to/your_project/__main__.py", line 185,
## in inclination\n
## raise
## InclinationError("Slope cannot be vertical") from e\n']
```
* assertion & exception
```python
from pprint import pprint as pp
def wrap(text, line_length):
"""Wrap a string to a specified line length.
Args:
text: The string to wrap.
line_length: The line length in characters.
Returns:
A wrapped string.
Raises:
ValueError: If line_length is not positive.
"""
if line_length < 1:
raise ValueError("line_length {} is not positive".format(line_length))
words = text.split()
if max(map(len, words)) > line_length:
raise ValueError("line_length must be at least as long as the longest word")
lines_of_words = []
current_line_length = line_length
for word in words:
if current_line_length + len(word) > line_length:
lines_of_words.append([]) ## new line
current_line_length = 0
lines_of_words[-1].append(word)
current_line_length += len(word) + len(' ')
lines = [' '.join(line_of_words) for line_of_words in lines_of_words]
result = '\n'.join(lines)
assert all(len(line) <= line_length for line in result.splitlines())
return result
wealth_of_nations = "The annual labour of every nation is the fund which or" \
"iginally supplies it with all the necessaries and conveniencies of life wh" \
"ich it annually consumes, and which consist always either in the immediate" \
" produce of that labour, or in what is purchased with that produce from ot" \
"her nations. According, therefore, as this produce, or what is purchased w" \
"ith it, bears a greater or smaller proportion to the number of those who a" \
"re to consume it, the nation will be better or worse supplied with all the" \
" necessaries and conveniencies for which it has occasion."
if __name__ == "__main__":
pp(wrap( wealth_of_nations, 40))
## test result
## ('The annual labour of every nation is the\n'
## 'fund which originally supplies it with\n'
## 'all the necessaries and conveniencies of\n'
## 'life which it annually consumes, and\n'
## 'which consist always either in the\n'
## 'immediate produce of that labour, or in\n'
## 'what is purchased with that produce from\n'
## 'other nations. According, therefore, as\n'
## 'this produce, or what is purchased with\n'
## 'it, bears a greater or smaller\n'
## 'proportion to the number of those who\n'
## 'are to consume it, the nation will be\n'
## 'better or worse supplied with all the\n'
## 'necessaries and conveniencies for which\n'
## 'it has occasion.')
```
<file_sep>+++
title = "JS & ES Note - 2"
description="The equal operator doesn't always mean equivalent"
+++
## The equal operator doesn't always mean equivalent
> Have you ever been confused by the equality or inequality operators in JavaScript? You are definitely not the only one. Many senior developers who come from a back-end programming background scratch their heads trying to figure out why an equality or inequality expression doesn't work as they expect. The truth is that these operators really do behave differently from other programming languages.
### "===" is not the same as "=="
* JavaScript has both strict and type–converting comparisons. A strict comparison (e.g., ===) is only true if the operands are of the same type and the contents match. The more commonly-used abstract comparison (e.g. ==) converts the operands to the same type before making the comparison.
* The equality operator (==) converts the operands if they are not of the same type, then applies strict comparison. If both operands are objects, then JavaScript compares internal references which are equal when operands refer to the same object in memory.
* The identity operator (===) returns true if the operands are strictly equal.
### "!=" is not the same as "!=="
* The inequality operator (!=) returns true if the operands are not equal. If the two operands are not of the same type, JavaScript attempts to convert the operands to an appropriate type for the comparison.
* The non-identity operator (!==) returns true if the operands are not equal and/or not of the same type.
### Following are samples to show the difference
> Try to test yourself before checking the answers
#### Sample 1
```js
console.log( " 1==true : ", 1==true)
console.log( " ''==true : ", ''==true)
console.log( " '1'==true : ", '1'==true)
console.log( " \"1\"==true : ", "1"==true)
console.log( " {}==true : ", [{}]==true)
console.log( " []==true : ", ['1']==true)
```
#### Answer of Sample 1
```js
console.log( " 1==true : ", 1==true) // 1==true : true
console.log( " ''==true : ", ''==true) // ''==true : false
console.log( " '1'==true : ", '1'==true) // '1'==true : true
console.log( " \"1\"==true : ", "1"==true)// "1"==true : true
console.log( " {}==true : ", [{}]==true) // {}==true : false
console.log( " ['1']==true : ", ['1']==true) // []==true : true
```
#### Sample 2
```js
console.log( " 0==false : ", 0==false)
console.log( " 1==false : ", 1==false)
console.log( " ''==false : ", ''==false)
console.log( " '1'==false : ", '1'==false)
console.log( " \"\"==false : ", ""==false)
console.log( " {}==false : ", {}==false)
console.log( " []==false : ", []==false)
console.log( " ['0']==false : ", ['0']==false)
```
#### Answer of Sample 2
```js
console.log( " 0==false : ", 0==false) // 0==false : true
console.log( " 1==false : ", 1==false) // 1==false : false
console.log( " ''==false : ", ''==false) // ''==false : true
console.log( " '1'==false : ", '1'==false)// '1'==false : false
console.log( " \"\"==false : ", ""==false)// ""==false : true
console.log( " {}==false : ", {}==false) // {}==false : false
console.log( " []==false : ", []==false) // []==false : true
console.log( " ['0']==false : ", ['0']==false) // ['0']==false : true
```
#### Sample 3
```js
console.log( " null==false : ", null==false)
console.log( " undefined==false : ", undefined==false)
console.log( " undefined==null : ", null==undefined)
```
#### Answer of Sample 3
```js
console.log( " null==false : ", null==false)
// null==false: false
console.log( " undefined==false : ", undefined==false)
// undefined==false: false
console.log( " undefined==null : ", null==undefined)
// undefined==null: true
```
### Tricks to compare arrays of numbers
* Compare two arrays to make sure both contain the same numbers
```js
const arr1 = [1,2.20,-3.5]
const arr2 = [1.0,-3.5,2.2]
// sort numerically so the string forms line up in the same order
console.log( " arr1 = arr2 ? ", arr1.sort((a,b) => a-b).toString() === arr2.sort((a,b) => a-b).toString())
```
<file_sep>+++
title = "F# Active Patterns"
description = "F# Active Patterns"
weight = 9
+++
## Active Patterns
Active patterns enable you to define named partitions that subdivide input data, so that you can use these names in a pattern matching expression just as you would for a discriminated union. You can use active patterns to decompose data in a customized manner for each partition.
### Syntax
```fsharp
// Active pattern of one choice.
let (|identifier|) [arguments] valueToMatch = expression
// Active Pattern with multiple choices.
// Uses a FSharp.Core.Choice<_,...,_> based on the number of case names. In F#, the limitation n <= 7 applies.
let (|identifier1|identifier2|...|) valueToMatch = expression
// Partial active pattern definition.
// Uses a FSharp.Core.option<_> to represent if the type is satisfied at the call site.
let (|identifier|_|) [arguments] valueToMatch = expression
```
### Examples
- Multiple choices
```fsharp
let (|Even|Odd|) input = if input % 2 = 0 then Even else Odd
let TestNumber input =
match input with
| Even -> printfn "%d is even" input
| Odd -> printfn "%d is odd" input
TestNumber 7
TestNumber 11
TestNumber 32
// output
// 7 is odd
// 11 is odd
// 32 is even
```
- Decompose data types
```fsharp
open System.Drawing
let (|RGB|) (col : System.Drawing.Color) =
( col.R, col.G, col.B )
let (|HSB|) (col : System.Drawing.Color) =
( col.GetHue(), col.GetSaturation(), col.GetBrightness() )
let printRGB (col: System.Drawing.Color) =
match col with
| RGB(r, g, b) -> printfn " Red: %d Green: %d Blue: %d" r g b
let printHSB (col: System.Drawing.Color) =
match col with
| HSB(h, s, b) -> printfn " Hue: %f Saturation: %f Brightness: %f" h s b
let printAll col colorString =
printfn "%s" colorString
printRGB col
printHSB col
printAll Color.Red "Red"
printAll Color.Black "Black"
printAll Color.White "White"
printAll Color.Gray "Gray"
printAll Color.BlanchedAlmond "BlanchedAlmond"
// Red
// Red: 255 Green: 0 Blue: 0
// Hue: 360.000000 Saturation: 1.000000 Brightness: 0.500000
// Black
// Red: 0 Green: 0 Blue: 0
// Hue: 0.000000 Saturation: 0.000000 Brightness: 0.000000
// White
// Red: 255 Green: 255 Blue: 255
// Hue: 0.000000 Saturation: 0.000000 Brightness: 1.000000
// Gray
// Red: 128 Green: 128 Blue: 128
// Hue: 0.000000 Saturation: 0.000000 Brightness: 0.501961
// BlanchedAlmond
// Red: 255 Green: 235 Blue: 205
// Hue: 36.000000 Saturation: 1.000000 Brightness: 0.901961
```
### Partial Active Patterns
Active patterns that do not always produce a value are called partial active patterns; they have a return value that is an option type. To define a partial active pattern, you use a wildcard character (_) at the end of the list of patterns inside the banana clips.
```fsharp
let (|Integer|_|) (str: string) =
let mutable intvalue = 0
if System.Int32.TryParse(str, &intvalue) then Some(intvalue)
else None
let (|Float|_|) (str: string) =
let mutable floatvalue = 0.0
if System.Double.TryParse(str, &floatvalue) then Some(floatvalue)
else None
let parseNumeric str =
match str with
| Integer i -> printfn "%d : Integer" i
| Float f -> printfn "%f : Floating point" f
| _ -> printfn "%s : Not matched." str
parseNumeric "1.1"
parseNumeric "0"
parseNumeric "0.0"
parseNumeric "10"
parseNumeric "Something else"
// output
// 1.100000 : Floating point
// 0 : Integer
// 0.000000 : Floating point
// 10 : Integer
// Something else : Not matched.
let err = 1.e-10
let isNearlyIntegral (x:float) = abs (x - round(x)) < err
let (|Square|_|) (x : int) =
if isNearlyIntegral (sqrt (float x)) then Some(x)
else None
let (|Cube|_|) (x : int) =
if isNearlyIntegral ((float x) ** ( 1.0 / 3.0)) then Some(x)
else None
let findSquareCubes x =
match x with
| Cube x & Square _ -> printfn "%d is a cube and a square" x
| Cube x -> printfn "%d is a cube" x
| _ -> ()
[ 1 .. 1000 ] |> List.iter (fun elem -> findSquareCubes elem)
// output
// 1 is a cube and a square
// 8 is a cube
// 27 is a cube
// 64 is a cube and a square
// 125 is a cube
// 216 is a cube
// 343 is a cube
// 512 is a cube
// 729 is a cube and a square
// 1000 is a cube
```
### Parameterized Active Patterns
Active patterns always take at least one argument for the item being matched, but they may take additional arguments as well, in which case the name parameterized active pattern applies. Additional arguments allow a general pattern to be specialized.
```fsharp
open System.Text.RegularExpressions
// ParseRegex parses a regular expression and returns a list of the strings that match each group in
// the regular expression.
// List.tail is called to eliminate the first element in the list, which is the full matched expression,
// since only the matches for each group are wanted.
let (|ParseRegex|_|) regex str =
let m = Regex(regex).Match(str)
if m.Success
then Some (List.tail [ for x in m.Groups -> x.Value ])
else None
// Three different date formats are demonstrated here. The first matches two-
// digit dates and the second matches full dates. This code assumes that if a two-digit
// date is provided, it is an abbreviation, not a year in the first century.
let parseDate str =
match str with
| ParseRegex "(\d{1,2})/(\d{1,2})/(\d{1,2})$" [Integer m; Integer d; Integer y]
-> new System.DateTime(y + 2000, m, d)
| ParseRegex "(\d{1,2})/(\d{1,2})/(\d{3,4})" [Integer m; Integer d; Integer y]
-> new System.DateTime(y, m, d)
| ParseRegex "(\d{1,4})-(\d{1,2})-(\d{1,2})" [Integer y; Integer m; Integer d]
-> new System.DateTime(y, m, d)
| _ -> new System.DateTime()
let dt1 = parseDate "12/22/08"
let dt2 = parseDate "1/1/2009"
let dt3 = parseDate "2008-1-15"
let dt4 = parseDate "1995-12-28"
printfn "12/22/08 -> %s" (dt1.ToString())
printfn "1/1/2009 -> %s" (dt2.ToString())
printfn "2008-1-15 -> %s" (dt3.ToString())
printfn "1995-12-28 -> %s" (dt4.ToString())
// output
// 12/22/08 -> 22/12/2008 12:00:00 am
// 1/1/2009 -> 1/1/2009 12:00:00 am
// 2008-1-15 -> 15/1/2008 12:00:00 am
// 1995-12-28 -> 28/12/1995 12:00:00 am
// 2000.12.21 -> 1/1/0001 12:00:00 am
```
Active patterns are not restricted only to pattern matching expressions, you can also use them on let-bindings.
```fsharp
let (|Default|) onNone value =
match value with
| None -> onNone
| Some e -> e
let greet (Default "random citizen" name) =
printfn "Hello, %s!" name
greet None
greet (Some "George")
// Hello, random citizen!
// Hello, George!
```
<file_sep>+++
date = "2016-12-07T16:56:21+11:00"
title = "React Redux CRM Project"
description = "React-Crm is reusable CRM starter project for real-world business based on React 15.4"
+++
## Summary
**React-Crm** is a reusable CRM starter project for real-world business, based on React 15.4, React-Redux & Material-UI.
The goal of this starter project is to create a reusable project for real-world business. To achieve this target, we need a solution that includes an authentication process, a RESTful API with token support, and a simple but elegant UI design.
## __Features__
* This project is built on the top of React/Redux.
* The UI part of this project uses Material-UI.
* This project uses Redux-Thunk to support backend API.
* It uses Json-Server as a fake RESTful API. (You can simply replace it with your own API.)
## Structure of React-Crm
``` ini
path\to\ng4crm
+---config <-// configuration of dev or prod environment
+---db <-// json files for json-server
| +---db.json <-// dummy db
| \---routes.json <-// configure fake restful api
+---screenshots
+---src <-// vue components
| +---app
| | +---_gurad <-// auth guard for authentication
| | +---_models <-// common models for whole app
| | +---_services <-// common services for whole app
| | +---about <-// about component
| | +---customer <-// customer component
| | +---dashboard <-// dashboard component
| | +---notfoundpage <-// notfoundpage component
| | +---login <-// login component
| | +---order <-// customer component
| | +---root <-// root component
| | +---shared <-// common component for whole app
| | +---app.component.ts
| | +---app.module.ts
| | +---app.routes.ts
| | +---app.services.ts
| | +---environment.ts
| | \---...
| +---assets <-// images and css from third parties
| +---styles <-// customized css files
| +---main.browser.aot.ts
| +---main.browser.ts
| +---polyfills.browser.ts
| \---...
\...
```
## Screenshots
> 
> 
> 
> 
## Browse [Repository](https://github.com/harryho/react-crm.git)
## __Alternatives__
There are two similar projects built on Vue.js and Angular respectively. If you are interested in those technical stacks, you can find and clone those projects below.
* [Vue2 Crm](/projects/vue2-crm/).
* [Angular4 Crm](/projects/angular4-crm).
<file_sep>+++
title = "Java Note - 7: Stream API"
description="Java Stream"
hidden="true"
+++
## Stream
### Iterations
### Collections and Maps
## Filtering
## Mapping
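A minimal sketch covering both filtering and mapping with the Stream API (the numbers and class name are arbitrary):
```java
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class StreamFilterMapDemo {
    public static void main(String[] args) {
        List<Integer> numbers = Arrays.asList(1, 2, 3, 4, 5, 6);
        // filter keeps the even numbers, map squares them
        List<Integer> evenSquares = numbers.stream()
                .filter(n -> n % 2 == 0)
                .map(n -> n * n)
                .collect(Collectors.toList());
        System.out.println(evenSquares); // [4, 16, 36]
    }
}
```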
### LambdaCollectionDemo
### Lambda Collection Map Demo
```java
public class LambdaCollectionMapDemo {
public static void main(String[] args) {
// FunctionalInterface
        System.out.println("x + y:" + engine((x, y) -> x + y));
        System.out.println("x * y:" + engine((x, y) -> x * y));
        System.out.println("x / y:" + engine((x, y) -> x / y));
        System.out.println("x % y:" + engine((x, y) -> x % y));
String[] strArray = new String[] { "abc", "klm", "xyz", "pqr" };
        List<String> list = Arrays.asList(strArray);
// Default Methods
list.forEach(System.out::println);
Arrays.sort(strArray, (first, second) -> first.compareToIgnoreCase(second));
list = Arrays.asList(strArray);
System.out.println("After sorting ... ");
list.forEach(System.out::println);
// Common Functional Interfaces
// Runnable
repeat(5, () -> System.out.println("Hello")) ;
// UnaryOperator
UnaryOperator<String> upperCase = str -> str.toUpperCase();
        // BinaryOperator
BinaryOperator<String> concat = (left,right) -> left + right;
System.out.println( " UnaryOperator upperCase "+upperCase.apply( "hello") );
System.out.println( " BinaryOperator<String> concat "+ concat.apply("hello","world"));
}
private static int engine(Calculator calculator) {
int x = 2, y = 4;
return calculator.calculate(x, y);
}
public static void repeat(int n, Runnable action) {
for (int i = 0; i < n; i++)
action.run();
}
}
@FunctionalInterface
interface Calculator {
int calculate(int x, int y);
}
```
<file_sep>+++
title = "AWS: VPC - 2"
description = "VPC - Route table, Security Group, Network ACL"
weight=4
+++
## VPC Part 2
VPC has an implicit router (implied router), and you use route tables to control where network traffic is directed.
### Route table
* Have up to 200 route tables per VPC
* Have up to 50 route entries per route table
* Each subnet must be associated with only one route table
* The subnet (when created) will be associated with the main (default) VPC route table
* Can change the subnet association to another route table
* Can NOT delete the main route table
* Every route table in a VPC comes with a default rule that allows all VPC subnets to communicate with one another. This rule can NOT be modified or deleted.
### Security Group
* It is a virtual firewall
* It controls traffic at the EC2 instance level
* Up to 5 security groups per EC2 instance
* Stateful: return traffic of allowed inbound traffic is allowed, even if there are no rules to allow it
* Can only have permit rules, can NOT have deny rules
* Implicit deny rule at the end
* A security group is associated with the EC2 instance's network interface
* Any change on a security group takes effect immediately
* The default security group can not be deleted
* It is a VPC resource; hence, different EC2 instances in different AZs within the same VPC can have the same security group applied to them.
* It can NOT block a certain range of IP addresses from the Internet from getting to EC2 fleets
#### Default vs Customized Group
The default security group has inbound and outbound rules when created. The inbound rule allows all traffic in from the same security group. The outbound rule allows all traffic to any destination. A customized security group has only the outbound rule by default.
### Network ACL
* A default NACL allows all traffic inbound and outbound.
* A customized NACL blocks/denies all traffic inbound / outbound by default.
* A NACL can block certain ranges of IP addresses from a large pool (Internet addresses, for instance)
* A network ACL can be associated with multiple subnets. However, a subnet can be associated with ONLY ONE network ACL at a time. When you associate a network ACL with a subnet, the previous association is removed.
### Security Group vs NACL
Security Group | NACL
---------------|----------
Operates at the instance level| Operates at the subnet level
Supports allow rules only| Supports allow rules and deny rules
Is stateful: Return traffic is automatically allowed, regardless of any rules| Is stateless: Return traffic must be explicitly allowed by rules
We evaluate all rules before deciding whether to allow traffic| We process rules in number order when deciding whether to allow traffic
Applies to an instance only if someone specifies the security group when launching the instance, or associates the security group with the instance later on| Automatically applies to all instances in the subnets that it's associated with (therefore, it provides an additional layer of defense if the security group rules are too permissive)
### NAT instance
You can use a network address translation (NAT) instance in a public subnet of your VPC to enable instances in the private subnet to initiate outbound IPv4 traffic to the Internet or other AWS services, while preventing those instances from receiving inbound traffic initiated by someone on the Internet.
Amazon provides Amazon Linux AMIs that are configured to run as NAT instances. These AMIs include the string amzn-ami-vpc-nat in their names, so you can search for them in the Amazon EC2 console.
#### Route table & Security Group setting
* Assumption
- The CIDR of VPC is 10.0.0.0/16
- Public subnet is 10.0.1.0/24
- Private subnet is 10.0.2.0/24
- Identifier of Nat Instance is __nat-instance-id__
* Custom route table of public subnet
Destination | Target
---|----
10.0.0.0/16 | local
0.0.0.0/0 | igw-id
* Main route table
Destination | Target
----- | ----
10.0.0.0/16 | local
0.0.0.0/0 | nat-instance-id
### NAT Gateway
Use a network address translation (NAT) gateway to enable instances in a private subnet to connect to the Internet or other AWS services, while preventing the Internet from initiating a connection with those instances.
### NAT Instance vs NAT Gateway
Attribute | NAT gateway | NAT instance
-----------|------------------|--------------
Availability | Highly available. NAT gateways in each Availability Zone are implemented with redundancy. Create a NAT gateway in each Availability Zone to ensure zone-independent architecture. | Use a script to manage failover between instances.
Bandwidth | Can scale up to 45 Gbps. | Depends on the bandwidth of the instance type.
Maintenance | Managed by AWS. You do not need to perform any maintenance. | Managed by you, for example, by installing software updates or operating system patches on the instance.
Performance | Software is optimized for handling NAT traffic. | A generic Amazon Linux AMI that's configured to perform NAT.
Cost | Charged depending on the number of NAT gateways you use, duration of usage, and amount of data that you send through the NAT gateways. | Charged depending on the number of NAT instances that you use, duration of usage, and instance type and size.
Type and size | Uniform offering; you don’t need to decide on the type or size. | Choose a suitable instance type and size, according to your predicted workload.
Public IP addresses | Choose the Elastic IP address to associate with a NAT gateway at creation. | Use an Elastic IP address or a public IP address with a NAT instance. You can change the public IP address at any time by associating a new Elastic IP address with the instance.
Private IP addresses | Automatically selected from the subnet's IP address range when you create the gateway. | Assign a specific private IP address from the subnet's IP address range when you launch the instance.
Security groups | Cannot be associated with a NAT gateway. You can associate security groups with your resources behind the NAT gateway to control inbound and outbound traffic. | Associate with your NAT instance and the resources behind your NAT instance to control inbound and outbound traffic.
Network ACLs | Use a network ACL to control the traffic to and from the subnet in which your NAT gateway resides. | Use a network ACL to control the traffic to and from the subnet in which your NAT instance resides.
Flow logs | Use flow logs to capture the traffic. | Use flow logs to capture the traffic.
Port forwarding | Not supported. | Manually customize the configuration to support port forwarding.
Bastion servers | Not supported. | Use as a bastion server.
Traffic metrics | View CloudWatch metrics for the NAT gateway. | View CloudWatch metrics for the instance.
Timeout behavior | When a connection times out, a NAT gateway returns an RST packet to any resources behind the NAT gateway that attempt to continue the connection (it does not send a FIN packet). | When a connection times out, a NAT instance sends a FIN packet to resources behind the NAT instance to close the connection.
IP fragmentation | Supports forwarding of IP fragmented packets for the UDP protocol. Does not support fragmentation for the TCP and ICMP protocols. Fragmented packets for these protocols will get dropped. | Supports reassembly of IP fragmented packets for the UDP, TCP, and ICMP protocols.
<file_sep>+++
date = "2011-03-09T10:59:31+11:00"
title = "Scrum VS Kanban"
description="Comparison between Scrum & Kanban"
weight=1
+++
## Scrum VS Kanban
Kanban and scrum are frameworks that help teams adhere to agile principles and get stuff done. Both frameworks will help you build better products (and services) with fewer headaches. While the practices differ, the principles are largely the same.
### Agile
Agile is a structured and iterative approach to project management and product development. It recognizes the volatility of product development, and provides a methodology for self-organizing teams to respond to change without going off the rails.
### Scrum
Scrum teams adopt specific roles, create special artifacts, and hold regular ceremonies to keep things moving forward. Their goal is to create learning loops to quickly gather and integrate customer feedback.
### Kanban
Kanban is all about visualizing your work, limiting work in progress, and maximizing efficiency (or flow). Kanban teams focus on reducing the time it takes to move a project (or user story) from start to finish. They do this by using a kanban board and continuously improving their flow of work.
### Difference
||Scrum|Kanban
|---| --- | --- |
Cadence| Regular fixed length sprints (ie, 2 weeks)| Continuous flow
Release methodology| At the end of each sprint| Continuous delivery
Roles| Product owner, scrum master, development team| No required roles
Key metrics| Velocity| Lead time, cycle time, WIP
Change philosophy| Teams should not make changes during the sprint.| Change can happen at any time
<file_sep>Kogan.com Recruitment Quiz
Total Questions: 15
1)
Seven violin students — A, B, C, D, E, F, and G — are to give a recital, and their instructor is deciding the order in which they will perform. Each student will play exactly one piece, a violin solo. In deciding the order of performance, the instructor must observe the following restrictions:
i) E cannot play first or second.
ii) D cannot play until E has played.
iii) Neither A nor F can play seventh.
iv) Either F or G must play immediately after D plays.
v) C must play either immediately after or immediately before B plays.
If C plays first, which one of the following must be true?
- [ ] A plays sixth.
- [ ] E plays third.
- [X] G plays seventh.
- [ ] A plays immediately after F.
- [ ] D plays immediately after E.
2)
For the above question, if B plays third, what is the latest position in which F can play?
- [ ] First
- [ ] Second
- [ ] Fifth
- [x] Sixth
- [ ] Seventh
3)
Arun leaves his house to go for a run. He starts off by heading 1.5 kilometres north. He then turns to his right and runs a further 500 metres, and then turns to his left. After running a further 1 kilometre, he turns left and runs another 2 kilometres. He finally turns left again and runs for a further 2.5 kilometres.
How far away from his house is he, and in what direction?
- [ ] 1 kilometre west
- [ ] 1.5 kilometres east
- [ ] 1 kilometre east
- [ ] He's back at his house
- [x] 1.5 kilometres west
4)
Professor Smith lets his students choose their own lab partners in science class, his only rule is that no two students may work together for more than seven consecutive classes. Alex and Barry have been partners for the past seven consecutive classes. Colin and Doug have worked together for the past three classes. Colin doesn't like Alex and refuses to partner with him.
Who should be assigned to work with Barry?
- [x] Colin
- [ ] Doug
- [ ] <NAME>
- [ ] Alex
5)
A Work Health & Safety committee needs to appoint a chairperson, which will be determined based on length of tenure in the organisation. Raj joined the company before Jorge, but after Felicity. Harvey has been an employee for longer than Jorge, but not as long as Raj. Samuel was the last employee hired prior to Raj joining the business. Felicity doesn't want the job, as she is already the chairperson for the social club committee.
Who will be the new Work Health & Safety committee chairperson?
- [ ] Raj
- [ ] Jorge
- [x] Samuel
- [ ] Harvey
- [ ] Felicity
6)
Six school children J, K, L, M, N and O are standing in a circle. J is between N and L. M is to the left of O. K is between O and L.
Who is between J and M?
- [ ] K
- [ ] O
- [ ] L
- [x] N
7)
What day comes two days after the day which comes three days after the day which comes immediately after the day which comes three days after Wednesday?
- [ ] Saturday
- [ ] Wednesday
- [x] Thursday
- [ ] Tuesday
- [ ] Sunday
- [ ] Friday
- [ ] Monday
8)
Amanda loves to eat chocolate but not ice cream while Tim loves to eat potato chips and ice cream. Sally loves to eat potato chips but not the chocolate and Steve loves to eat ice cream but not the potato chips.
If each child loves to eat two of the three snacks, which child has the same preference as Tim?
- [ ] Amanda
- [ ] None of them
- [ ] Steve
- [x] Sally
9)
Jeremy states: When I went fishing the other day, every fish that I caught was a tuna, and every tuna I saw I caught.
Of the following statements listed below, which one can be concluded from the observations of Jeremy?
- [ ] Jeremy did not see any other fish while he was fishing.
- [x] While Jeremy was fishing, he caught no fish other than tuna.
- [ ] In the area that Jeremy fished, there were no other fish.
- [ ] All of the fish that Jeremy saw he caught.
- [ ] Tuna was the only fish that Jeremy saw while he was fishing.
10)
Four defenders in a football match — Defender 1, Defender 2, Defender 3 & Defender 4 — take their positions in this order in a row from right to left. During the match, Defender 1 changes places with Defender 3 and then Defender 3 changes places with Defender 2.
Which defender is now at the left end of the row?
- [ ] Defender 3
- [ ] Defender 2
- [ ] Defender 1
- [x] Defender 4
11)
Identify the two statements necessary to make the following conclusion true:
__Roger is swimming.__
i) Roger is in the pool during the winter.
ii) Roger is in the pool.
iii) Swimming takes place in the pool during the winter.
iv) All the boys in the pool are swimming.
- [ ] i & ii
- [x] ii & iv
- [ ] iii & iv
- [ ] iv & i
12)
A mountain climber is slowly making his way up a steep path over difficult terrain. Each day he climbs 3 kilometres, however by the time he wakes up each morning he has somehow slipped backwards by 2 kilometres over the course of the night.
If the path is 30 kilometres long, how many days will it take him to reach the end?
- [ ] 27
- [x] 28
- [ ] 29
- [ ] 30
13)
Four strangers saw someone attacked on the street. Each person gave a different description of the assailant to the police.
Which description is most likely correct?
- [ ] He was short, overweight and middle aged
- [ ] He was tall, overweight and young
- [x] He was short, overweight and young
- [ ] He was short, thin and young
14)
Six food items are stacked in a cupboard divided into several racks. Mustard sits on the rack right above the salt and the sugar is in the rack right below the apple juice. Apple juice is in the rack right above the mustard and the salt shares the rack with the coffee. Cashews are kept on the lowest rack.
Which item is on the topmost rack?
- [x] Apple juice
- [ ] Coffee
- [ ] Mustard
- [ ] Salt
15)
In the question above, which two other items (aside from salt & coffee) share a rack?
- [ ] Mustard & apple juice
- [ ] Cashews & sugar
- [X] Sugar & mustard
- [ ] None of above is correct
<file_sep>+++
title = "Test"
description="Rustlang Introduction: Test"
weight = 5
draft = true
+++
### Test
#### Introduction of test
Tests are Rust functions that verify that the non-test code is functioning in the expected manner. The bodies of test functions typically perform these three actions:
* Set up any needed data or state.
* Run the code you want to test.
* Assert the results are what you expect.
#### Simple test sample
```rs
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
assert_eq!(2 + 2, 4);
}
}
```
#### The assert Macro
The assert! macro, provided by the standard library, is useful when you want to ensure that some condition in a test evaluates to true.
However, this is such a common test that the standard library provides a pair of macros, assert_eq! and assert_ne!, which conveniently compare the result of the code under test to the value you expect the code to return.
Under the surface, the assert_eq! and assert_ne! macros use the operators == and !=, respectively. When the assertions fail, these macros print their arguments using debug formatting, which means the values being compared must implement the PartialEq and Debug traits. All the primitive types and most of the standard library types implement these traits. For structs and enums that you define, you’ll need to implement PartialEq to assert that values of those types are equal or not equal. You’ll need to implement Debug to print the values when the assertion fails.
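For example, a small sketch (the Point type below is purely illustrative) of comparing values of a custom struct once PartialEq and Debug are derived:
```rs
#[derive(Debug, PartialEq)]
struct Point {
    x: i32,
    y: i32,
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn points_compare() {
        // assert_eq! needs PartialEq to compare and Debug to print values on failure.
        assert_eq!(Point { x: 1, y: 2 }, Point { x: 1, y: 2 });
        assert_ne!(Point { x: 1, y: 2 }, Point { x: 0, y: 0 });
    }
}
```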
#### Failure Messages
Custom messages are useful to document what an assertion means; when a test fails, you’ll have a better idea of what the problem is with the code.
```rs
#[test]
fn greeting_contains_name() {
let result = greeting("Carol");
assert!(
result.contains("Carol"),
"Greeting did not contain name, value was `{}`", result
);
}
```
#### Use should_panic and message
Tests that use should_panic can be imprecise because they only indicate that the code caused some panic. A should_panic test would pass even if the test panics for a different reason from the one we were expecting. To make should_panic tests more precise, we can add an optional expected parameter to the should_panic attribute. The test harness will make sure that the failure message contains the provided text.
```rs
impl Guess {
pub fn new(value: i32) -> Guess {
if value < 1 {
panic!("Guess value must be greater than or equal to 1, got {}.",
value);
} else if value > 100 {
panic!("Guess value must be less than or equal to 100, got {}.",
value);
}
Guess {
value
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
#[should_panic(expected = "Guess value must be less than or equal to 100")]
fn greater_than_100() {
Guess::new(200);
}
}
```
#### Using Result<T, E>
Writing tests so they return a Result<T, E> enables you to use the question mark operator in the body of tests, which can be a convenient way to write tests that should fail if any operation within them returns an Err variant.
```rs
#[cfg(test)]
mod tests {
#[test]
fn it_works() -> Result<(), String> {
if 2 + 2 == 4 {
Ok(())
} else {
Err(String::from("two plus two does not equal four"))
}
}
}
```
### How to run test
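The standard way is cargo test, which compiles the crate in test mode and runs every function annotated with #[test]. A few commonly used invocations (the name filter below is just an example):
```
cargo test                      # run all tests
cargo test greeting             # run only tests whose name contains "greeting"
cargo test -- --test-threads=1  # run tests serially instead of in parallel
cargo test -- --nocapture       # show output printed by passing tests
```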
<file_sep>+++
title = "ABC"
description = "ABC - Abstract Base Classes"
weight=9
+++
## ABC - Abstract Base Classes
Abstract base classes complement duck-typing by providing a way to define interfaces when other techniques like hasattr() would be clumsy or subtly wrong (for example with magic methods). ABCs introduce virtual subclasses, which are classes that don’t inherit from a class but are still recognized by isinstance() and issubclass(); see the abc module documentation. Python comes with many built-in ABCs for data structures (in the collections module), numbers (in the numbers module), and streams (in the io module). You can create your own ABCs with the abc module.
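For example, a quick check against one of the built-in ABCs from the collections module:
```python
from collections.abc import Sequence

# list and tuple are registered as Sequence; dict is a Mapping, not a Sequence.
assert issubclass(list, Sequence)
assert isinstance((1, 2, 3), Sequence)
assert not isinstance({"a": 1}, Sequence)
```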
## ABCMeta
Metaclass for defining Abstract Base Classes (ABCs).
Use this metaclass to create an ABC. An ABC can be subclassed directly, and then acts as a mix-in class. You can also register unrelated concrete classes (even built-in classes) and unrelated ABCs as “virtual subclasses” – these and their descendants will be considered subclasses of the registering ABC by the built-in issubclass() function, but the registering ABC won’t show up in their MRO (Method Resolution Order) nor will method implementations defined by the registering ABC be callable (not even via super()).
### register
```python
from abc import ABCMeta
class MyABC(metaclass=ABCMeta):
pass
MyABC.register(tuple)
assert issubclass(tuple, MyABC)
assert isinstance((), MyABC)
```
### __subclasshook__
Check whether subclass is considered a subclass of this ABC. This means that you can customize the behavior of issubclass further without the need to call register() on every class you want to consider a subclass of the ABC.
```python
## TODO
```
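A minimal sketch of the idea behind the TODO above (the class names are illustrative): any class that defines __iter__ is treated as a subclass of MyIterable, without ever calling register().
```python
from abc import ABCMeta, abstractmethod

class MyIterable(metaclass=ABCMeta):
    @abstractmethod
    def __iter__(self):
        while False:
            yield None

    @classmethod
    def __subclasshook__(cls, C):
        # Only answer for MyIterable itself, not for its subclasses.
        if cls is MyIterable:
            if any("__iter__" in B.__dict__ for B in C.__mro__):
                return True
        return NotImplemented

class Foo:
    def __iter__(self):
        return iter([])

assert issubclass(Foo, MyIterable)   # True, via __subclasshook__
assert isinstance(Foo(), MyIterable)
```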
### @abc.abstractmethod
A decorator indicating abstract methods.
Using this decorator requires that the class’s metaclass is ABCMeta or is derived from it. A class that has a metaclass derived from ABCMeta cannot be instantiated unless all of its abstract methods and properties are overridden. The abstract methods can be called using any of the normal ‘super’ call mechanisms. abstractmethod() may be used to declare abstract methods for properties and descriptors.
Dynamically adding abstract methods to a class, or attempting to modify the abstraction status of a method or class once it is created, are not supported. The abstractmethod() only affects subclasses derived using regular inheritance; “virtual subclasses” registered with the ABC’s register() method are not affected.
### Sample code
```python
# Source: https://github.com/PythonCharmers/python-future/blob/466bfb2dfa36d865285dc31fe2b0c0a53ff0f181/future/utils/__init__.py#L102-L134
import json
import os
from abc import ABCMeta, abstractmethod

# `touch` (used in JSONStorage below) is assumed to be a small helper that
# creates the storage file if it does not exist yet.


def with_metaclass(meta, *bases):
"""
Function from jinja2/_compat.py. License: BSD.
Use it like this::
class BaseForm(object):
pass
class FormType(type):
pass
class Form(with_metaclass(FormType, BaseForm)):
pass
This requires a bit of explanation: the basic idea is to make a
dummy metaclass for one level of class instantiation that replaces
itself with the actual metaclass. Because of internal type checks
we also need to make sure that we downgrade the custom metaclass
for one level to something closer to type (that's why __call__ and
__init__ comes back from type etc.).
This has the advantage over six.with_metaclass of not introducing
dummy classes into the final MRO.
"""
class Metaclass(meta):
__call__ = type.__call__
__init__ = type.__init__
def __new__(cls, name, this_bases, d):
if this_bases is None:
return type.__new__(cls, name, (), d)
return meta(name, bases, d)
return Metaclass('temporary_class', None, {})
class Storage(with_metaclass(ABCMeta, object)):
"""
The abstract base class for all Storages.
A Storage (de)serializes the current state of the database and stores it in
some place (memory, file on disk, ...).
"""
# Using ABCMeta as metaclass allows instantiating only storages that have
# implemented read and write
@abstractmethod
def read(self):
"""
Read the last stored state.
Any kind of deserialization should go here.
Return ``None`` here to indicate that the storage is empty.
:rtype: dict
"""
raise NotImplementedError('To be overridden!')
@abstractmethod
def write(self, data):
"""
Write the current state of the database to the storage.
Any kind of serialization should go here.
:param data: The current state of the database.
:type data: dict
"""
raise NotImplementedError('To be overridden!')
def close(self):
"""
Optional: Close open file handles, etc.
"""
pass
class JSONStorage(Storage):
"""
Store the data in a JSON file.
"""
def __init__(self, path, create_dirs=False, **kwargs):
"""
Create a new instance.
Also creates the storage file, if it doesn't exist.
:param path: Where to store the JSON data.
:type path: str
"""
super(JSONStorage, self).__init__()
touch(path, create_dirs=create_dirs) # Create file if not exists
self.kwargs = kwargs
self._handle = open(path, 'r+')
def close(self):
self._handle.close()
def read(self):
# Get the file size
self._handle.seek(0, os.SEEK_END)
size = self._handle.tell()
if not size:
# File is empty
return None
else:
self._handle.seek(0)
return json.load(self._handle)
def write(self, data):
self._handle.seek(0)
serialized = json.dumps(data, **self.kwargs)
self._handle.write(serialized)
self._handle.flush()
self._handle.truncate()
class MemoryStorage(Storage):
"""
Store the data as JSON in memory.
"""
def __init__(self,*args, **kwargs):
"""
Create a new instance.
"""
super(MemoryStorage, self).__init__()
self.memory = None
def read(self):
return self.memory
def write(self, data):
self.memory = data
```
<file_sep>+++
title = "F# C.U.R.S."
description = "F# Class, Unions, Record & Structure"
weight = 4
+++
## Class
Classes are types that represent objects that can have properties, methods, and events.
### Syntax
```
// Class definition:
type [access-modifier] type-name [type-params] [access-modifier] ( parameter-list ) [ as identifier ] =
[ class ]
[ inherit base-type-name(base-constructor-args) ]
[ let-bindings ]
[ do-bindings ]
member-list
...
[ end ]
// Mutually recursive class definitions:
type [access-modifier] type-name1 ...
and [access-modifier] type-name2 ...
...
```
### Constructors
The constructor is code that creates an instance of the class type. In an F# class, there is always a primary constructor whose arguments are described in the parameter-list that follows the type name, and whose body consists of the let (and let rec) bindings at the start of the class declaration and the do bindings that follow. The arguments of the primary constructor are in scope throughout the class declaration.
```fsharp
type MyClass1(x: int, y: int) =
do printfn "%d %d" x y
new() = MyClass1(0, 0)
```
### Self Identifier
To define a self identifier for the whole class, use the as keyword after the closing parentheses of the constructor parameter list, and specify the identifier name.
```fsharp
type MyClass2(dataIn) as self =
let data = dataIn
do
self.PrintMessage()
member this.PrintMessage() =
printf "Creating MyClass2 with Data %d" data
```
### Generic Type
Generic type parameters are specified in angle brackets (< and >), in the form of a single quotation mark followed by an identifier. Multiple generic type parameters are separated by commas.
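A minimal sketch (the Pair type is only an illustration) of a class with two generic type parameters:
```fsharp
type Pair<'A, 'B>(first: 'A, second: 'B) =
    member this.First = first
    member this.Second = second

// The type arguments are inferred here as Pair<int, string>.
let pair = Pair(1, "one")
printfn "%d %s" pair.First pair.Second
```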
### Recursive Type
```fsharp
open System.IO
type Folder(pathIn: string) =
let path = pathIn
let filenameArray : string array = Directory.GetFiles(path)
member this.FileArray = Array.map (fun elem -> new File(elem, this)) filenameArray
and File(filename: string, containingFolder: Folder) =
member this.Name = filename
member this.ContainingFolder = containingFolder
let folder1 = new Folder(".")
for file in folder1.FileArray do
printfn "%s" file.Name
```
## Union
Discriminated unions provide support for values that can be one of a number of named cases, possibly each with different values and types. Discriminated unions are useful for heterogeneous data; data that can have special cases, including valid and error cases; data that varies in type from one instance to another; and as an alternative for small object hierarchies.
### Syntax
```fsharp
[ attributes ]
type [accessibility-modifier] type-name =
| case-identifier1 [of [ fieldname1 : ] type1 [ * [ fieldname2 : ] type2 ...]
| case-identifier2 [of [fieldname3 : ]type3 [ * [ fieldname4 : ]type4 ...]
[ member-list ]
```
### Remarks
```fsharp
type Shape =
| Rectangle of width : float * length : float
| Circle of radius : float
| Prism of width : float * float * height : float
```
### Using Discriminated Unions
```fsharp
type Shape =
// The value here is the radius.
| Circle of float
// The value here is the side length.
| EquilateralTriangle of double
// The value here is the side length.
| Square of double
// The values here are the height and width.
| Rectangle of double * double
let pi = 3.141592654
let area myShape =
match myShape with
| Circle radius -> pi * radius * radius
| EquilateralTriangle s -> (sqrt 3.0) / 4.0 * s * s
| Square s -> s * s
| Rectangle (h, w) -> h * w
let radius = 15.0
let myCircle = Circle(radius)
printfn "Area of circle that has radius %f: %f" radius (area myCircle)
let squareSide = 10.0
let mySquare = Square(squareSide)
printfn "Area of square that has side %f: %f" squareSide (area mySquare)
let height, width = 5.0, 10.0
let myRectangle = Rectangle(height, width)
printfn "Area of rectangle that has height %f and width %f is %f" height width (area myRectangle)
// ---- OUTPUT ----
// Area of circle that has radius 15.000000: 706.858347
// Area of square that has side 10.000000: 100.000000
// Area of rectangle that has height 5.000000 and width 10.000000 is 50.000000
```
### Tree Data Structures
Discriminated unions can be recursive, meaning that the union itself can be included in the type of one or more cases. Recursive discriminated unions can be used to create tree structures.
```fsharp
type Tree =
| Tip
| Node of int * Tree * Tree
let rec sumTree tree =
match tree with
| Tip -> 0
| Node(value, left, right) ->
value + sumTree(left) + sumTree(right)
let myTree = Node(0, Node(1, Node(2, Tip, Tip), Node(3, Tip, Tip)), Node(4, Tip, Tip))
let resultSumTree = sumTree myTree
printfn "%A" resultSumTree
```
{{<mermaid >}}
graph TD;
A[0] --- B[1]
A[0] --- C[4]
B[1] --- D[2]
B[1] --- E[3]
{{</mermaid >}}
Discriminated unions work well if the nodes in the tree are heterogeneous.
```fsharp
type Expression =
| Number of int
| Add of Expression * Expression
| Multiply of Expression * Expression
| Variable of string
let rec Evaluate (env:Map<string,int>) exp =
match exp with
| Number n -> n
| Add (x, y) -> Evaluate env x + Evaluate env y
| Multiply (x, y) -> Evaluate env x * Evaluate env y
| Variable id -> env[id]
let environment = Map [ "a", 1; "b", 2; "c", 3 ]
// Create an expression tree that represents
// the expression: a + 2 * b.
let expressionTree1 = Add(Variable "a", Multiply(Number 2, Variable "b"))
// Evaluate the expression a + 2 * b, given the
// table of values for the variables.
let result = Evaluate environment expressionTree1
```
### Members
```fsharp
open System
type IPrintable =
abstract Print: unit -> unit
type Shape =
| Circle of float
| EquilateralTriangle of float
| Square of float
| Rectangle of float * float
member this.Area =
match this with
| Circle r -> Math.PI * (r ** 2.0)
| EquilateralTriangle s -> s * s * sqrt 3.0 / 4.0
| Square s -> s * s
| Rectangle(l, w) -> l * w
interface IPrintable with
member this.Print () =
match this with
| Circle r -> printfn $"Circle with radius %f{r}"
| EquilateralTriangle s -> printfn $"Equilateral Triangle of side %f{s}"
| Square s -> printfn $"Square with side %f{s}"
| Rectangle(l, w) -> printfn $"Rectangle with length %f{l} and width %f{w}"
```
## Record
Records represent simple aggregates of named values, optionally with members. They can either be structs or reference types. They are reference types by default.
### Syntax
```fsharp
[ attributes ]
type [accessibility-modifier] typename =
{ [ mutable ] label1 : type1;
[ mutable ] label2 : type2;
... }
[ member-list ]
```
### Remarks
You can use the [<Struct>] attribute to create a struct record rather than a record which is a reference type.
```fsharp
// Labels are separated by semicolons when defined on the same line.
type Point = { X: float; Y: float; Z: float; }
// You can define labels on their own line with or without a semicolon.
type Customer =
{ First: string
Last: string;
SSN: uint32
AccountNumber: uint32; }
// A struct record.
[<Struct>]
type StructPoint =
{ X: float
Y: float
Z: float }
```
### Pattern Matching
```fsharp
type Point3D = { X: float; Y: float; Z: float }
let evaluatePoint (point: Point3D) =
match point with
| { X = 0.0; Y = 0.0; Z = 0.0 } -> printfn "Point is at the origin."
| { X = xVal; Y = 0.0; Z = 0.0 } -> printfn "Point is on the x-axis. Value is %f." xVal
| { X = 0.0; Y = yVal; Z = 0.0 } -> printfn "Point is on the y-axis. Value is %f." yVal
| { X = 0.0; Y = 0.0; Z = zVal } -> printfn "Point is on the z-axis. Value is %f." zVal
| { X = xVal; Y = yVal; Z = zVal } -> printfn "Point is at (%f, %f, %f)." xVal yVal zVal
evaluatePoint { X = 0.0; Y = 0.0; Z = 0.0 }
evaluatePoint { X = 100.0; Y = 0.0; Z = 0.0 }
evaluatePoint { X = 10.0; Y = 0.0; Z = -1.0 }
// ---- OUTPUT ----
// Point is at the origin.
// Point is on the x-axis. Value is 100.000000.
// Point is at (10.000000, 0.000000, -1.000000).
```
### Members
A common approach is to define a Default static member for easy record construction.
```fsharp
type Person =
{ Name: string
Age: int
Address: string }
static member Default =
{ Name = "Phillip"
Age = 12
Address = "123 happy fun street" }
let defaultPerson = Person.Default
```
If you use a self identifier, that identifier refers to the instance of the record whose member is called:
```fsharp
type Person =
{ Name: string
Age: int
Address: string }
member this.WeirdToString() =
this.Name + this.Address + string this.Age
let p = { Name = "a"; Age = 12; Address = "abc123" }
let weirdString = p.WeirdToString()
```
### Differences Between Records and Classes
Record fields differ from class fields in that they are automatically exposed as properties, and they are used in the creation and copying of records. Record construction also differs from class construction. In a record type, you cannot define a constructor. Instead, the construction syntax described in this topic applies. Classes have no direct relationship between constructor parameters, fields, and properties.
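As a small sketch of that creation and copying (the Contact type is illustrative), records are constructed with a record expression and copied with the copy-and-update expression:
```fsharp
type Contact = { Name: string; Email: string }

// Construction: no constructor is defined; the record expression builds the value.
let original = { Name = "Ada"; Email = "[email protected]" }

// Copy-and-update: a new record that reuses every field except the ones listed.
let updated = { original with Email = "[email protected]" }
```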
## Structure
A structure is a compact object type that can be more efficient than a class for types that have a small amount of data and simple behavior.
### Remarks
```fsharp
// In Point3D, three immutable values are defined.
// x, y, and z will be initialized to 0.0.
type Point3D =
struct
val x: float
val y: float
val z: float
end
// In Point2D, two immutable values are defined.
// It also has a member which computes a distance between itself and another Point2D.
// Point2D has an explicit constructor.
// You can create zero-initialized instances of Point2D, or you can
// pass in arguments to initialize the values.
type Point2D =
struct
val X: float
val Y: float
new(x: float, y: float) = { X = x; Y = y }
member this.GetDistanceFrom(p: Point2D) =
let dX = (p.X - this.X) ** 2.0
let dY = (p.Y - this.Y) ** 2.0
dX + dY
|> sqrt
end
```
### ByRefLike structs
A "byref-like" struct in F# is a stack-bound value type. It is never allocated on the managed heap. A byref-like struct is useful for high-performance programming, as it is enforced with a set of strong checks about lifetime and non-capture. The rules are:
* They can be used as function parameters, method parameters, local variables, and method returns.
* They cannot be static or instance members of a class or normal struct.
* They cannot be captured by any closure construct (async methods or lambda expressions).
* They cannot be used as a generic parameter.
```fsharp
open System
open System.Runtime.CompilerServices
[<IsByRefLike; Struct>]
type S(count1: Span<int>, count2: Span<int>) =
member x.Count1 = count1
member x.Count2 = count2
```
### ReadOnly structs
You can annotate structs with the IsReadOnlyAttribute attribute.
```fsharp
[<IsReadOnly; Struct>]
type S(count1: int, count2: int) =
member x.Count1 = count1
member x.Count2 = count2
```
## When to Use Classes, Unions, Records, and Structures
Given the variety of types to choose from, you need to have a good understanding of what each type is designed for to select the appropriate type for a particular situation. Classes are designed for use in object-oriented programming contexts. Object-oriented programming is the dominant paradigm used in applications that are written for the .NET Framework. If your F# code has to work closely with the .NET Framework or another object-oriented library, and especially if you have to extend from an object-oriented type system such as a UI library, classes are probably appropriate.
If you are not interoperating closely with object-oriented code, or if you are writing code that is self-contained and therefore protected from frequent interaction with object-oriented code, you should consider using a mix of classes, records and discriminated unions. A single, well thought–out discriminated union, together with appropriate pattern matching code, can often be used as a simpler alternative to an object hierarchy.
Records have the advantage of being simpler than classes, but records are not appropriate when the demands of a type exceed what can be accomplished with their simplicity. Records are basically simple aggregates of values, without separate constructors that can perform custom actions, without hidden fields, and without inheritance or interface implementations. Although members such as properties and methods can be added to records to make their behavior more complex, the fields stored in a record are still a simple aggregate of values.
Structures are also useful for small aggregates of data, but they differ from classes and records in that they are .NET value types. Classes and records are .NET reference types. The semantics of value types and reference types are different in that value types are passed by value. This means that they are copied bit for bit when they are passed as a parameter or returned from a function. They are also stored on the stack or, if they are used as a field, embedded inside the parent object instead of stored in their own separate location on the heap. Therefore, structures are appropriate for frequently accessed data when the overhead of accessing the heap is a problem. For more information about structures, see Structs.
<file_sep>+++
date = "2016-08-11T11:59:31+11:00"
title = "iText with C#"
draft = true
+++<file_sep>+++
date = "2016-04-10T14:59:31+11:00"
title = "Database notes, Part-1"
draft = true
+++
## MySql
### Start mysql server
```
mysqld
```
### Start mysql client
```
mysql -P 3306
```
### stop mysql server
```
mysqladmin -u root -p shutdown
```
### Change mysql root user password
```
mysql --user=root --password=''
mysql -e "update mysql.user set password=password('[new-pass]') \
    where user='root'; flush privileges;"
```
## Backup database as SQL Dump
* If it's an entire DB, then:
```bash
mysqldump -u [uname] -p[pass] db_name > db_backup.sql
```
* If it's specific tables within a DB, then:
```bash
mysqldump -u [uname] -p[pass] db_name table1 table2 > table_backup.sql
```
## Import database
```bash
mysql -u username -p -h localhost YOUR-DATABASE < data.sql
```
### Create database YOUR-DATABASE
```sql
-- Drop database if it exists
DROP DATABASE /*!32312 IF EXISTS*/ YOUR-DATABASE ;
-- Create database with UTF-8 CHARSET
CREATE DATABASE YOUR-DATABASE CHAR SET utf8 COLLATE 'utf8_unicode_ci';
```
## PostGreSQL
### Start db
```
e:\db\pgsql9\bin\pg_ctl.exe -D "e:\db\pgsql9\data" -l logFile start
e:\db\pgsql9\bin\pg_ctl.exe -D "e:\db\pgsql9\data" stop
```
## SQL Server
### Collation precedence
An explicit COLLATE clause has the highest collation precedence, so the comparison below is evaluated with greek_ci_as rather than either column's default collation:
```
SELECT *
FROM TestTab
WHERE GreekCol = LatinCol COLLATE greek_ci_as;
```
<file_sep>+++
title = "Golang snippets"
description="Golang snippets"
weight=99
+++
### Convenient logging methods
#### Stringer
```go
package main
import (
"fmt"
)
// Animal has a Name and an Age to represent an animal.
type Animal struct {
Name string
Age uint
}
// String makes Animal satisfy the Stringer interface.
func (a Animal) String() string {
return fmt.Sprintf("%v (%d)", a.Name, a.Age)
}
func main() {
a := Animal{
Name: "Gopher",
Age: 2,
}
fmt.Println(a) // Gopher (2)
}
```
#### GoStringer
```go
package main
import (
"fmt"
)
// Address has a City, State and a Country.
type Address struct {
City string
State string
Country string
}
// Person has a Name, Age and Address.
type Person struct {
Name string
Age uint
Addr *Address
}
// GoString makes Person satisfy the GoStringer interface.
// The return value is valid Go code that can be used to reproduce the Person struct.
func (p Person) GoString() string {
if p.Addr != nil {
return fmt.Sprintf(
"Person{Name: %q, Age: %d, Addr: &Address{City: %q, State: %q, Country: %q}}",
p.Name, int(p.Age), p.Addr.City, p.Addr.State, p.Addr.Country)
}
return fmt.Sprintf("Person{Name: %q, Age: %d}", p.Name, int(p.Age))
}
func main() {
p1 := Person{
Name: "Warren",
Age: 31,
Addr: &Address{
City: "Denver",
State: "CO",
Country: "U.S.A.",
},
}
// If GoString() wasn't implemented, the output of `fmt.Printf("%#v", p1)` would be similar to
// Person{Name:"Warren", Age:0x1f, Addr:(*main.Address)(0x10448240)}
fmt.Printf("%#v\n", p1)
p2 := Person{
Name: "Theia",
Age: 4,
}
// If GoString() wasn't implemented, the output of `fmt.Printf("%#v", p2)` would be similar to
// Person{Name:"Theia", Age:0x4, Addr:(*main.Address)(nil)}
fmt.Printf("%#v\n", p2)
}
// ----- Output ------
// Person{Name: "Warren", Age: 31, Addr: &Address{City: "Denver", State: "CO", Country: "U.S.A."}}
// Person{Name: "Theia", Age: 4}
```
### File
#### Read file
```go
package main

import (
	"bufio"
	"fmt"
	"os"
)

func main() {
file, err := os.Open("input.dat")
if err != nil {
		fmt.Printf("An error occurred on opening the input file\n" +
			"Does the file exist?\n" +
			"Have you got access to it?\n")
return
}
defer file.Close()
iReader := bufio.NewReader(file)
for {
str, err := iReader.ReadString('\n')
if err != nil {
return // error or EOF
}
fmt.Printf("The input was: %s", str)
}
}
```
#### Read & Write with sliced buffer
```go
func cat(f *os.File) {
const NBUF = 512
var buf [NBUF]byte
for {
switch nr, er := f.Read(buf[:]); true {
case nr < 0:
fmt.Fprintf(os.Stderr, "cat: error reading from %s: %s\n", f, er)
os.Exit(1)
case nr == 0: // EOF
return
case nr > 0:
if nw, ew := os.Stdout.Write(buf[0:nr]); nw != nr {
fmt.Fprintf(os.Stderr, "cat: error writing from %s: %s\n", f, ew)
}
}
}
}
```
### TCP Client/Sever
#### Client
```go
package main

import (
	"bufio"
	"fmt"
	"net"
	"os"
	"strings"
)

func main() {
arguments := os.Args
if len(arguments) == 1 {
fmt.Println("Please provide host:port.")
return
}
CONNECT := arguments[1]
c, err := net.Dial("tcp", CONNECT)
if err != nil {
fmt.Println(err)
return
}
for {
reader := bufio.NewReader(os.Stdin)
fmt.Print(">> ")
text, _ := reader.ReadString('\n')
fmt.Fprintf(c, text+"\n")
message, _ := bufio.NewReader(c).ReadString('\n')
fmt.Print("->: " + message)
if strings.TrimSpace(string(text)) == "STOP" {
fmt.Println("TCP client exiting...")
return
}
}
}
```
#### Server
```go
package main

import (
	"bufio"
	"fmt"
	"net"
	"os"
	"strings"
	"time"
)

func main() {
arguments := os.Args
if len(arguments) == 1 {
fmt.Println("Please provide port number")
return
}
PORT := ":" + arguments[1]
l, err := net.Listen("tcp", PORT)
if err != nil {
fmt.Println(err)
return
}
defer l.Close()
c, err := l.Accept()
if err != nil {
fmt.Println(err)
return
}
for {
netData, err := bufio.NewReader(c).ReadString('\n')
if err != nil {
fmt.Println(err)
return
}
if strings.TrimSpace(string(netData)) == "STOP" {
fmt.Println("Exiting TCP server!")
return
}
fmt.Print("-> ", string(netData))
t := time.Now()
myTime := t.Format(time.RFC3339) + "\n"
c.Write([]byte(myTime))
}
}
```
<file_sep>+++
title = "MySql: SP & Func"
description="MySql Stored Proc & Function "
+++
## Function - UDF
> For the UDF mechanism to work, functions must be written in C or C++ and your operating system must support dynamic loading. MySQL source distributions include a file sql/udf_example.cc that defines five UDF functions. Consult this file to see how UDF calling conventions work. The include/mysql_com.h header file defines UDF-related symbols and data structures, although you need not include this header file directly; it is included by mysql.h.
> A UDF contains code that becomes part of the running server, so when you write a UDF, you are bound by any and all constraints that apply to writing server code. For example, you may have problems if you attempt to use functions from the libstdc++ library.
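As a quick illustration, assuming the bundled example has been compiled into udf_example.so and placed in the server's plugin directory, a compiled UDF is registered and removed like this:
```sql
-- Register the metaphon() function defined in sql/udf_example.cc
CREATE FUNCTION metaphon RETURNS STRING SONAME 'udf_example.so';
-- SELECT metaphon('MySQL');
DROP FUNCTION metaphon;
```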
### Function sample
* Check the input string is OZ land line
```sql
DROP FUNCTION IF EXISTS isAusLandLine;
DELIMITER $$
CREATE FUNCTION isAusLandLine (in_value VARCHAR(500)) RETURNS TINYINT
BEGIN
DECLARE vv_number VARCHAR(500);
SELECT REPLACE(in_value, '+','') INTO vv_number;
RETURN
CASE
        WHEN LEFT(vv_number, 2) IN ('02','03','07','08')
THEN LENGTH(vv_number) = 10 ELSE 0
END;
END $$
DELIMITER ;
-- SELECT isAusLandLine('0312345678') from dual; -- TRUE
-- SELECT isAusLandLine('0612345678') from dual; -- FALSE
```
* Extract the JSON value to a string
```sql
DROP FUNCTION IF EXISTS json_extract_string;
DELIMITER $
CREATE FUNCTION `json_extract_string`(
p_json text,
p_key text
) RETURNS varchar(40) CHARSET latin1
BEGIN
SET p_json = replace(p_json, '\\"', '"');
SET p_json = replace(p_json, '" :', '":');
SET p_json = replace(p_json, ': "', ':"');
SET p_json = replace(p_json, ': [', ':[');
SET @pattern_start_type = '"';
SET @pattern_end_type = '"';
SET @pattern = CONCAT('"', p_key, '":',@pattern_start_type);
IF LOCATE(@pattern, p_json) > 0 THEN
SET @start_i = LOCATE(@pattern, p_json) + CHAR_LENGTH(@pattern);
ELSE
SET @pattern_start_type = '[';
SET @pattern_end_type = ']';
SET @pattern = CONCAT('"', p_key, '":',@pattern_start_type);
SET @start_i = LOCATE(@pattern, p_json) + CHAR_LENGTH(@pattern);
END IF;
IF @start_i = CHAR_LENGTH(@pattern) THEN
SET @end_i = 0;
ELSE
SET @end_i = LOCATE(@pattern_end_type, p_json, @start_i) - @start_i;
END IF;
RETURN SUBSTR(p_json, @start_i, @end_i);
END $
DELIMITER ;
-- SELECT json_extract_string('{"key": 123}' ) from dual; -- 123
```
## Stored procedure
> MySQL supports stored routines (procedures and functions). A stored routine is a set of SQL statements that can be stored in the server. Once this has been done, clients don't need to keep reissuing the individual statements but can refer to the stored routine instead.
Stored routines can be particularly useful in certain situations:
* When multiple client applications are written in different languages or work on different platforms, but need to perform the same database operations.
* When security is paramount. Banks, for example, use stored procedures and functions for all common operations. This provides a consistent and secure environment, and routines can ensure that each operation is properly logged. In such a setup, applications and users would have no access to the database tables directly, but can only execute specific stored routines.
### Stored proc sample
* Create a stored proc to execute dynamic SQL script based on the previous execution result
```sql
DROP PROCEDURE IF EXISTS RunIf;
DELIMITER $$
CREATE PROCEDURE RunIf(ifExpr MEDIUMTEXT, execStmt MEDIUMTEXT)
BEGIN
SET @sql = concat('select @result := (', ifExpr, ')');
PREPARE stmt from @sql;
EXECUTE stmt;
DEALLOCATE prepare stmt;
IF (@result = true) THEN
SET @sql = execStmt;
PREPARE stmt FROM @sql;
EXECUTE stmt;
DEALLOCATE prepare stmt;
END IF;
END
$$
DELIMITER ;
-- CALL( CALL RunIf('EXISTS (SELECT * FROM information_schema.columns
-- WHERE table_schema = DATABASE() AND table_name = \'TargeTable\'
-- AND column_name = \'description\')',
-- 'ALTER TABLE TargeTable DROP COLUMN description');)
```
<file_sep>+++
title = "F# Collections 2"
description = "F# List, Seq & Map"
weight = 5
+++
## List
A list in F# is an ordered, immutable series of elements of the same type. To perform basic operations on lists, use the functions in the List module.
### Creating & appending
Creating and Initializing Lists
```fsharp
// --- creating ---
let list123 = [ 1; 2; 3 ]
// You can also put line breaks between elements, in which case the semicolons are optional.
let list123 = [
1
2
3 ]
// Normally, all list elements must be the same type.
// An exception is that a list in which the elements are specified to be a base type
// can have elements that are derived types. Thus the following is acceptable,
// because both Button and CheckBox derive from Control.
let myControlList : Control list = [ new Button(); new CheckBox() ]
// using a range indicated by integers separated by the range operator (..)
let list1 = [ 1 .. 10 ]
// An empty list.
let listEmpty = []
// use a sequence expression to create a list.
let listOfSquares = [ for i in 1 .. 10 -> i*i ]
// ---- appending -------
// You can attach elements to a list by using the :: (cons) operator.
// If list1 is [2; 3; 4], the following code creates list2 as [100; 2; 3; 4].
let list2 = 100 :: list1
// You can concatenate lists that have compatible types by using the @ operator, as in the following code.
// If list1 is [2; 3; 4] and list2 is [100; 2; 3; 4], this code creates list3 as [2; 3; 4; 100; 2; 3; 4].
let list3 = list1 @ list2
```
### Properties
The list type supports the following properties:
Property | Type | Description
---| ----| ---
Head |'T | The first element.
Empty | 'T list | A static property that returns an empty list of the appropriate type.
IsEmpty | bool | true if the list has no elements.
Item | 'T | The element at the specified index (zero-based).
Length |int |The number of elements.
Tail | 'T list | The list without the first element.
```fsharp
let list1 = [ 1; 2; 3 ]
// Properties
printfn "list1.IsEmpty is %b" (list1.IsEmpty)
printfn "list1.Length is %d" (list1.Length)
printfn "list1.Head is %d" (list1.Head)
printfn "list1.Tail.Head is %d" (list1.Tail.Head)
printfn "list1.Tail.Tail.Head is %d" (list1.Tail.Tail.Head)
printfn "list1.Item(1) is %d" (list1.Item(1))
```
### Recursion
```fsharp
// simple implmentaton for small list
let rec sum list =
match list with
| head :: tail -> head + sum tail
| [] -> 0
// The previous code works well for small lists, but for larger lists,
// it could overflow the stack. The following code improves on this
// code by using an accumulator argument, a standard technique for
// working with recursive functions.
let sum list =
let rec loop list acc =
match list with
| head :: tail -> loop tail (acc + head)
| [] -> acc
loop list 0
// The function RemoveAllMultiples is a recursive function that takes two lists.
// The first list contains the numbers whose multiples will be removed, and
// the second list is the list from which to remove the numbers.
// Uses this recursive function to eliminate all the non-prime numbers
// from a list, leaving a list of prime numbers as the result.
let IsPrimeMultipleTest n x =
x = n || x % n <> 0
let rec RemoveAllMultiples listn listx =
match listn with
| head :: tail -> RemoveAllMultiples tail (List.filter (IsPrimeMultipleTest head) listx)
| [] -> listx
let GetPrimesUpTo n =
let max = int (sqrt (float n))
RemoveAllMultiples [ 2 .. max ] [ 1 .. n ]
printfn "Primes Up To %d:\n %A" 100 (GetPrimesUpTo 100)
// output
// Primes Up To 100:
// [2; 3; 5; 7; 11; 13; 17; 19; 23; 29; 31; 37; 41; 43; 47; 53; 59; 61; 67; 71; 73; 79; 83; 89; 97]
```
### Functions
#### Exists
```fsharp
// Use List.exists to determine whether there is an element of a list satisfies a given Boolean expression.
// containsNumber returns true if any of the elements of the supplied list match
// the supplied number.
let containsNumber number list = List.exists (fun elem -> elem = number) list
let list0to3 = [0 .. 3]
printfn "For list %A, contains zero is %b" list0to3 (containsNumber 0 list0to3)
// The output is as follows
// For list [0; 1; 2; 3], contains zero is true
// Use List.exists2 to compare elements in two lists.
// isEqualElement returns true if any elements at the same position in two supplied
// lists match.
let isEqualElement list1 list2 = List.exists2 (fun elem1 elem2 -> elem1 = elem2) list1 list2
let list1to5 = [ 1 .. 5 ]
let list5to1 = [ 5 .. -1 .. 1 ]
if (isEqualElement list1to5 list5to1) then
printfn "Lists %A and %A have at least one equal element at the same position." list1to5 list5to1
else
printfn "Lists %A and %A do not have an equal element at the same position." list1to5 list5to1
// The output is as follows
// Lists [1; 2; 3; 4; 5] and [5; 4; 3; 2; 1] have at least one equal element at the same position.
// You can use List.forall if you want to test whether all the elements of a list meet a condition.
let isAllZeroes list = List.forall (fun elem -> elem = 0.0) list
printfn "%b" (isAllZeroes [0.0; 0.0])
printfn "%b" (isAllZeroes [0.0; 1.0])
// The output is as follows
// true
// false
// Similarly, List.forall2 determines whether all elements in the corresponding
// positions in two lists satisfy a Boolean expression that involves each pair of elements.
let listEqual list1 list2 = List.forall2 (fun elem1 elem2 -> elem1 = elem2) list1 list2
printfn "%b" (listEqual [0; 1; 2] [0; 1; 2])
printfn "%b" (listEqual [0; 0; 0] [0; 1; 0])
// The output is as follows
// true
// false
```
#### Sort
```fsharp
// use of List.sort.
let sortedList1 = List.sort [1; 4; 8; -2; 5]
printfn "%A" sortedList1
// The output is as follows
// [-2; 1; 4; 5; 8]
// use of List.sortBy.
let sortedList2 = List.sortBy (fun elem -> abs elem) [1; 4; 8; -2; 5]
printfn "%A" sortedList2
// The output is as follows:
// [1; -2; 4; 5; 8]
// The next example demonstrates the use of List.sortWith. In this example, the custom comparison function compareWidgets is used to first compare one field of a custom type, and then another when the values of the first field are equal.
type Widget = { ID: int; Rev: int }
let compareWidgets widget1 widget2 =
if widget1.ID < widget2.ID then -1 else
if widget1.ID > widget2.ID then 1 else
if widget1.Rev < widget2.Rev then -1 else
if widget1.Rev > widget2.Rev then 1 else
0
let listToCompare = [
{ ID = 92; Rev = 1 }
{ ID = 110; Rev = 1 }
{ ID = 100; Rev = 5 }
{ ID = 100; Rev = 2 }
{ ID = 92; Rev = 1 }
]
let sortedWidgetList = List.sortWith compareWidgets listToCompare
printfn "%A" sortedWidgetList
// The output is as follows:
// [{ID = 92;
// Rev = 1;}; {ID = 92;
// Rev = 1;}; {ID = 100;
// Rev = 2;}; {ID = 100;
// Rev = 5;}; {ID = 110;
// Rev = 1;}]
```
#### Search
```fsharp
// The simplest, List.find, enables you to find the first element that matches a given condition.
let isDivisibleBy number elem = elem % number = 0
let result = List.find (isDivisibleBy 5) [ 1 .. 100 ]
printfn "%d " result
// output
// 5
// If the elements must be transformed first, call List.pick, which takes
// a function that returns an option, and looks for the first option
// value that is Some(x). Instead of returning the element, List.pick
// returns the result x. If no matching element is found, List.pick throws
// System.Collections.Generic.KeyNotFoundException.
let valuesList = [ ("a", 1); ("b", 2); ("c", 3) ]
let resultPick = List.pick (fun elem ->
match elem with
| (value, 2) -> Some value
| _ -> None) valuesList
printfn "%A" resultPick
// The output is as follows:
// "b"
// Another group of search operations, List.tryFind and related functions,
// return an option value. The List.tryFind function returns the first element
// of a list that satisfies a condition if such an element exists, but
// the option value None if not. The variation List.tryFindIndex returns
// the index of the element, if one is found, rather than the element
// itself. These functions are illustrated in the following code.
let list1d = [1; 3; 7; 9; 11; 13; 15; 19; 22; 29; 36]
let isEven x = x % 2 = 0
match List.tryFind isEven list1d with
| Some value -> printfn "The first even value is %d." value
| None -> printfn "There is no even value in the list."
match List.tryFindIndex isEven list1d with
| Some value -> printfn "The first even value is at position %d." value
| None -> printfn "There is no even value in the list."
// The output is as follows:
// The first even value is 22.
// The first even value is at position 8.
```
### List & Tuple
Lists that contain tuples can be manipulated by zip and unzip functions. These functions combine two lists of single values into one list of tuples or separate one list of tuples into two lists of single values.
```fsharp
// ------------------ zip ---------------------
// The simplest List.zip function takes two lists of single elements and
// produces a single list of tuple pairs.
let list1 = [ 1; 2; 3 ]
let list2 = [ -1; -2; -3 ]
let listZip = List.zip list1 list2
printfn "%A" listZip
// The output is as follows
// [(1, -1); (2, -2); (3; -3)]
// Another version, List.zip3, takes three lists of single elements and produces a
// single list of tuples that have three elements.
let list3 = [ 0; 0; 0]
let listZip3 = List.zip3 list1 list2 list3
printfn "%A" listZip3
// The output is as follows
// [(1, -1, 0); (2, -2, 0); (3, -3, 0)]
// ------------------ unzip ---------------------
// use of List.unzip.
let lists = List.unzip [(1,2); (3,4)]
printfn "%A" lists
printfn "%A %A" (fst lists) (snd lists)
// The output is as follows
// ([1; 3], [2; 4])
// [1; 3] [2; 4]
// use of List.unzip3.
let listsUnzip3 = List.unzip3 [(1,2,3); (4,5,6)]
printfn "%A" listsUnzip3
// The output is as follows
// ([1; 4], [2; 5], [3; 6])
```
### Operating
```fsharp
// The simplest is List.iter, which enables you to call a function on every
// element of a list. Variations include List.iter2, which enables you to
// perform an operation on elements of two lists, List.iteri, which is like
// List.iter except that the index of each element is passed as an argument
// to the function that is called for each element, and List.iteri2, which
// is a combination of the functionality of List.iter2 and List.iteri.
let list1 = [1; 2; 3]
let list2 = [4; 5; 6]
List.iter (fun x -> printfn "List.iter: element is %d" x) list1
List.iteri(fun i x -> printfn "List.iteri: element %d is %d" i x) list1
List.iter2 (fun x y -> printfn "List.iter2: elements are %d %d" x y) list1 list2
List.iteri2 (fun i x y ->
printfn "List.iteri2: element %d of list1 is %d element %d of list2 is %d"
i x i y)
list1 list2
// The output is as follows:
// List.iter: element is 1
// List.iter: element is 2
// List.iter: element is 3
// List.iteri: element 0 is 1
// List.iteri: element 1 is 2
// List.iteri: element 2 is 3
// List.iter2: elements are 1 4
// List.iter2: elements are 2 5
// List.iter2: elements are 3 6
// List.iteri2: element 0 of list1 is 1; element 0 of list2 is 4
// List.iteri2: element 1 of list1 is 2; element 1 of list2 is 5
// List.iteri2: element 2 of list1 is 3; element 2 of list2 is 6
// Another frequently used function that transforms list elements is List.map,
// which enables you to apply a function to each element of a list and put all
// the results into a new list.
// List.map2 and List.map3 are variations that take multiple lists.
// The only difference between List.mapi2 and List.mapi is that List.mapi2
// works with two lists. The following example illustrates List.map.
let list1 = [1; 2; 3]
let newList = List.map (fun x -> x + 1) list1
printfn "%A" newList
// The output is as follows:
// [2; 3; 4]
// use of List.map2.
let list1 = [1; 2; 3]
let list2 = [4; 5; 6]
let sumList = List.map2 (fun x y -> x + y) list1 list2
printfn "%A" sumList
// The output is as follows
// [5; 7; 9]
// use of List.map3.
let newList2 = List.map3 (fun x y z -> x + y + z) list1 list2 [2; 3; 4]
printfn "%A" newList2
// The output is as follows:
// [7; 10; 13]
// use of List.mapi.
let newListAddIndex = List.mapi (fun i x -> x + i) list1
printfn "%A" newListAddIndex
// The output is as follows
// [1; 3; 5]
// use of List.mapi2.
let listAddTimesIndex = List.mapi2 (fun i x y -> (x + y) * i) list1 list2
printfn "%A" listAddTimesIndex
// The output is as follows
// [0; 7; 18]
// List.collect is like List.map, except that each element produces a list
// and all these lists are concatenated into a final list. In the following
// code, each element of the list generates three numbers.
let collectList = List.collect (fun x -> [for i in 1..3 -> x * i]) list1
printfn "%A" collectList
// The output is as follows:
// [1; 2; 3; 2; 4; 6; 3; 6; 9]
// use List.filter, which takes a Boolean condition and produces a new list
// that consists only of elements that satisfy the given condition.
let evenOnlyList = List.filter (fun x -> x % 2 = 0) [1; 2; 3; 4; 5; 6]
// The resulting list is [2; 4; 6].
// A combination of map and filter, List.choose enables you to transform
// and select elements at the same time. List.choose applies a function that
// returns an option to each element of a list, and returns a new list of
// the results for elements when the function returns the option value Some.
// use of List.choose to select capitalized words out of a list of words.
let listWords = [ "and"; "Rome"; "Bob"; "apple"; "zebra" ]
let isCapitalized (string1:string) = System.Char.IsUpper string1[0]
let results = List.choose (fun elem ->
match elem with
| elem when isCapitalized elem -> Some(elem + "'s")
| _ -> None) listWords
printfn "%A" results
// The output is as follows:
// ["Rome's"; "Bob's"]
```
### Fold & Scan
The fold and scan operations are like List.iter and List.map in that you invoke a function on each element, but these operations provide an additional parameter called the accumulator that carries information through the computation.
```fsharp
let sumList list = List.fold (fun acc elem -> acc + elem) 0 list
printfn "Sum of the elements of list %A is %d." [ 1 .. 3 ] (sumList [ 1 .. 3 ])
// output
// Sum of the elements of list [1; 2; 3] is 6.
// The following example computes the average of a list.
let averageList list = (List.fold (fun acc elem -> acc + float elem) 0.0 list / float list.Length)
// The following example computes the standard deviation of a list.
// The standard deviation is computed by taking the square root of the
// sum of the variances, which are the differences between each value
// and the average.
let stdDevList list =
let avg = averageList list
sqrt (List.fold (fun acc elem -> acc + (float elem - avg) ** 2.0 ) 0.0 list / float list.Length)
let testList listTest =
printfn "List %A average: %f stddev: %f" listTest (averageList listTest) (stdDevList listTest)
testList [1; 1; 1]
testList [1; 2; 1]
testList [1; 2; 3]
// output
// List [1; 1; 1] average: 1.000000 stddev: 0.000000
// List [1; 2; 1] average: 1.333333 stddev: 0.471405
// List [1; 2; 3] average: 2.000000 stddev: 0.816497
// List.fold is the same as List.iter when the accumulator is not used.
let printList list = List.fold (fun acc elem -> printfn "%A" elem) () list
printList [0.0; 1.0; 2.5; 5.1 ]
// output
// 0.0
// 1.0
// 2.5
// 5.1
// The following example uses List.fold to reverse a list.
// The accumulator starts out as the empty list, and the function uses the cons operator
// to add each successive element to the head of the accumulator list, resulting in a
// reversed form of the list.
let reverseList list = List.fold (fun acc elem -> elem::acc) [] list
printfn "%A" (reverseList [1 .. 10])
// output
// [10; 9; 8; 7; 6; 5; 4; 3; 2; 1]
// Use List.fold2 to perform computations over two lists (of equal size) at the same time.
// Example: Sum the greater element at each list position.
let sumGreatest list1 list2 = List.fold2 (fun acc elem1 elem2 ->
acc + max elem1 elem2) 0 list1 list2
let sum = sumGreatest [1; 2; 3] [3; 2; 1]
printfn "The sum of the greater of each pair of elements in the two lists is %d." sum
// output
// The sum of the greater of each pair of elements in the two lists is 8.
// List.fold and List.scan differ in that List.fold returns the final value of
// the extra parameter, but List.scan returns the list of the intermediate
// values (along with the final value) of the extra parameter.
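// A minimal List.scan sketch (illustrative): running totals of a list,
// including the initial accumulator value.
let runningTotals = List.scan (fun acc elem -> acc + elem) 0 [1; 2; 3]
printfn "%A" runningTotals
// output
// [0; 1; 3; 6]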
// Discriminated union type that encodes the transaction type.
type Transaction =
| Deposit
| Withdrawal
let transactionTypes = [Deposit; Deposit; Withdrawal]
let transactionAmounts = [100.00; 1000.00; 95.00 ]
let initialBalance = 200.00
// Use fold2 to perform a calculation on the list to update the account balance.
let endingBalance = List.fold2 (fun acc elem1 elem2 ->
match elem1 with
| Deposit -> acc + elem2
| Withdrawal -> acc - elem2)
initialBalance
transactionTypes
transactionAmounts
printfn "%f" endingBalance
// output
// 1205.000000
// For a calculation like summation, List.fold and List.foldBack have the same effect because the result does not depend on the order of traversal. In the following example, List.foldBack is used to add the elements in a list.
let sumListBack list = List.foldBack (fun elem acc -> acc + elem) list 0
printfn "%d" (sumListBack [1; 2; 3])
// output: 6
// For a calculation in which the order of traversal is important, fold and foldBack have different
// results. For example, replacing fold with foldBack in the listReverse function
// produces a function that copies the list, rather than reversing it.
let copyList list = List.foldBack (fun elem acc -> elem::acc) list []
printfn "%A" (copyList [1 .. 10])
// output: [1; 2; 3; 4; 5; 6; 7; 8; 9; 10]
// The following example returns to the bank account example. This time a new
// transaction type is added: an interest calculation. The ending balance now
// depends on the order of transactions.
type Transaction2 =
| Deposit
| Withdrawal
| Interest
let transactionTypes2 = [Deposit; Deposit; Withdrawal; Interest]
let transactionAmounts2 = [100.00; 1000.00; 95.00; 0.05 / 12.0 ]
let initialBalance2 = 200.00
// Because fold2 processes the lists by starting at the head element,
// the interest is calculated last, on the balance of 1205.00.
let endingBalance2 = List.fold2 (fun acc elem1 elem2 ->
match elem1 with
| Deposit -> acc + elem2
| Withdrawal -> acc - elem2
| Interest -> acc * (1.0 + elem2))
initialBalance2
transactionTypes2
transactionAmounts2
printfn "%f" endingBalance2
// output
// 1210.020833
// Because foldBack2 processes the lists by starting at end of the list,
// the interest is calculated first, on the balance of only 200.00.
let endingBalance3 = List.foldBack2 (fun elem1 elem2 acc ->
match elem1 with
| Deposit -> acc + elem2
| Withdrawal -> acc - elem2
| Interest -> acc * (1.0 + elem2))
transactionTypes2
transactionAmounts2
initialBalance2
printfn "%f" endingBalance3
// output
// 1205.833333
```
<file_sep>+++
title = "Collection"
description = "Collection"
weight=6
+++
## Collection
### Collection protocols
* To implement a protocol, objects must support certain operations.
* Most collections implement container, sized and iterable.
* All except dict and set are sequences
* __Container__: membership testing using `in` and `not in`
* __Sized__: Determine number of elements with `len(s)`
* __Iterable__: Can produce an iterator with `iter(s)` and be used in a for loop, e.g. `for item in iterable: do_something(item)`
* __Sequence__:
* Retrieve elements by index, e.g. `item = seq[index]`
* Find items by value `index = seq.index(item)`
* Count items `num = seq.count(item)`
* Produce a reversed sequence `r = reversed(seq)`
* __Set__: set algebra operations, available as methods and infix operators, e.g.
* subset
* proper subset
* equal
* not equal
* proper superset
* superset
* intersections
* union
* symmetric difference
* difference
* Built-in collections (a quick sketch exercising these protocols follows the table)
Protocol | Implementing Collections
---------|----
Container |str, list, range, tuple, set, bytes, dict
Sized | str, list, range, tuple, set, bytes, dict
Iterable | str, list, range, tuple, set, bytes, dict
Sequence | str, list, range, tuple, bytes
Set | set
Mutable Sequence | list
Mutable Set | set
Mutable Mapping | dict
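To make these protocols concrete, here is a minimal sketch (the values are made up) exercising them on built-in collections:
```python
seq = [3, 1, 4, 1, 5]

# Container: membership testing with `in` / `not in`
assert 4 in seq and 9 not in seq

# Sized: number of elements with len()
assert len(seq) == 5

# Iterable: iter() produces an iterator usable in a for loop
assert next(iter(seq)) == 3

# Sequence: indexing, index(), count(), reversed()
assert seq[0] == 3 and seq.index(4) == 2 and seq.count(1) == 2
assert list(reversed(seq)) == [5, 1, 4, 1, 3]

# Set: algebra via methods and infix operators
a, b = {1, 2, 3}, {2, 3, 4}
assert a & b == {2, 3} and a | b == {1, 2, 3, 4}
assert a <= {1, 2, 3} and a ^ b == {1, 4}
```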
### Example
* sortedset
```python
from bisect import bisect_left
from collections.abc import Sequence, Set
from itertools import chain
class SortedSet(Sequence, Set):
def __init__(self, items=None):
self._items = sorted(set(items)) if items is not None else []
def __contains__(self, item):
try:
self.index(item)
return True
except ValueError:
return False
def __len__(self):
return len(self._items)
def __iter__(self):
return iter(self._items)
def __getitem__(self, index):
result = self._items[index]
return SortedSet(result) if isinstance(index, slice) else result
def __repr__(self):
return "SortedSet({})".format(repr(self._items) if self._items else '')
def __eq__(self, rhs):
if not isinstance(rhs, SortedSet):
return False
return self._items == rhs._items
def _is_unique_and_sorted(self):
return all(self[i] < self[i + 1] for i in range(len(self) - 1))
def index(self, item):
assert self._is_unique_and_sorted()
index = bisect_left(self._items, item)
if (index != len(self._items)) and self._items[index] == item:
return index
raise ValueError("{} not found".format(repr(item)))
def count(self, item):
assert self._is_unique_and_sorted()
return int(item in self._items)
def __add__(self, rhs):
return SortedSet(chain(self._items, rhs._items))
def __mul__(self, rhs):
return SortedSet(self) if rhs > 0 else SortedSet()
def __rmul__(self, lhs):
return self * lhs
def issubset(self, iterable):
return self <= SortedSet(iterable)
def issuperset(self, iterable):
return self >= SortedSet(iterable)
def intersection(self, iterable):
return self & SortedSet(iterable)
def union(self, iterable):
return self | SortedSet(iterable)
def symmetric_difference(self, iterable):
return self ^ SortedSet(iterable)
def difference(self, iterable):
return self - SortedSet(iterable)
```
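* usage sketch (a quick demo of the class above; values are illustrative, output shown in comments)
```python
from sorted_set import SortedSet

s = SortedSet([3, 1, 2, 3])
t = SortedSet([2, 4])
print(s)               # SortedSet([1, 2, 3])
print(len(s), 2 in s)  # 3 True
print(s & t)           # SortedSet([2])  (infix Set-protocol operator)
print(s.union([5]))    # SortedSet([1, 2, 3, 5])
```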
* test case
```python
import unittest
from sorted_set import SortedSet
class TestConstruction(unittest.TestCase):
def test_empty(self):
s = SortedSet([])
def test_from_sequence(self):
s = SortedSet([7, 8, 3, 1])
def test_with_duplicates(self):
s = SortedSet([8, 8, 8])
def test_from_iterable(self):
def gen6842():
yield 6
yield 8
yield 4
yield 2
g = gen6842()
s = SortedSet(g)
def test_default_empty(self):
s = SortedSet()
class TestContainerProtocol(unittest.TestCase):
def setUp(self):
self.s = SortedSet([6, 7, 3, 9])
def test_positive_contained(self):
self.assertTrue(6 in self.s)
def test_negative_contained(self):
self.assertFalse(2 in self.s)
def test_positive_not_contained(self):
self.assertTrue(5 not in self.s)
def test_negative_not_contained(self):
self.assertFalse(9 not in self.s)
class TestSizedProtocol(unittest.TestCase):
def test_empty(self):
s = SortedSet()
self.assertEqual(len(s), 0)
def test_one(self):
s = SortedSet([42])
self.assertEqual(len(s), 1)
def test_ten(self):
s = SortedSet(range(10))
self.assertEqual(len(s), 10)
def test_with_duplicates(self):
s = SortedSet([5, 5, 5])
self.assertEqual(len(s), 1)
class TestIterableProtocol(unittest.TestCase):
def setUp(self):
self.s = SortedSet([7, 2, 1, 1, 9])
def test_iter(self):
i = iter(self.s)
self.assertEqual(next(i), 1)
self.assertEqual(next(i), 2)
self.assertEqual(next(i), 7)
self.assertEqual(next(i), 9)
self.assertRaises(StopIteration, lambda: next(i))
def test_for_loop(self):
index = 0
expected = [1, 2, 7, 9]
for item in self.s:
self.assertEqual(item, expected[index])
index += 1
class TestSequenceProtocol(unittest.TestCase):
def setUp(self):
self.s = SortedSet([1, 4, 9, 13, 15])
def test_index_zero(self):
self.assertEqual(self.s[0], 1)
def test_index_four(self):
self.assertEqual(self.s[4], 15)
def test_index_one_beyond_the_end(self):
with self.assertRaises(IndexError):
self.s[5]
def test_index_minus_one(self):
self.assertEqual(self.s[-1], 15)
def test_index_minus_five(self):
self.assertEqual(self.s[-5], 1)
def test_index_one_before_the_beginning(self):
with self.assertRaises(IndexError):
self.s[-6]
def test_slice_from_start(self):
self.assertEqual(self.s[:3], SortedSet([1, 4, 9]))
def test_slice_to_end(self):
self.assertEqual(self.s[3:], SortedSet([13, 15]))
def test_slice_empty(self):
self.assertEqual(self.s[10:], SortedSet())
def test_slice_arbitrary(self):
self.assertEqual(self.s[2:4], SortedSet([9, 13]))
def test_slice_full(self):
self.assertEqual(self.s[:], self.s)
class TestReprProtocol(unittest.TestCase):
def test_repr_empty(self):
s = SortedSet()
self.assertEqual(repr(s), "SortedSet()")
def test_repr_some(self):
s = SortedSet([42, 40, 19])
self.assertEqual(repr(s), "SortedSet([19, 40, 42])")
if __name__ == '__main__':
unittest.main()
```
<file_sep>+++
title = "Azure: CLI - 1"
weight = 4
description="Introduction of Azure CLI"
+++
## Azure Cli
The Azure Command-Line Interface (CLI) is a cross-platform command-line tool to connect to Azure and execute administrative commands on Azure resources. It allows the execution of commands through a terminal using interactive command-line prompts or a script.
### Install Azure CLI on macOS
* Mac with Intel CPU
```sh
brew update && brew install azure-cli
```
### Install Azure Cli with Docker
* Use `docker run`
```sh
docker run -it mcr.microsoft.com/azure-cli
```
* Run with SSH key
```sh
docker run -it -v ${HOME}/.ssh:/root/.ssh mcr.microsoft.com/azure-cli
```
### Get Started
* Sign In
```
az login
```
* Show accounts
```
az account list
az account show
```
* Setup default subscription
```
az account set --subscription XXXX-XXXXX-xXXXX-XXX
```
* Show App Services
```
az appservice plan list --query-examples
```
* Show Web Apps
```
az webapp list --query-examples
```
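* Filter output with a JMESPath query via the global `--query` parameter (the fields below are standard web app properties; adjust as needed)
```
az webapp list --query "[].{name:name, location:location}" --output table
```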
<file_sep>+++
date = "2016-04-10T14:59:31+11:00"
title = "Scrapy Notes"
draft = true
+++
```sh
$ pip install scrapy
$ cat > myspider.py <<EOF
import scrapy

class BlogSpider(scrapy.Spider):
    name = 'blogspider'
    start_urls = ['https://blog.scrapinghub.com']

    def parse(self, response):
        for title in response.css('h2.entry-title'):
            yield {'title': title.css('a ::text').extract_first()}

        next_page = response.css('div.prev-post > a ::attr(href)').extract_first()
        if next_page:
            yield scrapy.Request(response.urljoin(next_page), callback=self.parse)
EOF
$ scrapy runspider myspider.py
```
On Windows the install may fail while building the Twisted dependency, with an error like:
```
Command "c:\apps\python3\python3.exe -u -c "import setuptools, tokenize;__file__='C:\\Users\\Harry\\AppData\\Local\\Temp\\pip-build-juixotv0\\Twisted\\setup.py';f=getattr(tokenize, 'open', open)(__file__);code=f.read().replace('\r\n', '\n');f.close();exec(compile(code, __file__, 'exec'))" install --record C:\Users\Harry\AppData\Local\Temp\pip-w4_uq4jb-record\install-record.txt --single-version-externally-managed --compile" failed with error code 1 in C:\Users\Harry\AppData\Local\Temp\pip-build-juixotv0\Twisted\
```
<file_sep>+++
title = "Cron Job Note - 1"
description = "Common Cron Job examples"
+++
## Introduction
Cron jobs are one of the most common scheduling techniques used on every Unix / Linux system.
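Each crontab entry consists of five time fields followed by the command to run (entries in the system-wide `/etc/crontab` additionally take a user name before the command):
```bash
# field order:  minute  hour  day-of-month  month  day-of-week  command
# e.g. run the housekeeping script at 15:20 every day:
20 15 * * * /home/<user_id>/bin/housekeeping
```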
## Common Use Cases
### House keeping - Clean up the backup
* Run daily to remove surplus backup tar balls (the example entry below runs at 15:20)
* Assumption:
* The script file named `housekeeping.sh`.
* The script sits inside folder `bin` which is under your user account.
* All backup files have the .tar as extension.
* The script will check the backup tar balls within the folder `data/backups` under your user account (as used in the script below).
* The script will remove the oldest backup if there are more than 5 backup files in total.
* Cron Job setting -
```bash
# Replace the user_id with your actual user name
20 15 * * * <user_id> /home/<user_id>/bin/housekeeping 2>&1 | logger
```
* The script file
```bash
#!/bin/bash
# crontab config
# 20 15 * * * /home/${USER}/bin/housekeeping 2>&1
# script location: /home/${USER}/bin
# log file location: /home/${USER}/log
logfile=/home/${USER}/log/housekeeping.log;
backup_dir=/home/${USER}/data/backups
# Log the start time
echo $(date)>> $logfile;
fln=$(($(find ${backup_dir} -type f -name "*.tar" | egrep -i ".tar" | wc -l)+0));
# Log the number of files
echo $fln>>$logfile;
if [[ $fln -ge 6 ]]; then
for t in $(find ${backup_dir} -type f -name "*.tar" | sort | egrep -m1 ".tar");
do
rm -rf $t;
# Log the removed file
echo "remove $t" >> $logfile;
done;
fi;
```
### Backup the database
#### Backup the MySql database nightly
* The database backup dump will be stored into the folder named after the brand name, e.g. the folder facebook for Facebook
* The script of backup `backup.sh`
```bash
# Every Backup directory has file named ENVIRONMENT
#
# ENVIRONMENT contains the setting like db, server, etc.
#
# DB_HOST=your_database_host_server
# DATABASE=the_target_database
# DB_USER=db_login_id
# DB_PASSWORD=<PASSWORD>
# BACKUPS_TO_KEEP=5
#
# THIS SCRIPT MAY ONLY BE RUN ONCE EACH DAY
# Usage:
# ./backup.sh facebook
#
[[ -z "$1" ]] && echo "Must run script with brand name, which has matching folder in /var/backup/" && exit 1
BACKUP_DIR=/var/backup/${1}
source ${BACKUP_DIR}/ENVIRONMENT
LATEST_DUMP_TARBALL=latest-dump.tgz
DATE=$(date +%d-%m-%Y) # dd-mm-yyyy
LATEST_BACKUP_DIR=${BACKUP_DIR}/${DATE}
echo "$(date): Beginning backup of ${DATABASE} on ${DB_HOST}"
mkdir -p ${LATEST_BACKUP_DIR}
cd ${LATEST_BACKUP_DIR}
START=$(date +%s)
# Set procedure definer to ussadmin
SQL="UPDATE mysql.proc p \
SET definer = '${DB_USER}@%' \
WHERE db = '${DATABASE}' \
AND definer <> '${DB_USER}@%';"
mysql -h ${DB_HOST} -P 3306 ${DATABASE} -u ${DB_USER} -p${DB_PASSWORD} -e"${SQL}"
mysqldump -h ${DB_HOST} -P 3306 ${DATABASE} \
--routines \
--lock-tables=false \
-u ${DB_USER} -p${DB_PASSWORD} > ./dump.sql
EXPORT_RESULT=$?
END=$(date +%s)
echo "Export duration: $(( $END - $START ))s"
echo "Gzipping dump file into tarball"
START=$(date +%s)
tar czf dump.tgz dump.sql
[[ $? ]] && rm -f dump.sql
END=$(date +%s)
echo "Gzipping duration: $(( $END - $START ))s"
echo "Creating ${BACKUP_DIR}/${LATEST_DUMP_TARBALL}"
if [[ ${EXPORT_RESULT} ]]; then
if [[ -f ${BACKUP_DIR}/${LATEST_DUMP_TARBALL} ]]; then
rm --force ${BACKUP_DIR}/${LATEST_DUMP_TARBALL};
fi
ln -s ${LATEST_BACKUP_DIR}/dump.tgz ${BACKUP_DIR}/${LATEST_DUMP_TARBALL};
else
echo "Export operation did not return 0, skipping symlink update"
fi
echo "$(date): Completed backup for ${DATABASE} on ${DB_HOST}"
```
<file_sep>+++
date = "2019-12-07T16:56:21+11:00"
title = " Docker Kits"
description = "The repository is the collection of kits built on the top of docker image. "
+++
## Docker Kits
The repository is a collection of kits built on top of Docker images. At the very beginning, the repository was built for kits used for infrastructure. The scope is about to expand from infrastructure to simplifying integration tests, and even to builds for special development needs.
Repository name in Docker Hub: [harryh00/docker-kits](https://hub.docker.com/r/harryh00/docker-kits)
## Why choose Docker
Docker is a tool designed to make it easier to create, deploy, and run applications by using containers. Containers allow a developer to package up an application with all of the parts it needs, such as libraries and other dependencies, and deploy it as one package.
## Benefits
Containers work a little like VMs, but in a far more specific and granular way. The benefits of Docker containers show up in many places. Here are some of the advantages of Docker used as tool kits:
* Docker enables more efficient use of system resources
* Docker enables faster software delivery cycles
* Docker enables application portability
## Images and tags
### Alpine base kit
* harryh00/docker-kits:alpine-ansible
* harryh00/docker-kits:alpine-terraform
* harryh00/docker-kits:alpine-k8s
* harryh00/docker-kits:alpine-aws2
### Ubuntu base kit
* harryh00/docker-kits:ubuntu-ansible
### CentOS base kit
* harryh00/docker-kits:centos-ansible
## How to use
* Sample 1: use ansible kit
```
# one-off use
docker run --rm -it harryh00/docker-kits:alpine-ansible ansible --version
# use the kit without install ansible
docker run --rm -it -v ${PWD}:/app -w /app harryh00/docker-kits:alpine-ansible /bin/bash
```
* Sample 2: use aws cli kit
```
# mount the local aws config to aws cli kit
docker run --rm -it -v ~/.aws:/root/.aws harryh00/docker-kits:alpine-aws2
```
* Sample 3: use k8s cli kit
```
docker run --rm -it -v ~/.kube:/root/.kube -v ~/.aws:/root/.aws harryh00/docker-kits:alpine-k8s
```
## [Github Repository](https://github.com/harryho/docker-kits.git)
## [Docker Hub Repository](https://hub.docker.com/r/harryh00/docker-kits)
<file_sep>+++
title = "Unpacking"
description = "Data structure - Unpacking "
draft=true
weight=20
+++
### Data structure
#### Unpacking
* Unpacking a Sequence into Separate Variables
```py
>>> p = (4, 5)
>>> x, y = p # x = 4, y = 5
>>> data = [ 'ACME', 50, 91.1, (2012, 12, 21) ]
>>> name, shares, price, date = data # date = (2012, 12, 21)
>>> name, shares, price, (year, mon, day) = data #
```
* Unpacking Elements from Iterables of Arbitrary Length
```py
# `avg` is assumed to be a simple mean helper (it is not defined in the original snippet)
def avg(values):
    return sum(values) / len(values)

def drop_first_last(grades):
    first, *middle, last = grades
    return avg(middle)
```
* Another use case. The phone_numbers variable below will always be a list, regardless of how many phone numbers are unpacked (including none).
```py
>>> record = ('Dave', '<EMAIL>', '773-555-1212', '847-555-1212')
>>> name, email, *phone_numbers = record
>>> phone_numbers
['773-555-1212', '847-555-1212']
>>> record = ('ACME', 50, 123.45, (12, 18, 2012))
>>> name, *_, (*_, year) = record
>>> year # 2012
```
* Combine with splitting function
```python
>>> line = 'nobody:*:-2:-2:Unprivileged User:/var/empty:/usr/bin/false'
>>> name, *fields, homedir, sh = line.split(':')
>>> sh # /usr/bin/false
```
* It can be especially useful when iterating over a sequence of tuples of varying length. And these iterables have some known component or pattern in their construction.
```python
records = [
('foo', 1, 2),
('bar', 'hello'),
('foo', 3, 4),
]
def do_foo(x, y):
print('foo', x, y)
def do_bar(s):
print('bar', s)
# Only works for python 3
for tag, *args in records:
if tag == 'foo':
do_foo(*args)
elif tag == 'bar':
do_bar(*args)
```
#### Keeping the Last N Items
* keep a limited history of the last few items seen during iteration or during
some other kind of processing.
* Keeping a limited history is a perfect use for a collections.deque
```py
from collections import deque
def search(lines, pattern, history=5):
previous_lines = deque(maxlen=history)
for line in lines:
if pattern in line:
yield line, previous_lines
previous_lines.append(line)
# Example use on a file
if __name__ == '__main__':
with open('main.py') as f:
for line, prevlines in search(f, 'python', 5):
for pline in prevlines:
print(pline, end='')
print(line, end='')
print('-'*20)
### Output
# previous_lines.append(line)
# # Example use on a file
# if __name__ == '__main__':
# with open('main.py') as f:
# for line, prevlines in search(f, 'python', 5):
```
<file_sep>+++
date = "2017-06-07T16:56:21+11:00"
title = "Angular vs React vs Vue"
description="Angular, React, Vue as most popular JavaScript frameworks at present, we just discuss Angular 1.x, Angular 2 / 4, React 15+ (Redux), and Vue 2+ here "
+++
> Angular, React and Vue are the most popular JavaScript frameworks at present; we just discuss Angular 1.x, Angular 2 / 4, React 15+ (Redux), and Vue 2+ here. There is no Angular 3, in case you have not noticed.
## Client side is a battle field
In the past 6–8 years, the Restful API has been accepted as one of the standard web interfaces for most web applications; a solution architect can simply add a REST API on top of the existing web layer or business layer to support multiple client devices. So developers can continue to develop or maintain the system with their favorite programming language, framework or technical stack.
On the contrary, it is a completely different story on the client side: tons of JavaScript framework variants have emerged in the last 10 years. It is good to have more options, but it is a nightmare for web developers working in such a battlefield, because they need to try a lot of different stuff to make a decision. Especially when the project schedule is tight, it makes the tech lead or architect quite stressed. It becomes much worse when the development team tries to adopt a new framework for a new project; it is not easy to work out which framework to choose.
Don't forget there is another big risk in adopting a new programming language (ES6 or TypeScript) along with the new framework, as well as new development, build and test tools, if the team does not have enough skills or experience. As a solution architect, you need to think it through for the development team, and also consider whether the team can really pick it up quickly. That is why we have to compare those frameworks here before we make a decision.
## Common pitfalls to avoid
### Performance is not a priority
We can find lots of comparisons between those frameworks, and many of them are about performance, programming language, design patterns, etc. Actually, many web applications in the world are just small to medium size web applications; we don't need to build them the way Google, Facebook or Twitter do. In my opinion, the performance of a framework is not the critical benchmark; at least it is not the first priority when we consider whether it is right for the team. Besides performance, we care more about the tech stacks, community and ecosystem around the framework, which have more impact on the team's productivity and the system's maintainability.
### Cool stuff is not always the best
We have seen plenty of cool stuff eventually abandoned in the past; Silverlight is one such example. We shouldn't choose a new framework because it looks cool or because it is the latest one. We choose a new one because it can really solve our problems and improve our productivity and quality in the long run. Don't forget there is always some cost to adopting new things. We need to balance the cost and outcome of the technical investment, and work out whether it is the right time to do it.
### Programming language is still the barrier
If we need to use a new programming language, we have to evaluate it with the existing development team. Even though ES6 or TypeScript (TS) claims to be compatible with Vanilla JS, when you start to look into a new framework or sample project coded in ES6 or TS, it can still be confusing if you are not familiar with the syntax. That will significantly impact the efficiency of learning the new framework. So there is always a learning curve, which we cannot ignore, to code something in a new programming language.
Some complain that all those JS frameworks make the build process much more complicated than the old web frameworks because of the new programming languages. Does it really matter? The short answer is yes, but we are not going to discuss the advantages in detail here. If your team comes from a .Net Web Form or Java MVC background, it would be a steep curve for the team to pick up ES6 or TypeScript and a component-based framework, not to mention new build and test tools.
No wonder a few .Net teams were struggling with Node stack integration in Visual Studio, especially when the team members had no Node.js experience. So we need the whole team to discuss the difficulties before we adopt a new technology and framework. It is helpful to make sure the team has the same view, and it is also important to plan the training and decide how to transform the development team step by step.
## The difference of those framework
> Let's look into the frameworks and list the difference of these frameworks.
* Basic tech stacks
Tech Stacks | Angular 1.x | Angular 2 / 4 | React 15 (Redux) | Vue 2
----------------|-------------------| ---------------|---------------------|---------
Vanilla JS | Yes | Supported | Supported | Supported
ES 6 | 1.5+ | Supported | Yes | Yes
TypeScript | | Yes | | Supported |
MVC | 1.2-1.4 | | |
Component-Based | 1.5+ | Yes | Yes | Yes
Shadow DOM | | Yes | |
Virtual DOM| | | Yes | Yes
Immutable state | | | Yes | Yes
* Yes: Programming language which the framework uses.
* Supported: Programming language which the framework supports.
## Where to start
For the team which comes with Web Form, with Vanilla JS background, we can start with Angular 1.x (Up to 1.4) on some small projects, or we can build something training project, because the MVC pattern is very similar to their previous coding experience.
One more thing, I have to mention is the Angular 1.x application can be built without any Node.js tools, such as Gulp, Grunt, Webpack, etc. It makes the team feel comfortable to adopt it without prior experiences. Also, it gives the team some buffer to organize the training to pick up Node.js tools for the future.
For the team which has experience of Angular 1.2 ~ 1.4, they can choose to stay on later version of Angular 1.x, e.g. Angular 1.5+, and they can start to convert coding pattern from MVC to Component-based. After that, if the team is planning to move to Angular 2 / 4, it is better to do some TypeScript training. In my view, so far the ecosystem for Angular 2 / 4 is still under development. It is a bit risky to use Angular 2 / 4 to build the real-world production. There are quite many gotchas which you have to figure out on your own.
For the team which has TypeScript or ES6 experience, they can choose what they prefer. They can spend more time on UI integration. There are a few customized UI package for bespoke framework. That is what we are going to discuss in the next.
### Responsive UI library support
> To build a real-world application, we need to integrate some popular responsive UI libraries instead of building all styles on our own. Let’s take a look the support of Bootstrap or Material-Design for different frameworks.
UI library | Angular 1.x | Angular 2 / 4 | React 15 (Redux) | Vue 2
--------------|------------------- | ---------------|------------------|---------
Bootstrap 3 | ui-bootstrap (Very Good) | | react-bootstrap(Very Good) | VueStrap* (Very Good)
Bootstrap 4 | | ui-bootstrap (Alpha) | In progress | BootstrapVue (Good) |
Material Design | Materialize (Good) | Angular Material(Basic) | Material-UI (Good) | Vuetify (Very Good)
* VueStrap: Please use the [Willen's fork](https://github.com/wffranco/vue-strap) for Vue 2.
* Libraries in the table above have been tested or used in some projects.
From what we can see now, Bootstrap 4 is similar to Material-Design, which is good news for developers: they just need to pick their favorite, and they will get an analogous result either way.
Actually, there are tons of UI libraries / CSS frameworks available on GitHub, and many are platform-neutral, i.e. they can be integrated with Angular, React or Vue. To be honest, integration is never easy; it will take you or your team some extra effort. Keep in mind that to integrate a platform-neutral framework you need to take care of dependencies and build/test tools on your own, such as webpack or yarn.
### Stable API
Compared with Angular 1.x, Angular 2 is a completely new animal. Angular 4 comes with some breaking changes, which break a few Angular 2 dependencies (third parties). Since the API of Angular 4 is still under active development, we cannot use it for production. According to the Angular team's announcement, they want to fix all of Angular 2's bugs and issues in Angular 4 and keep all built-in libraries in sync with Angular 4. It will take a long while to get things ready. If your project uses Angular 1.2–1.4, I'd suggest you keep it until Angular 4 is finalized.
React-Redux has become much more popular than React-Flux recently, but that doesn't mean it is better than the React-Flux pattern. In my opinion, React-Flux is more straightforward and closer to the original React design. If you already use React-Flux, you had better stick with it.
Vue 2 comes with some breaking changes, and there is a migration guide from Vue 1.x to Vue 2. It doesn't seem very different, and Vue 2 is ready for production.
## How to compare
In order to compare those frameworks properly, I use those frameworks to create a small real-world web application, which has built-in authentication support for the back-end API service, and integrated with some responsive UI framework, e.g. Bootstrap or Material-Design.
You will find there is no project built on Angular 1.x, because my team and I have built a lot of real-world applications upon Angular 1.x. We know Angular 1.x, including its ecosystem, is quite reliable, and you can replicate almost any web application with this framework.
On the other hand, Angular 1.x is built with Vanilla JS, i.e. you don't need a transpiler to build an Angular 1.x app, so it is a bit unfair to compare it with frameworks coded in ES6 or TypeScript, because the build tooling and setup for Angular 1.x is easier than the others. I mention Angular 1.x here as a reminder that there is another option for teams coming from traditional MVC stacks; it is a proper way to transform the team smoothly.
### Following are the projects and related screenshots
[Angular 4 CRM](/projects/angular4-crm/)

[React CRM](/projects/react-crm/)

[Vue 2 CRM](/projects/vue2-crm/)

## Comparison of different framework
Let's go back to the projects above and take a look. Basically they implement almost the same features as a simple real-world CRM application.
### __Features__
* Authentication & Token support for Restful API
* Customer CRUD functions
* Order CRUD functions
* Dashboard including two charts (Bar/Line/Doughnut)
* Integrate with Material Design (Angular project includes bootstrap)
### Size of source code
| |Angular 4 CRM | React Redux CRM | Vue 2 CRM
|----------|-----------------|--------------|-------------
Dependencies | 22 | 13 | 9
Code Size | 135KB | 113KB | 49KB
Working Hours | 72 hrs | 80 hrs | 48hrs
* Dependencies: Any dependencies for test, distribute are excluded
* Code Size: It includes some customized CSS file, but image files are excluded
* Working Hours: The effort for learning curve has been eliminated, but R&D effort cannot be excluded.
### Working hours is a reflection of productivity
Firstly, I have to explain why the React project took more effort than the other two projects. Compared with React, Angular 4 and Vue 2 are a bit newer, i.e. there are more packages and libraries available online for React. As I mentioned before, that is not necessarily good news: we need to try more of them to figure out the pros and cons of the different solutions. Unfortunately, we cannot exclude such R&D effort when we build these projects.
According to the dependencies and code size above, we can see the project based on Vue.js is much simpler than the other two projects. In my view, Vue 2 is my favorite for the next new project. It combines the advantages of Angular and React, and it also addresses some problems we found in Angular and React.
Vue.js uses Virtual DOM, which avoids many dirty checking in Angular 1.x, and the complicated coding pattern (Observable & ReactiveJs, IMO) in Angular 2 / 4.
Vue.js makes the handling of immutable and mutable variables much easier than React. Its template is very handy and straight. It is the same as regular HTML, it is very easy to convert the mock-up HTML into Vue template, especially when you need to customize you styles. Vue’s template and directive is similar to Angular.
Vue.js is not just cool, it is elegant and simple. I am pretty sure if you have Angular or React background, you will pick it up in a couple hours or days. Once you start to use it, you won’t want to go back. Its official routing system is quite stable and easy to use. Compare with Angular-Router or React-Router, it is much more reliable.
Generally, the Material-Design libraries for React are not as handy as the customized versions for Angular or Vue. The special coding style of JSX requires converting all CSS and HTML into JSX format. To be honest, I am not convinced by React's JSX, because it is not as straightforward as the final HTML or CSS. Compared to other frameworks, it is a bit verbose and inconvenient. We cannot simply copy the style code from the browser's dev tools when we debug in the browser, i.e. you need to put in more effort to make your page pretty.
Angular's Material-Design library has very limited components. To build a real-world application you need to add another UI library to supply the missing components. Last but not least, Vuetify is the best Material-Design library we have found and tested so far.
Angular 4 CRM — https://github.com/harryho/ng4crm
React Redux CRM — https://github.com/harryho/react-crm
Vue 2 CRM — https://github.com/harryho/vue2crm
## Summary
Before we make any conclusions, we have to be aware that the world keeps changing. Perhaps while I was writing this article, some problems of a framework were solved, or some small problems became worse and worse. We have to review the decisions we make from time to time and correct them ASAP if we find the cost outweighs the outcome.
* A team with a Web Form and Vanilla JS background should start with Angular 1.4 and take some time to become familiar with Node.js tools.
* A team with a Vanilla JS background should start to learn ES6 or TypeScript, since sooner or later all the browsers, including mobile devices, will support them.
* A team with an ES6 / TypeScript background can choose any framework they prefer; the integration with other UI libraries will take some time before a judgement can be made.
* A team with React-Flux can continue or switch to React-Redux. It may reduce some boilerplate code, but I don't think it is a big deal.
* For newcomers to React, I recommend React-Redux, because it has better community support.
* In my opinion, continuing to invest anything in Angular 2 is a bit of a waste, because the Angular team wants you to move to Angular 4 as soon as possible once it is ready for production, and they are planning to fix Angular 2's issues in Angular 4.
* Angular 4 and its ecosystem are under active development, but please be careful if you want to use them in production.
* The Vue.js framework is a very nice one. Give it a go on your next project.
## Update
Angular Material Design App — https://github.com/harryho/ng-md-app
React Redux CRM — https://github.com/harryho/react-crm
Vue 2 CRM — https://github.com/harryho/vue2crm
Angular 4 CRM — https://github.com/harryho/ng4crm (It is no longer maintained to support latest Angular)<file_sep>+++
title = "DigitialOcean: Lets Encrypt"
description="Lets Encrypt & Auto renewal"
weight=4
+++
> Here I continue to finish the web host setup. The last step of web host setup is to add SSL certificate for each site
### Lets Encrypt
To enable HTTPS on your website, you need to get a certificate (a type of file) from a Certificate Authority (CA). [Let’s Encrypt](https://letsencrypt.org/) is a CA. In order to get a certificate for your website’s domain from Let’s Encrypt, you have to demonstrate control over the domain. With Let’s Encrypt, you do this using software that uses the ACME protocol, which typically runs on your web host.
#### Installing Certbot
* The first step to using Let’s Encrypt to obtain an SSL certificate is to install the Certbot software on your server.
* Certbot is in very active development, so the Certbot packages provided by Ubuntu tend to be outdated. However, the Certbot developers maintain a Ubuntu software repository with up-to-date versions, so we’ll use that repository instead.
```bash
# First, add the repository
sudo add-apt-repository ppa:certbot/certbot
# Install Certbot’s Nginx package with apt
sudo apt install python-certbot-nginx
```
* In order to configure SSL for Nginx, we need to verify some of Nginx’s configuration.
### Confirming Nginx’s Configuration
* In the previous setup, the site for __domain-one.com__ has been up and running in the droplet.
* Open the server block file of __domain-one.com__ via any text editor:
    sudo vi /etc/nginx/sites-available/domain-one.com
* Find the existing __server_name__ line in the file
server_name domain-one.com www.domain-one.com;
* If you change the server name, you need to re-run the test and reload nginx
sudo nginx -t
sudo systemctl reload nginx
Certbot can now find the correct server block and update it.
Next, let’s update the firewall to allow HTTPS traffic.
### Allowing HTTPS Through the Firewall
* UFW configuration has been done in the previous setup. It is good to confirm the UFW configuration.
* You can see the current setting by typing:
sudo ufw status
Output
Status: active
To Action From
-- ------ ----
OpenSSH ALLOW Anywhere
Nginx Full ALLOW Anywhere
OpenSSH (v6) ALLOW Anywhere (v6)
Nginx Full (v6) ALLOW Anywhere (v6)
* If the 'Nginx Full' is not allowed in your UFW, please run commands below to enable it.
sudo ufw allow 'Nginx Full'
sudo ufw delete allow 'Nginx HTTP'
* Next, let’s run Certbot and fetch our certificates.
### Obtaining an SSL Certificate
Certbot provides a variety of ways to obtain SSL certificates through plugins. The Nginx plugin will take care of reconfiguring Nginx and reloading the config whenever necessary. To use this plugin, type the following:
    sudo certbot --nginx -d domain-one.com -d www.domain-one.com
This runs certbot with the --nginx plugin, using -d to specify the names we'd like the certificate to be valid for.
If this is your first time running certbot, you will be prompted to enter an email address and agree to the terms of service. After doing so, certbot will communicate with the Let’s Encrypt server, then run a challenge to verify that you control the domain you’re requesting a certificate for.
If that’s successful, certbot will ask how you’d like to configure your HTTPS settings.
# Please choose whether or not to redirect HTTP traffic to HTTPS, removing HTTP access.
# -------------------------------------------------------------------------------
# 1: No redirect - Make no further changes to the webserver configuration.
# 2: Redirect - Make all requests redirect to secure HTTPS access. Choose this for
# new sites, or if you're confident your site works on HTTPS. You can undo this
# change by editing your web server's configuration.
# -------------------------------------------------------------------------------
# Select the appropriate number [1-2] then [enter] (press 'c' to cancel):
# Select your choice then hit ENTER. The configuration will be updated, and Nginx will
# reload to pick up the new settings. certbot will wrap up with a message telling
# you the process was successful and where your certificates are stored:
# ...
# ...
### Renew certificate
Let’s Encrypt’s certificates are only valid for ninety days. This is to encourage users to automate their certificate renewal process.
#### Verifying Certbot Auto-Renewal
The certbot package we installed takes care of this for us by adding a renew script to `/etc/cron.d`. This script runs twice a day and will automatically renew any certificate that’s within thirty days of expiration.
To test the renewal process, you can do a dry run with certbot:
sudo certbot renew --dry-run
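You can also list the certificates Certbot manages, together with their expiry dates:

    sudo certbot certificates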
#### Use Cron to renew certificate
Here I just add the cronjob to system crontab `/etc/crontab`
# Lets encrypt cronjob - start from 1st April 2020
0 0 1 */3 * root /usr/bin/certbot renew >> /var/log/letsencrypt/renew.log
<file_sep>+++
date = "2014-01-10T14:59:31+11:00"
title = "CentOS 6/7 Multi-Boot Setup"
description = "CentOS 6/7 Multi-Boot note"
draft = false
+++
## Bootable usb preparation
> Download dvd iso from url or torrent
> Use Win32 Image Writer to create usb. ( Bootice is useful tool to reformat the USB as origin )
## Install CentOS on virtual machine
* Before you install, you had better back up everything on the device which you are going to install on, and check that your internet is working properly.
* CentOS provides a friendly and nice installation process. If you choose VMware or VirtualBox as the machine, you can open the [installation steps](http://www.tecmint.com/centos-7-installation) in your browser or use an iPad / tablet to access this page. You just need to follow the instructions step by step; it will be done within an hour or so (it varies with the computing power of the PC).
## Prepare VM for CentOS
* Create a new virtual machine.
* Type in the name of vm. e.g. CentOS
* Select the type of Linux
* You can choose RedHat(32/64 bit ) or something else. It doesn't matter. We don't use any built-in xxx.iso files from VirtualBox. Then click `Next`
* Select the memory size for the CentOS. It is up to you. I prefer 4GB, but 2GB is necessary. And then click `Next`.
* Select "Create a virtual hard drive now", and then click `Next`.
* Select defaut VDI, then click `Next`.
* Select "Dynamically allocated", then click `Next`.
* (VirtualBox) Choose the location of the CentOS disk, e.g. c:\vbox\centos\centos.vdi, and select the size of the VDI file: at least 10G, though I'd select 20G or more. Then a virtual machine for CentOS is created.
* (VMware) Choose the location of the CentOS disk, e.g. c:\vm\centos\centos.vmdk, and select the size of the VMDK file in the same way.
* Configure the CentOS hardware settings on VirtualBox or VMware.
**VirtualBox**
* On the VirtualBox toolbar, there is a `Start` button. Click `Start`, then go to the Storage item.
* Under the Storage Tree section, there is an Empty CD icon. Click the Empty icon.
* Under the Attributes section, click the CD icon at the end of the dropdown list of the CD/DVD Drive. Choose the CentOS iso file which you downloaded earlier. Click `OK`.
* Leave all the other settings as default. Click the `Start` button on the toolbar.
**VMWare**
* On VMware, you can find the CD/DVD button on the tab page of the new virtual machine.
* Click the CD button at the end of the dropdown list of the CD/DVD Drive. Choose the CentOS iso file which you downloaded earlier. Click `OK`.
* Leave all the other settings as default. Click the `Power on this virtual machine` option on the tab page.
## Config CentOS default setting
After CentOS starts, we can configure its default settings. Don't panic, the configuration environment is very nice. You don't need to type any command line so far.
* Prepare the installation.
* Choose the language of CentOS. Then click Continue.
* On the Installation Summary screen, there is a warning icon attached to the hard drive icon, which is under the SYSTEM section with the label "INSTALLATION DESTINATION". Click that label.
* You will see the partitioning is already done automatically. I'd suggest you leave it as it is until you are familiar with CentOS. Then click the Done button in the header.
* Leave the SOFTWARE SELECTION as Minimal Install; NETWORK & HOSTNAME as Not connected. Then click Begin Installation button.
* Setup account
* Setup the password of the root account. Please remember the password. If you forget it you will need to reset the root password; to do that you need to do a few things which depend on the CentOS version. As a beginner, please don't make it too complicated.
* Create your account, e.g. harryporter, and mark your account as administrator to save some effort. Please remember your password and don't make it too complicated.
* The progress of installation is complete. Click the Finish configuration button. After a few seconds, you will see the Reboot button. Then click it.
## Manage CentOS packages and software
* There are two management tools: rpm and yum. To make it easy, we just talk about yum here. It is a very handy tool. Compared with the Windows built-in program management tool, it is much more powerful. It provides the necessary functions for an admin. If you need to maintain a Linux server, you will use it in your daily tasks.
* Use `man yum` to take a look at the description of yum. You do not need to understand every usage of yum; an overview is enough.
* There are a few useful and common yum commands.
```bash
yum list installed | less
yum search <pacakge_name>
yum grouplist
```
### Install CentOS on PC or laptop
## Setup network ( via cable )
If you install a minimal version without network configuration, you will find you cannot ping any public domain. Here I am going to show you how to set up the connection.
* You can use `ip a` command to check the status of all network interface.
* You will see that the state of the interfaces is DOWN or UNKNOWN.
```bash
## lo **** qdisc pfifo_fast noqueue UNKNOWN
## eth0 **** qdisc pfifo_fast state DOWN
```
* Use `## ifup eth0` to start eth0; then you can access the internet. It is a bit tedious to start the network service every time we reboot the system, so in the next step we will set up the network service to start automatically after the system is up.
* There is a configuration file which can help you set up the internet connection after startup. Usually the configuration is under the path `/etc/sysconfig/network-scripts`, and the file name would be `ifcfg-eth<*>`. So we check the real file name first.
```bash
ls /etc/sysconfig/network-scripts/ifcfg-*
## Following are sample of files which will sit in your system.
/etc/sysconfig/network-scripts/ifcfg-eth0
/etc/sysconfig/network-scripts/ifcfg-lo
```
Now we need to update this file via nano or vi. If you have no experience with vi, I recommend an interactive online tutorial; in just 20 minutes you can master basic vi usage.
http://www.openvim.com/tutorial.html
We use vi to open the config file.
```bash
## vi /etc/sysconfig/network-scripts/ifcfg-eth0
```
You will see a setting like the one below.
ONBOOT=no
You just need to update it to ONBOOT=yes, then save it and reboot CentOS to test the result.
```bash
## reboot
```
After you reboot and log in to CentOS, you can use ping to test whether your system can access the internet; if it can, the configuration was updated successfully.
### Setup Wifi
* Even if you set up Wifi during the installation, you may find that Wifi is not available on CentOS afterwards, because the minimal install does not include the wifi packages.
* Mount the DVD or iso file
* use yum to install the NetworkManager-wifi package
```bash
yum --disablerepo=\* install /path/to/dvd/Packages/NetworkManager-wifi*
```
### Multiple boot system ( Fedora, CentOS, Redhat )
## Partition setup for multiple OS installation
> Fedora, CentOS and Redhat share almost the same installation process.
> Click on the `Installation Destination` icon to change this to custom partitioning
> Under the Other Storage Options, choose I will configure partitioning then click Done
> Following is sample of partition setup of multiple boot system.
```ini
sda
+----sda1 nfts 500M Windows recovery
+----sda2 efi /boot 100M grub2 , Windows boot manager
+----sda3 / 10M
+----sda4 ntfs / 40000M Window 7/8/10
+----sda5 swap <Double size of your RAM size>
+----sda6 ext4 / 20000M Ubuntu 14 desktop
+----sda7 ext4 / 20000M Fedora 20 desktop
+----sda8 ext4 / 20000M CentOS 6 desktop
+----sda9 ext4 / 20000M OpenSuse desktop
+ ...
```
## Troubleshooting
### ifconfig not found in CentOS minimal server
Use command `ip`
```
ip addr
ip -s link
```
### Enable Network (Non-wifi) onboot after minimal installation
If you cannot ping any domain, use `dhclient -v` to check if the internet is available.
Setup the network enabled onboot
```
## cd /etc/sysconfig/network-scripts/
## sed -i -e 's@^ONBOOT="no@ONBOOT="yes@' ifcfg-e.xx.xxx
```
### Boot CentOS in terminal
```
## cat /etc/inittab
## systemctl get-default
graphical.target
## systemctl set-default multi-user.target
```
### Fedora boot error
* Please check the grub.cfg if you get booting error
* You can try the following commands to boot Fedora from the Grub menu
```bash
linux /boot/vmlinuz-x.x.x-x.x.x
initrd /boot/initrd-plymouth.img
```
<file_sep>#!/bin/bash
# Rotate a stale public IP out of AWS security-group ingress rules: for each
# security group listed below, revoke the rule that allowed the old IP and
# authorize the same rule for this machine's current public IP (looked up via
# ifconfig.me), then refresh the rule description. Port 22 is used for the
# "uss" profile and port 3389 for the "ad1" profile. Progress goes to aws_sg.log.
IP_LOG=ip.log
LOG=aws_sg.log
# echo $SG_LIST  # leftover debug line; SG_LIST is never defined in this script
show_sg_ids() {
aws ec2 --profile $PROFILE describe-security-groups \
--output json \
--filters "Name=group-name,Values=*Bastion*" \
--query 'SecurityGroups[*].{Name:GroupName,ID:GroupId,permissions:IpPermissions[*]}' | jq
}
get_perm() {
PROFILE=$1
# echo $PROFILE | tee -a $LOG
if [[ $PROFILE == "uss" ]]; then
PERM='[{"IpProtocol":"tcp","FromPort":22,"ToPort":22,"IpRanges":[{"CidrIp":"IP_ADDRESS/32"}]}]'
elif [[ $PROFILE == "ad1" ]]; then
PERM='[{"IpProtocol":"tcp","FromPort":3389,"ToPort":3389,"IpRanges":[{"CidrIp":"IP_ADDRESS/32"}]}]'
fi
echo $PERM
}
get_desc() {
PROFILE=$1
if [[ $PROFILE == "uss" ]]; then
DESC='[{"IpProtocol":"tcp","FromPort":22,"ToPort":22,"IpRanges":[{"CidrIp":"IP_ADDRESS/32","Description":"Harry"}]}]'
elif [[ $PROFILE == "ad1" ]]; then
DESC='[{"IpProtocol":"tcp","FromPort":3389,"ToPort":3389,"IpRanges":[{"CidrIp":"IP_ADDRESS/32","Description":"Harry"}]}]'
fi
# echo $PROFILE | tee -a $LOG
echo $DESC
}
update_sg() {
PROFILE=$1
SGID=$2
OIP=$3
NIP=$4
echo $PROFILE $SG $OIP $NIP | tee -a $LOG
PERM=$(get_perm $PROFILE)
DESC=$(get_desc $PROFILE)
OLD_PERM=${PERM/"IP_ADDRESS"/$OIP}
NEW_PERM=${PERM/"IP_ADDRESS"/$NIP}
NEW_DESC=${DESC/"IP_ADDRESS"/$NIP}
echo $OLD_PERM | tee -a $LOG
echo $NEW_PERM | tee -a $LOG
echo $NEW_DESC | tee -a $LOG
aws ec2 --profile $PROFILE \
revoke-security-group-ingress \
--group-id $SGID --ip-permissions $OLD_PERM
aws ec2 --profile $PROFILE \
authorize-security-group-ingress \
--group-id $SGID \
--ip-permissions $NEW_PERM
aws ec2 --profile $PROFILE \
update-security-group-rule-descriptions-ingress \
--group-id $SGID --ip-permissions $NEW_DESC
aws ec2 --profile $PROFILE \
describe-security-groups \
--output json \
--group-ids $SGID | jq
}
update_ad1_sg() {
OIP=$1
NIP=$2
AD1_SG_LIST=(
sg-0ad548ef15af34a70 # AD1_Dev_Web_AZ2_SG
sg-0a6169737f7d9f004
sg-016fe912b6508922f
sg-357d1c4c
sg-afb1aac8
sg-dd1470a4
sg-e9abf48e
sg-2ea3fc49 # NSW PROXY
sg-015675cf1a33c7ac8 # NSW UAT
sg-0a16987f96e469902 # NSW1-PROD-BASTION-SG
)
echo " :::::::::::: PROFILE - ad1 :::::::::::: " | tee -a $LOG
for SG in "${AD1_SG_LIST[@]}"; do
update_sg ad1 $SG $OIP $NIP
done
show_ips ad1
}
update_uss_sg() {
OIP=$1
NIP=$2
USS_SG_LIST=(
sg-01964d6e858a0ca06
sg-021143c587a1cc824
sg-024d069fa41797225
sg-04e182d3cd00aab71
sg-30865449
sg-3736e74e
sg-878351fe
sg-028138035f54e1019
sg-04e182d3cd00aab71
sg-0fc6ba78da9c3db99
sg-d771cbae
sg-5805bf3f # FTP
)
echo " :::::::::::: PROFILE - uss :::::::::::: " | tee -a $LOG
for SG in "${USS_SG_LIST[@]}"; do
update_sg uss $SG $OIP $NIP
done
show_ips uss
}
show_ips() {
PROFILE=$1
aws ec2 --profile $PROFILE \
describe-security-groups \
--output json \
--query 'SecurityGroups[*].{Name:GroupName,ID:GroupId,permissions:IpPermissions[*]}' | grep -i "Harry" -C 2
}
OIPS=(
192.168.127.12
)
main() {
echo 'Start...' $(date) | tee -a $LOG
# echo $PROFILE | tee -a $LOG
for OLD_IP in ${OIPS[@]}; do
NEW_IP=$(curl ifconfig.me)
echo Old IP $OLD_IP | tee -a $LOG
echo New IP $NEW_IP | tee -a $LOG
# AD1 session
update_ad1_sg $OLD_IP $NEW_IP
## USS session
update_uss_sg $OLD_IP $NEW_IP
done
echo "DONE $(date) !!!!!!!!!! " | tee -a $LOG
}
main
# 172.16.31.10
# 172.16.58.3
# 172.16.58.3
# 172.16.58.3
# 172.16.58.3
# 172.16.31.10
# 14.203.89.47<file_sep>+++
title = "Inheritance & Polymorphism"
description = "Inheritance & Polymorphism"
weight=5
+++
## Inheritance & Polymorphism
* Specify single inheritance by putting a base class in parentheses after defining a class's name
* Subclasses have all of the methods of their base class
* It's often best to explicitly call a base class initializer from a subclass's initializer
* If a class with a single base class doesn't define an initializer, the base class's initializer will be called automatically on construction
* Python treats `__init__()` like any other method
* Base class `__init__()` is not called if overridden
* Use `super()` to call base class `__init__()`
* `isinstance()` takes an object as its first argument and a type as its second
* `isinstance()` determines if its first argument is an instance of the second argument, or any subclass of the second argument
* `isinstance()` can accept a tuple of types as its second argument, in which it returns True if the first argument is of any of those types
* Checking for specific types is rare in Python and is sometimes regarded as bad design
* `issubclass()` determines if its first argument is a direct or indirect subclass of, or the same type as, the second argument
* Multiple inheritance means having more than one direct base class
* You declare multiple base classes with a comma-separated list of class names in parentheses after a class's name in a class definition
* A class can have as many base classes as you want
* Python uses a well-defined "method resolution order" to resolve methods at runtime
* If a multiply-inheriting class defines no initializer, Python will automatically call the initializer of its first base class on construction
* `__bases__` is a tuple of types on a class object which defines the base classes for the class
* `__bases__` is in the same order as in the class definition
* `__bases__` is populated for both single and multiple inheritance
* Method resolution order defines the order in which Python will search an inheritance graph for methods
* MRO is short for Method Resolution Order
* MRO is stored as a tuple of types in the `__mro__` attribute of a class
* The `mro()` method on type objects returns the contents of `__mro__` as a list
* To resolve a method, Python uses the first entry in a class's MRO which has the requested method
* MRO is dependent on base class declaration order
* MRO is calculated by Python using the C3 algorithm
* MRO honors base-class ordering from class definitions
* MRO puts subclasses before base classes
* The relative order of classes in an MRO is consistent across all classes
* It is possible to specify an inconsistent base class ordering, in which case Python will raise a TypeError when the class definition is reached
* `super()` operates by using the elements in an MRO that come after some specified type
* `super()` returns a proxy object which forwards calls to the correct objects
* There are two distinct types of `super()` proxies, bound and unbound
* Unbound `super()` proxies are primarily used for implementing other Python features
* Bound proxies can be bound to either class objects or instances
* Calling `super()` with a base-class and derived-class argument returns a proxy bound to a class
* Calling `super()` with a class and an instance of that class returns a proxy bound to an instance
* A `super()` proxy takes the MRO of its second argument (or the type of its second argument), finds the first argument in that MRO, and uses everything after it in the MRO for method resolution
* Since class-bound proxies aren't bound to an instance, you can’t directly call instance methods that they resolve for you
* However, classmethods resolved by class-bound proxies can be called directly
* Python will raise a TypeError if the second argument is not a subclass or instance of the first argument
* Inappropriate use of `super()` can violate some design constraints
* Calling `super()` with no arguments inside an instance method produces an instance-bound proxy
* Calling `super()` with no arguments inside a classmethod produces a class-bound proxy
* In both cases, the no-argument form of `super()` is the same as calling `super()` with the method's class as the first argument and the method's first argument as the second
* Since `super()` works on MROs and not just a class's base classes, classes can be designed to cooperate without prior knowledge of one another
* The class object is at the core of Python's object model
* object is the ultimate base class for all other classes in Python
* If you don't specify a base class for a class, Python automatically uses object as the base
* Because object is in every class's inheritance graph, it shows up in every MRO.
* object provides hooks for Python's comparison operators
* object provides default `__repr__()` and `__str__()` implementations
* object implements the core attribute lookup and management functionality in Python
* Inheritance in Python is best used as a way to share implementation
### Explanation with example
#### Example code
* The code below demonstrates the sometimes surprising behaviour of `super()` in Python
```python
from pprint import pprint as pp
class Parent(object):
name = 'Parent'
@classmethod
def do_otherthing(self):
print('This is from Parent. {} do_otherthing'.format(self.name))
## print('This is from Parent. do_otherthing {}'.format(self.name))
def do_something(self):
print('This is from Parent. The name is {}'.format(self.name))
class Child(Parent):
name = 'Child'
@classmethod
def do_otherthing(self):
print('This is from Child. {} do_otherthing'.format(self.name))
## print('This is from Child. do_otherthing {}'.format(self.name))
def do_something(self):
print("This is from Child. The name is {} ".format(self.name))
class OtherChild(Parent):
name = 'OtherChild'
def do_something(self):
print("This is from OtherChild. The name is {} ".format(self.name))
class OtherOtherChild(Parent):
name = 'OtherOtherChild'
def do_something(self):
print("This is from OtherOtherChild. The name is {} ".format(self.name))
class GrandChild(Child, OtherChild, OtherOtherChild):
name = 'GrandChild'
@classmethod
def do_otherthing(self):
print('This is from GrandChild. {} do_otherthing'.format(self.name))
def do_something(self):
print("This is from GrandChild. The name is {}".format(self.name))
if __name__ == "__main__":
pp(Child.__mro__)
pp(GrandChild.__mro__)
c = Child()
c.do_something()
gc = GrandChild()
gc.do_something()
print('Class bound super()')
super(Child, Child).do_otherthing()
super(Child, GrandChild).do_otherthing()
super(GrandChild, GrandChild).do_otherthing()
print('Instance bound super()')
super(Child, gc).do_something()
super(OtherChild, gc).do_something()
super(GrandChild , gc).do_something()
### test result
#(<class '__main__.Child'>,
#<class '__main__.Parent'>,
#<class 'object'>)
#(<class '__main__.GrandChild'>,
## <class '__main__.Child'>,
## <class '__main__.OtherChild'>,
## <class '__main__.OtherOtherChild'>,
## <class '__main__.Parent'>,
## <class 'object'>)
## This is from Child. The name is Child
## This is from GrandChild. The name is GrandChild
## Class bound super()
## This is from Parent. Child do_otherthing
## This is from Parent. GrandChild do_otherthing
## This is from Child. GrandChild do_otherthing
## Instance bound super()
## This is from OtherChild. The name is GrandChild
## This is from OtherOtherChild. The name is GrandChild
## This is from Child. The name is GrandChild
## Multiple inheritance in Python is very different from
## other OO languages like C++, C#, Java.
## The result above surprised me the first time I saw it,
## but after checking the MRO I can understand
## why many veterans suggest avoiding multiple inheritance.
## Its `super` "magic" really confuses many people.
```
#### Explanation with break down
* Let's break down how `super()` works in the sample code
##### Case 1: Parent and child
* Case 1:
* code : `super(Child, Child).do_otherthing()`
* `super` takes the MRO of its second argument `Child`
```
<class '__main__.Child'>,
<class '__main__.Parent'>,
<class 'object'>
```
* `super` finds the first argument `Child` in that MRO, and uses everything after it in the MRO for method resolution
* `super` uses the method from `Parent`, which comes after `Child` in the MRO, and binds it to the `Child` class object
##### Case 2: Grandparent and grandchild
* Case 2:
* code : `super(GrandChild, GrandChild).do_otherthing()`
* `super` takes the MRO of its second argument `GrandChild`
```
<class '__main__.GrandChild'>,
<class '__main__.Child'>,
<class '__main__.OtherChild'>,
<class '__main__.OtherOtherChild'>,
<class '__main__.Parent'>,
<class 'object'>
```
* `super` finds the first argument `GrandChild` in that MRO, and uses everything after it in the MRO for method resolution
* `super` uses the method from `Child`, which comes after `GrandChild` in the MRO, and binds it to the `GrandChild` class object
##### Case 3: Grandparent, children & grandchild
* Case 3:
* code : `super(OtherChild, gc).do_something()`
* `super` takes the MRO of the type `GrandChild` of its second argument `gc`
```
<class '__main__.GrandChild'>,
<class '__main__.Child'>,
<class '__main__.OtherChild'>,
<class '__main__.OtherOtherChild'>,
<class '__main__.Parent'>,
<class 'object'>
```
* `super` finds the first argument `OtherChild` in that MRO, and uses everything after it in the MRO for method resolution
* `super` uses the method from `OtherOtherChild`, which comes after `OtherChild` in the MRO, and binds it to the instance `gc`
<file_sep>+++
date = "2017-10-11T11:59:31+11:00"
title = "Try Minikube"
description="Test Minikube on virtual machine"
+++
## Prerequisites
* Install KVM or VirtualBox
* Install Ubuntu / Debian on KVM or VirtualBox
* Install Minikube
## Check K8s version and config
kubectl version
    kubectl config view
kubectl cluster-info
## Get / Describe command
    kubectl get nodes
kubectl get pods
kubectl get deployments
kubectl get services
kubectl describe pods
## Deploy hello-world node demo app
* Deploy a demo app
```bash
kubectl run kubernetes-bootcamp --image=gcr.io/google-samples/kubernetes-bootcamp:v1 --port=8080
# View deployments and pods
kubectl get deployments
kubectl get pods
```
* Pods that are running inside Kubernetes are running on a private, isolated network. By default they are visible from other pods and services within the same kubernetes cluster, but not outside that network.
* The kubectl command can create a proxy that will forward communications into the cluster-wide, private network.
```bash
# Create a proxy from another terminal
kubectl proxy
# Test it from the original terminal
curl http://localhost:8001/version
```
* Get pod name
```bash
export POD_NAME=$(kubectl get pods -o go-template --template \
'{{range .items}}{{.metadata.name}}{{"\n"}}{{end}}')
echo Name of the Pod: $POD_NAME
kubectl logs $POD_NAME
```
* Execute commands on the container
```bash
kubectl exec $POD_NAME -- env
kubectl exec -it $POD_NAME -- bash
kubectl exec $POD_NAME -- curl localhost:8080
```
* Scale demo app with replica
```bash
# kubernetes-bootcamp deployment is the same as above
kubectl get pods -o wide
kubectl describe deployments/kubernetes-bootcamp
kubectl scale deployments/kubernetes-bootcamp --replicas=2
# Get NodePort
export NODE_PORT=$(kubectl get services/kubernetes-bootcamp \
-o go-template='{{(index .spec.ports 0).nodePort}}')
echo NODE_PORT=$NODE_PORT
# Test load balance
curl $(minikube ip):$NODE_PORT
```
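* Note: the NodePort lookup above assumes the deployment has already been exposed as a Service of type NodePort. If it has not, a command along these lines (reusing the same deployment name) creates one:
```bash
# Expose the deployment on a NodePort so it is reachable via $(minikube ip)
kubectl expose deployment/kubernetes-bootcamp --type="NodePort" --port 8080
kubectl get services
```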
<file_sep>+++
date = "2016-08-11T11:59:31+11:00"
title = "Git Practices"
description="Useful Git commands & practices for repository management"
+++
## Push existing repository to remote
* Update the remote git url
$ git remote set-url origin [your_new_repo_url]
* Push it to the remote branch master/main
$ git push -uf origin [master/main]
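To double-check that the remote URL was updated before pushing, list the configured remotes:

    $ git remote -v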
## Create a new branch with git
### Create the branch locally
* Create the branch on your local machine and switch in this branch
$ git checkout -b [name_of_your_new_branch]
### Push the branch
* Push the branch on git-repository (Github, Bitbucket)
$ git push origin [name_of_your_new_branch]
When you want to commit something in your branch, be sure to be in your branch.
## Manage branches (Push, Fetch & Merge)
List all branches
$ git branch
Add a new remote for your branch :
$ git remote add [name_of_your_remote]
Push changes from your commit into your branch :
$ git push [name_of_your_new_remote] [name_of_your_branch]
Update your branch when the original branch from official repository has been updated :
$ git fetch [name_of_your_remote]
Then you need to merge the changes; if your branch is derived from develop, you need to do:
$ git merge [name_of_your_remote]/develop
## Delete branch
Delete a branch on your local filesystem :
$ git branch -d [name_of_your_new_branch]
To force the deletion of local branch on your filesystem :
$ git branch -D [name_of_your_new_branch]
Delete the branch on github :
$ git push origin :[name_of_your_new_branch]
Compare two branches:
$ git diff [name_of_branch1]..[name_of_branch2]
---
## Branch merge
### Fast-Forward Merge
Our first example demonstrates a fast-forward merge. The code below creates a new branch, adds two commits to it, then integrates it into the main line with a fast-forward merge.
```bash
## Start a new feature
$ git checkout -b new-feature master
## Edit some files
$ git add <file>
$ git commit -m "Start a feature"
## Edit some files
$ git add <file>
$ git commit -m "Finish a feature"
## Merge in the new-feature branch
$ git checkout master
$ git merge new-feature
$ git branch -d new-feature
```
This is a common workflow for short-lived topic branches that are used more for isolated development than as an organizational tool for longer-running features.
Also note that Git should not complain about the git branch -d, since new-feature is now accessible from the master branch.
### 3-Way Merge
The next example is very similar, but requires a 3-way merge because master progresses while the feature is in-progress. This is a common scenario for large features or when several developers are working on a project simultaneously.
```bash
## Start a new feature
$ git checkout -b new-feature master
## Edit some files
$ git add <file>
$ git commit -m "Start a feature"
## Edit some files
$ git add <file>
$ git commit -m "Finish a feature"
## Develop the master branch
$ git checkout master
## Edit some files
$ git add <file>
$ git commit -m "Make some super-stable changes to master"
## Merge in the new-feature branch
$ git merge new-feature
$ git branch -d new-feature
```
### Rebase for merge
Problem: You are working with a few experienced devs constantly improving an online shopping site. After you complete the first assignment and are ready to commit to master, you find someone merged a change that affects or overlaps with the ones you made, and it could lead to bugs in the online shopping website.
Solution: Situations like these are a big example of when you'd want to rebase. Let's say when you created your branch off of the master branch, the master branch was on commit No. 1. Every commit in your branch was put on top of commit #1. When you're ready to merge your branch to master, you find other developers have some changes and the most recent commit is commit No. 4. **Rebasing is taking all your branch's commits and adding them on top of commit No. 4 instead of commit No. 1.** If you consider commit No. 1 as the "base" of your branch, you're changing that base to the most recent one, commit No. 4. Hence why it's called rebasing!
#### Rebase with conflict
```bash
git rebase master
# When there is conflict, the rebase will pause.
# You have to manually solve the conflict
# Add the resolved files to stage and commit it
git add <Resolved-File>
git commit
# Continue the rebase process
git rebase --continue
```
#### Rebase interactively
Rebase to master branch
$ git rebase -i master
Rebase with fixup and squash
* squash (s for short), which melds the commit into the previous one (the one in the line before)
* fixup (f for short), which acts like “squash”, but discards this commit’s message
```bash
# Commit your changes
$ git add .
$ git commit
$ git rebase -i master
# Update the commit list in the pop-up editor
# fixup <COMMIT-ID>
# squash <COMMIT-ID> <Message>
```
Rebase with autosquash
$ git rebase -i --autosquash master
## Git reword
Reword the last commit message. The command below will open an editor to let you change previous commit message
$ git commit --amend
Reword the last commit message and author.
$ git commit --amend --author="Other author <<EMAIL>>"
## Git Submodule
Add other repository into your existing project as submodule
$ git submodule add <Git-Repository-URL> <your-repo-folder>
Add other repository into your existing project as submodule under specific location
$ git submodule add <Git-Repository-URL> <your-specific-location>/<your-repo-folder>
Add other repository into your existing project for specific branch
$ git submodule add <Git-Repository-URL> -b <branch-name> <your-repo-folder>
Initialize and update submodule
$ git submodule init
$ git submodule update
## Git tag
Listing the existing tags
$ git tag -l
Creating Tags
Git supports two types of tags: lightweight and annotated. IMO, I always prefer the annotated tag, because the tag is supposed to mark milestone in the repository's history.
Annotated Tags
$ git tag -a v1.1 -m "release version 1.1"
Lightweight tags
    $ git tag v1.1-lw
Push tags
$ git push origin v1.1
Delete a tag (local)
    $ git tag -d v1.1
Delete a tag (remote)
$ git push --delete origin v1.1
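Push all local tags at once, or check out the code at a tag into a new branch (the branch name here is just an example):

    $ git push origin --tags
    $ git checkout -b hotfix-v1.1 v1.1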
<file_sep>+++
date = "2016-03-04T14:59:31+11:00"
title = "KVM Notes"
draft = true
+++
Install Virtual Machine
```
sudo virt-install \
--virt-type=kvm \
--name centos7 \
--ram 2048 \
--vcpus=2 \
--os-variant=centos7.0 \
--hvm \
--cdrom=/var/lib/libvirt/boot/CentOS-7-x86_64-Minimal-1804.iso \
--network=bridge=virbr0,model=virtio \
--network=bridge=br0,model=virtio \
--graphics vnc \
--disk path=/var/lib/libvirt/images/centos7.qcow2,size=40,bus=virtio,format=qcow2
```
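Basic guest management after the install (the domain name matches the --name used above)
```
virsh list --all
virsh start centos7
virsh console centos7
virsh shutdown centos7
```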
<file_sep>+++
title = "Azure: RBAC - 2"
weight = 1
description="Difference - Classic subscription, Azure Roles & Azure AD Roles"
+++
## Roles
- Classic subscription administrator roles
- Azure roles
- Azure Active Directory (Azure AD) roles
### History
> When Azure was initially released, access to resources was managed with just three administrator roles: Account Administrator, Service Administrator, and Co-Administrator. Later, Azure role-based access control (Azure RBAC) was added. Azure RBAC is a newer authorization system that provides fine-grained access management to Azure resources. Azure RBAC includes many built-in roles, can be assigned at different scopes, and allows you to create your own custom roles. To manage resources in Azure AD, such as users, groups, and domains, there are several Azure AD roles.
### High-level view
{{<mermaid>}}
graph TB
Root(Global Admin,User Access Admin-elevated access)
AADT(Azure Active Directory Tenant)
RMG(Root Management Group)
MG(Management Group)
Sub(Subscriptions, Azure Account, Account Admin)
RG(Resource Group)
R(Resource)
AADT --> Root
Root --> RMG
GAAA([Global_Admin, Application_Admin])
OCRU([Owner,Contributor, Reader,User_Acess_Admin])
subgraph Azure_AD_Roles
GAAA
AADT
end
Root;
subgraph Azure_Roles
OCRU
RMG --> MG
MG --> Sub
subgraph Classic_Subscription_Admin_Roles
Sub --> RG
RG --> R
end
end
{{</mermaid >}}
### Classic subscription administrator roles
Account Administrator, Service Administrator, and Co-Administrator are the three classic subscription administrator roles in Azure. Classic subscription administrators have full access to the Azure subscription.
#### Account Administrator
* per Azure account
* Manage billing in the Azure portal
* Manage all subscriptions in an account
* Create new subscriptions
* Cancel subscriptions
* Change the billing for a subscription
* Change the Service Administrator
* Conceptually, the billing owner of the subscription.
* The Account Administrator has no access to the Azure portal.
#### Service Administrator
* 1 per Azure subscription
* Manage services in the Azure portal
* Cancel the subscription
* Assign users to the Co-Administrator role
* By default, for a new subscription, the Account Administrator is also the Service Administrator.
* The Service Administrator has the equivalent access of a user who is assigned the Owner role at the subscription scope.
* The Service Administrator has full access to the Azure portal.
#### Co-Administrator
* 200 per subscription
* Same access privileges as the Service Administrator, but can’t change the association of subscriptions to Azure directories
* Assign users to the Co-Administrator role, but cannot change the Service Administrator
* The Co-Administrator has the equivalent access of a user who is assigned the Owner role at the subscription scope.
### Azure account and Azure subscriptions
An Azure account represents a billing relationship. An Azure account is a user identity, one or more Azure subscriptions, and an associated set of Azure resources. The person who creates the account is the Account Administrator for all subscriptions created in that account. That person is also the default Service Administrator for the subscription.
Azure subscriptions help you organize access to Azure resources. They also help you control how resource usage is reported, billed, and paid for.
Each subscription can have a different billing and payment setup, so you can have different subscriptions and different plans by office, department, project, and so on.
### Azure roles
Azure RBAC is an authorization system built on Azure Resource Manager that provides fine-grained access management to Azure resources.
#### Owner
* Full access to all resources
* Delegate access to others
* The Service Administrator and Co-Administrators are assigned the Owner role at the subscription scope
* Applies to all resource types.
#### Contributor
* Create and manage all of types of Azure resources
* Create a new tenant in Azure Active Directory
* Cannot grant access to others
* Applies to all resource types.
#### Reader
* View Azure resources
* Applies to all resource types.
#### User Access Administrator
* Manage user access to Azure resources
### Azure AD roles
Azure AD roles are used to manage Azure AD resources in a directory such as create or edit users, assign administrative roles to others, reset user passwords, manage user licenses, and manage domains.
#### Global Administrator
* Manage access to all administrative features in Azure Active Directory, as well as services that federate to Azure Active Directory
* Assign administrator roles to others
* Reset the password for any user and all other administrators
* The person who signs up for the Azure Active Directory tenant becomes a Global Administrator.
#### User Administrator
* Create and manage all aspects of users and groups
* Manage support tickets
* Monitor service health
* Change passwords for users, Helpdesk administrators, and other User Administrators
#### Billing Administrator
* Make purchases
* Manage subscriptions
* Manage support tickets
* Monitors service health
### Azure roles VS Azure AD roles
Azure roles | Azure AD roles
-----|------
Manage access to Azure resources | Manage access to Azure Active Directory resources
Supports custom roles |Supports custom roles
Scope can be specified at multiple levels (management group, subscription, resource group, resource) | Scope is at the tenant level
Role information can be accessed in Azure portal, Azure CLI, Azure PowerShell, Azure Resource Manager templates, REST API | Role information can be accessed in Azure admin portal, Microsoft 365 admin center, Microsoft Graph, AzureAD PowerShell
### Overlap
By default, Azure roles and Azure AD roles do not span Azure and Azure AD. However, if a Global Administrator elevates their access by choosing the Access management for Azure resources switch in the Azure portal, the Global Administrator will be granted the User Access Administrator role (an Azure role) on all subscriptions for a particular tenant.
<file_sep>+++
date = "2017-08-04T14:59:31+11:00"
title = "Monero Blockchain notes"
draft = true
+++
### Prerequisites
* You have a VPS or Dedicated Server.
* You have some basic concepts of blockchain and cryptocurrency.
* You are happy to play around with infrastructure.
### Windows
## Infrastructure
* Machine : VPS
* OS : Windows 2012 R2 64bit
* CPU:
* Memory:
* Storage:
## Install Msys2
* Update package
```bash
pacman -Syuu
```
* Install dependencies:
```bash
pacman -S mingw-w64-x86_64-toolchain make mingw-w64-x86_64-cmake mingw-w64-x86_64-boost \
mingw-w64-x86_64-openssl mingw-w64-x86_64-zeromq mingw-w64-x86_64-libsodium
```
* Build the
### Linux <file_sep>+++
date = "2016-07-10T14:59:31+11:00"
title = "Build mobile app with web tech"
description="JavaScript, CSS, HTML are not just web tech stacks, but also available for Mobile "
+++
## What is mobile app
> *A mobile application, basically, is a computer generated program designed and developed to run on iPhone, Android Smartphone, and many other mobile devices. In a nutshell, there are three types of apps*
> *__Native apps__ are specific to a given mobile platform (iOS or Android), using the development tools and language that the respective platform supports. Usually they look and perform the best.*
> *__HTML5 apps__ use standard web technologies—typically HTML5, JavaScript and CSS. This write-once-run-anywhere approach to mobile development creates cross-platform mobile applications that work on multiple devices.*
> *__Hybrid apps__ make it possible to embed HTML5 apps inside a thin native container, combining the good parts of __Native app__ and __HTML5 app__ elements.*
## Mobile app development
According to the brief history above, we can imagine that the mobile developer community has become as fragmented as the market. Mobile software developers work with different programming environments, different tools, and different programming languages.
After a few years of improvement, we can see some __Hybrid app__ based frameworks becoming more and more popular. `ionic`, `nativescript` and `react native` are the most promising frameworks, which we should really look into.
## Introduction of ionic developement
### Prerequisites
* Here we just introduce ionic 1.x. When I started investigating `ionic`, `ionic 2` had only just come out. `ionic 2` study is on my todo list.
* You are familiar with web technologies, such as, HTML5, CSS, JavaScript, and you should have experience of nodejs and relevant skills.
### Getting started
* install ionic 1.x
* setup ionic and create new project ```demoApp```
```bash
ionic start demoApp slidemenu
cd demoApp
ionic platform add android
ionic build android
ionic emulate android
```
### Install packages
* Use `npm install` to install packages
* Following is the `package.json`. You can tailor it to your own needs.
```json
{
"name": "ionic-project",
"version": "1.0.0",
"description": "An Ionic project",
"dependencies": {
"gulp": "^3.5.6",
"gulp-sass": "^2.0.4",
"gulp-concat": "^2.2.0",
"gulp-minify-css": "^0.3.0",
"gulp-rename": "^1.2.0"
},
"devDependencies": {
"bower": "^1.3.3",
"gulp-util": "^2.2.14",
"shelljs": "^0.3.0"
},
"cordovaPlugins": [
"cordova-plugin-device",
"cordova-plugin-console",
"cordova-plugin-whitelist",
"cordova-plugin-splashscreen",
"cordova-plugin-statusbar",
"ionic-plugin-keyboard"
],
"cordovaPlatforms": []
}
```
### Debug
* Browser is the best option for ionic mobile development debug tools
* Use Telerik AppBuilder to debug
### Test on emulator or device
* Android
* Download and install [Android SDK](https://developer.android.com/studio/index.html#downloads)
    * Download and install at least one SDK platform.
    * ionic only supports Android 4.1.x or later, so you should install an SDK platform at version 18+.
* install x86 or x86_64 image for windows environment
* install Extra plugins: Google USB driver, X86 Emulator Accelerator
* Create AVD for your mobile app testing
* iPhone
* Install AppBuilder on Visual Studio
* Install Genymotion
### Troubleshooting
* Android emulator accelerator error because the installed version is too low to support the system image
* uninstall old version intel HAXM
* install new version manually from `<Android_SDK_Location>\extras\intel\Hardware_Accelerated_Execution_Manager`
* If app is not working on emulator, check cordova plugins or manually install cordova plugins
```bash
ionic plugins list
ionic plugins add XXXXX
```
<file_sep>#!/bin/bash
echo "commit blog content to github"
git config user.name "<NAME>"
git config user.email "<EMAIL>"
# Add changes under folder content to git.
git add content/*
git add static/img/*
# Commit changes.
msg="commit blog content `date`"
# if NOT "%1"=="" set msg=%1
if [ $# -eq 1 ]
then msg=$1
fi
git commit -m "$msg"
# Push source and build repos.
git push origin master<file_sep>+++
title="AWS - Cert Exams"
description="AWS Cert Exam Tips"
weight=9
draft=true
+++
## Solution Architect Associate
### Domains
### Tips
* If an instance in VPC is unable to communicate over certain protocol / port with another instance in the same VPC, then the problem is the security setting:
- Security group / NACL of the instance and / or
- Security Group / NACL of the destination instance
    - The problem will never be the routing table configuration, due to the default route entry
    - NACLs are stateless. To allow certain traffic through them, both inbound and outbound rules must allow it.
* Default VPC & Default Security Group
- Have an inbound rule that allows all traffic sourced from the security group itself
- Have all the outbound traffic allowed by default
### Scenario 1
- EC2-A and EC2-B both are in the same VPC
- EC2-A is protected by NACL-A and SecGrp-A
- EC2-B is protected by NACL-B and SecGrp-B
- EC2-A can ICMP ping EC2-B
- EC2-B can ICMP ping EC2-A
__If EC2-A can ping EC2-B, but EC2-B can NOT ping EC2-A, which component will cause this problem?__
- SecGrp-B outbound
- NACL-B outbound
- NACL-A inbound
- SecGrp-A inbound
## Solution Architect Professional
## Developer Associate
## Developer Professional
<file_sep>+++
date = "2017-01-11T11:59:31+11:00"
title = "JIRA Practices"
description="JIRA is a proprietary issue tracking product, developed by Atlassian. Today I recap the bullet points about how I work with client within JIRA "
draft="true"
+++
## What is JIRA
> Jira is a proprietary issue tracking product, developed by Atlassian. It provides bug tracking, issue tracking, and project management functions. Although normally styled JIRA, the product name is not an acronym, but a truncation of Gojira, the Japanese name for Godzilla,[6] itself a reference to Jira's main competitor, Bugzilla. It has been developed since 2002.
## How to cooperate with client via JIRA
> The instruction below is only for the client who has independent JIRA instance. Here I am not going to discuss how to cooperate with multiple clients and projects on the same JIRA instance. IMO, the strategy for that would be case by case.
### Prerequisites
* Your client's business name is __ABC__. You help them to manage their JIRA. You are the administrator of JIRA instance.
* You have created a project for client, named **ABC IT Project**
* You want to allow the client to access this project on JIRA, but you want to avoid unintended client updates messing up the project management.
* You want to cooperate with other project team which build some product communicating with your system
* You want to integrate with CI / CD tools
### Requirement of permission control
* Product owner, ScrumMaster, and development team should be able to track the development progress and be aware of critical issues, etc.
* Business owners, executives, and managers as internal stakeholders should see the progress firsthand so that they can suggest course corrections.
* Sales, marketing, support, legal, compliance, and other Scrum and non-Scrum development teams might want to attend sprint reviews to provide area-specific feedback, etc.
* Customers, users, partners, and regulators as External stakeholders can provide valuable feedback to the Scrum team
### Change default permission scheme
* Default permission scheme is the built-in scheme, which you cannot delete, but you can change it.
* Choose `Issues` > `Permission schemes`
* Change all permissions from `Project Access - Any logged in user` to `Project role - Administrator` except the following items.
* View Development Tools
* View Read-Only Workflow
* Assignable User
* Delete Own Comments
* Edit Own Comments
* Delete Own Attachments
* Delete Own Worklogs
* Edit Own Worklogs
* Work On Issues
* Copy the default permission scheme and rename it to `<client_business_name> Permission Scheme`. In this case it would be `ABC Permission Scheme`.
* Create a new group named **ABC Group**,
* Update `ABC Permission Scheme` by granting `ABC Group` to some items.
* Browse Projects
* Create Issues
* Update the permission scheme of the project `ABC IT Project` to `ABC Permission Scheme`.
### Change user's group
* Choose `User management` > `User`
* Add the ABC's user to group `ABC Group`
* Remove the user from default `jira-software-user` and make sure the checkbox `Access JIRA Software` is selected.
### Time Tracking and Estimation
#### Enable Time Tracking
* Choose `Issues` > `Time Tracking`
* Enable the `Time Tracking` if it is inactive.
* Choose `Boards` > `View All Boards`
* Choose `Board Settings` of the board
* Choose `Estimation`
* Choose `Remaining Estimate and Time Spent` as Time Tracking
<file_sep>+++
title = "OS"
weight = 5
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
title = "IO, Json & XML"
description="Golang Introduction: IO, JSON, XML , Gob, Crypto "
weight=4
+++
### IO - Read & Write
#### Read from user input
```go
var (
firstName, lastName, s string
i int
f float32
input = "56.12 / 5212 / Go"
format = "%f / %d / %s"
)
func main() {
fmt.Println("Please enter your full name: ")
fmt.Scanln(&firstName, &lastName)
// fmt.Scanf("%s %s", &firstName, &lastName)
fmt.Printf("Hi %s %s!\n", firstName, lastName) // Hi <NAME>
fmt.Sscanf(input, format, &f, &i, &s)
fmt.Println("From the string we read: ", f, i, s)
    // output: From the string we read: 56.12 5212 Go
}
```
#### Read command-line argument
```go
func main() {
who := "Harry "
if len(os.Args) > 1 {
who += strings.Join(os.Args[1:], " ")
}
fmt.Println("Good Morning", who)
}
```
#### Read from file
* Read by lines
```go
func main() {
inputFile, inputError := os.Open("hello.go")
if inputError != nil {
fmt.Printf("An error occurred on opening the inputfile\n" +
"Does the file exist?\n" +
"Have you got acces to it?\n")
return // exit the function on error
}
defer inputFile.Close() // Close file before exits the main func
inputReader := bufio.NewReader(inputFile)
for {
inputString, readerError := inputReader.ReadString('\n')
if readerError == io.EOF {
return
}
fmt.Printf("The input was: %s", inputString)
}
}
```
***NOTE: The End-of-line characters of text-files in Unix end on \n, but in Windows this is \r\n. By using the method ReadString or ReadBytes with \n as a delimiter you don’t have to worry about this.***
* Read entire file into a string
```go
func main() {
inputFile := "hello.go"
outputFile := "hello.go.txt"
buf, err := ioutil.ReadFile(inputFile)
if err != nil {
fmt.Fprintf(os.Stderr, "File Error: %s\n", err)
// panic(err.Error())
}
fmt.Printf("%s\n", string(buf))
    err = ioutil.WriteFile(outputFile, buf, 0644)
if err != nil {
panic(err.Error())
}
}
```
* Read into buffer
```go
for {
n, err := inputReader.Read(buf)
if (n == 0) { break}
}
```
* Read columns of data, such as csv
```go
func main() {
file, err := os.Open("csv_data.txt")
if err != nil {
panic(err)
}
defer file.Close()
var col1, col2, col3 []string
for {
var v1, v2, v3 string
_, err := fmt.Fscanln(file, &v1, &v2, &v3)
// scans until newline
if err != nil {
break
}
col1 = append(col1, v1)
col2 = append(col2, v2)
col3 = append(col3, v3)
}
fmt.Println(" Col 1 ",col1)
fmt.Println(" Col 3 ",col2)
fmt.Println(" Col 3 ",col3)
}
```
* Read from compressed file
```go
func main() {
fName := "public.zip"
var r *bufio.Reader
fi, err := os.Open(fName)
if err != nil {
fmt.Fprintf(os.Stderr, "%v, Can’t open %s: error: %s\n", os.Args[0],
fName, err)
os.Exit(1)
}
fz, err := gzip.NewReader(fi)
if err != nil {
r = bufio.NewReader(fi)
} else {
r = bufio.NewReader(fz)
}
for {
line, err := r.ReadString('\n')
if err != nil {
fmt.Println("Done reading file")
os.Exit(0)
}
fmt.Println(line)
}
}
```
#### Read with flag package
* The package flag has an extended functionality for parsing of command-line options.
* flag.PrintDefaults() prints out the usage information of the defined flag(s)
```go
var NewLine = flag.Bool("n", false, "print on newline")
// echo -n flag, of type *bool
const (
Space = " "
Newline = "\n"
)
func main() {
flag.PrintDefaults()
flag.Parse()
var s string = ""
for i := 0; i < flag.NArg(); i++ {
if i > 0 {
s += Space
}
s += flag.Arg(i)
}
if *NewLine { // -n is parsed, flag becomes true
s += Newline
}
os.Stdout.WriteString(s)
}
// :: Output
// -------------------------
// command: go run program.go -n abc
// out :
// -n print on newline
// abc
//----------------------------
// command: go run program.go -a abc
// flag provided but not defined: -a
// Usage of /tmp/go-build701354435/b001/exe/program:
// -n print on newline
// exit status 2
```
#### Read with flag parsing & buffer
* Assume there is a file named test.txt with some content
* Run Command: go build program.go && ./program test
```go
func cat(r *bufio.Reader) {
for {
buf, err := r.ReadBytes('\n')
if err == io.EOF {
break
}
fmt.Fprintf(os.Stdout, "%s", buf)
}
return
}
func main() {
flag.Parse()
if flag.NArg() == 0 {
cat(bufio.NewReader(os.Stdin))
}
for i := 0; i < flag.NArg(); i++ {
f, err := os.Open(flag.Arg(i))
if err != nil {
fmt.Fprintf(os.Stderr, "%s:error reading from %s: %s\n",
os.Args[0], flag.Arg(i), err.Error())
continue
}
cat(bufio.NewReader(f))
}
}
```
#### Read with flag parsing & slice
* Only different from previous sample is the cat function
```go
func cat(f *os.File) {
const NBUF = 512
var buf [NBUF]byte
for {
switch nr, err := f.Read(buf[:]); true {
case nr < 0:
fmt.Fprintf(os.Stderr, "cat: error reading: %s\n", err.Error())
os.Exit(1)
case nr == 0: // EOF
return
case nr > 0:
if nw, ew := os.Stdout.Write(buf[0:nr]); nw != nr {
fmt.Fprintf(os.Stderr, "cat: error writing: %s\n",
ew)
}
}
}
}
func main() {
flag.Parse()
if flag.NArg() == 0 {
cat(os.Stdin)
}
for i := 0; i < flag.NArg(); i++ {
f, err := os.Open(flag.Arg(i))
if err != nil {
fmt.Fprintf(os.Stderr, "%s:error reading from %s: %s\n",
os.Args[0], flag.Arg(i), err.Error())
os.Exit(1)
}
cat(f)
f.Close()
}
}
```
#### Write to a file
* Flags for open output file
* os.O_RDONLY: the read flag for read-only access
* os.WRONLY: the write flag for write-only access
* os.O_CREATE : the create flag: create the file if it doesn’t exist
* os.O_TRUNC : the truncate flag: truncate to size 0 if the file already exists
* Write with buffer
```go
func main() {
outputFile, outputError := os.OpenFile("output.txt",
os.O_WRONLY|os.O_CREATE, 0666)
if outputError != nil {
fmt.Printf("An error occurred with file creation\n")
return
}
defer outputFile.Close()
outputWriter := bufio.NewWriter(outputFile)
outputString := "hello world!\n"
for i := 0; i < 10; i++ {
outputWriter.WriteString(outputString)
}
outputWriter.Flush()
}
```
* Write a file without buffer
* Write the same content as previous sample
```go
func main() {
os.Stdout.WriteString("hello, world\n")
f, _ := os.OpenFile("output.txt", os.O_CREATE|os.O_WRONLY, 0)
defer f.Close()
for i := 0; i < 10; i++ {
f.WriteString("hello world!\n")
}
}
```
#### Write to standard output: interface
* The interface is fmt.Fprintf
* The fmt.Fprintf writes to a variable of type io.Writer.
* Any type that has a Write method, including os.Stdout, files (like os.File), pipes, network connections, channels, etc..., and also to write buffers from the bufio package.
```go
func main() {
// unbuffered: os.Stdout implements io.Writer
fmt.Fprintf(os.Stdout, "%s\n", "hello world! - unbuffered")
// buffered:
buf := bufio.NewWriter(os.Stdout)
// and now so does buf:
fmt.Fprintf(buf, "%s\n", "hello world! - buffered")
buf.Flush()
}
```
#### Copy
* Simply use io.Copy method
```go
func main() {
CopyFile("target_hello.txt", "hello.go")
fmt.Println("Copy done!")
}
func CopyFile(dstName, srcName string) (written int64, err error) {
src, err := os.Open(srcName)
if err != nil {
return
}
defer src.Close()
dst, err := os.OpenFile(dstName, os.O_WRONLY|os.O_CREATE, 0644)
if err != nil {
return
}
defer dst.Close()
return io.Copy(dst, src)
}
```
### JSON
* Mapping of Go type and Js type
* Go: bool -> Js: boolean
* Go: float64 -> Js: numbers
* Go: string -> Js: strings
* Go: nil -> Js: null
```go
type Address struct {
Type string
City string
Country string
}
type VCard struct {
FirstName string
LastName string
Addresses []*Address
Remark string
}
func main() {
pa := &Address{"private", "Aartselaar", "Belgium"}
wa := &Address{"work", "Boom", "Belgium"}
vc := VCard{"Jan", "Kersschot", []*Address{pa, wa}, "none"}
// fmt.Printf("%v: \n", vc)
// {<NAME> [0x126d2b80 0x126d2be0] none}:
// JSON format:
js, _ := json.Marshal(vc)
fmt.Printf("JSON format: %s", js)
// using an encoder:
file, _ := os.OpenFile("vcard.json", os.O_CREATE|os.O_WRONLY, 0)
defer file.Close()
enc := json.NewEncoder(file)
err := enc.Encode(vc)
if err != nil {
fmt.Println("Error in encoding json")
}
}
// cat vcard.json | jq # jq is a tool for json file
// ----------------------------------------------------
// {
// "FirstName": "Jan",
// "LastName": "Kersschot",
// "Addresses": [
// {
// "Type": "private",
// "City": "Aartselaar",
// "Country": "Belgium"
// },
// {
// "Type": "work",
// "City": "Boom",
// "Country": "Belgium"
// }
// ],
// "Remark": "none"
// }
```
### XML
* Sample xml
```xml
<Person>
<FirstName>Laura</FirstName>
<LastName>Lynn</LastName>
</Person>
```
* The encoding/xml package also implements a simple XML parser (SAX) to read XML-data and parse it into its constituents.
```go
var t, token xml.Token
var err error
func main() {
input :=
"<Person><FirstName>Laura</FirstName><LastName>Lynn</LastName></Person>"
inputReader := strings.NewReader(input)
p := xml.NewDecoder(inputReader)
for t, err = p.Token(); err == nil; t, err = p.Token() {
switch token := t.(type) {
case xml.StartElement:
name := token.Name.Local
fmt.Printf("Token name: %s\n", name)
for _, attr := range token.Attr {
attrName := attr.Name.Local
attrValue := attr.Value
fmt.Printf("An attribute is: %s %s\n", attrName,
attrValue)
// ...
}
case xml.EndElement:
fmt.Println("End of token")
case xml.CharData:
content := string([]byte(token))
fmt.Printf("This is the content: %v\n", content)
// ...
default:
// ...
}
}
}
//----------------------
// Token name: Person
// Token name: FirstName
// This is the content: Laura
// End of token
// Token name: LastName
// This is the content: Lynn
// End of token
// End of token
```
### Gob - Go binary format
* Simulate network communication
```go
type P struct {
X, Y, Z int
Name string
}
type Q struct {
X, Y *int32
Name string
}
func main() {
// Initialize the encoder and decoder. Normally enc and dec would
// be bound to network connections and the encoder and decoder
// would run in different processes.
var network bytes.Buffer
// Stand-in for a network connection
enc := gob.NewEncoder(&network) // Will write to network.
dec := gob.NewDecoder(&network)
// Will read from network.
// Encode (send) the value.
err := enc.Encode(P{3, 4, 5, "Pythagoras"})
if err != nil {
log.Fatal("encode error:", err)
}
// Decode (receive) the value.
var q Q
err = dec.Decode(&q)
if err != nil {
log.Fatal("decode error:", err)
}
fmt.Printf("%q: {%d,%d}\n", q.Name, *q.X, *q.Y)
// "Pythagoras": {3,4}
}
```
* Simulate file operation
```go
type Address struct {
Type string
City string
Country string
}
type VCard struct {
FirstName string
LastName string
Addresses []*Address
Remark string
}
func main() {
pa := &Address{"private", "Aartselaar", "Belgium"}
wa := &Address{"work", "Boom", "Belgium"}
vc := VCard{"Jan", "Kersschot", []*Address{pa, wa}, "none"}
// fmt.Printf("%v: \n", vc)
// {<NAME> [0x126d2b80 0x126d2be0] none}:
// using an encoder:
file, _ := os.OpenFile("vcard.gob", os.O_CREATE|os.O_WRONLY, 0)
defer file.Close()
enc := gob.NewEncoder(file)
err := enc.Encode(vc)
if err != nil {
log.Println("Error in encoding gob")
}
}
```
### Crypto
* The hash package: implements the adler32, crc32, crc64 and fnv checksums;
* The crypto package: implements other hashing algorithms like md4, md5, sha1, etc. and
complete encryption implementations for aes, blowfish, rc4, rsa, xtea, etc.
```go
func main() {
hasher := sha1.New()
io.WriteString(hasher, "test")
b := []byte{}
fmt.Printf("Result: %x\n", hasher.Sum(b))
fmt.Printf("Result: %d\n", hasher.Sum(b))
hasher.Reset()
data := []byte("We shall overcome!")
n, err := hasher.Write(data)
if n != len(data) || err != nil {
log.Printf("Hash write error: %v / %v", n, err)
}
checksum := hasher.Sum(b)
fmt.Printf("Result: %x\n", checksum)
}
//----------------------------------
// Result: a94a8fe5ccb19ba61c4c0873d391e987982fbbd3
// Result: [169 74 143 229 204 177 155 166 28 76 8 115 211 145 233 135 152 47 187 211]
// Checksum: e2222bfc59850bbb00a722e764a555603bb59b2a
```
<file_sep>+++
title = "AWS: SQS,SNS,SES - 1"
description = "Introduction of SQS, SNS, SES"
weight=7
+++
## SQS
Amazon Simple Queue Service (SQS) is a fully managed message queuing service that enables you to decouple and scale microservices, distributed systems, and serverless applications. SQS eliminates the complexity and overhead associated with managing and operating message oriented middleware, and empowers developers to focus on differentiating work. Using SQS, you can send, store, and receive messages between software components at any volume, without losing messages or requiring other services to be available. Get started with SQS in minutes using the AWS console, Command Line Interface or SDK of your choice, and three simple commands.
__Queue types__
### Standard Queues
* Unlimited Throughput: Standard queues support a nearly unlimited number of transactions per second (TPS) per API action.
* At-Least-Once Delivery: A message is delivered at least once, but occasionally more than one copy of a message is delivered.
* Best-Effort Ordering: Occasionally, messages might be delivered in an order different from which they were sent.
### FIFO Queues
* High Throughput: By default, FIFO queues support up to 300 messages per second (300 send, receive, or delete operations per second). When you batch 10 messages per operation (maximum), FIFO queues can support up to 3,000 messages per second. To request a quota increase, file a support request.
* Exactly-Once Processing: A message is delivered once and remains available until a consumer processes and deletes it. Duplicates aren't introduced into the queue.
* First-In-First-Out Delivery: The order in which messages are sent and received is strictly preserved (i.e. First-In-First-Out).
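A minimal AWS CLI sketch of the send / receive / delete cycle on a standard queue (the queue name `demo-queue` is just a placeholder):
```bash
# Create a standard queue and capture its URL
QUEUE_URL=$(aws sqs create-queue --queue-name demo-queue --query 'QueueUrl' --output text)
# Producer: send a message
aws sqs send-message --queue-url $QUEUE_URL --message-body "order-12345 created"
# Consumer: receive a message, process it, then delete it
RECEIPT=$(aws sqs receive-message --queue-url $QUEUE_URL --query 'Messages[0].ReceiptHandle' --output text)
aws sqs delete-message --queue-url $QUEUE_URL --receipt-handle $RECEIPT
```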
## SNS
Amazon Simple Notification Service (SNS) is a fully managed messaging service for both system-to-system and app-to-person (A2P) communication. It enables you to communicate between systems through publish/subscribe (pub/sub) patterns that enable messaging between decoupled microservice applications or to communicate directly to users via SMS, mobile push and email.
> There are two main uses for SNS. First, you can use it as a traditional pub/sub messaging system. An example here is a microservice architecture where Service A may be interested in updates to objects in Service B. Rather than Service B directly notifying Service A about the update, Service B can send a message to an SNS Topic with details about the update. Service A can subscribe to the topic and process the messages as they arrive. The main alternatives to using SNS in this manner are tools like RabbitMQ in pub/sub mode or NATS.
> The second core use case for SNS is to deliver messages to large numbers of end users, such as via mobile push notifications or SMS messages. SNS allows for extremely high fan-out in these use cases, as you can have up to 12.5 million subscribers to a single topic. This is great for blasting your users with updates about a new sale or news in your application.
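A minimal pub/sub sketch with the AWS CLI (topic name and email address are placeholders):
```bash
# Create a topic and subscribe an email endpoint to it
TOPIC_ARN=$(aws sns create-topic --name demo-topic --query 'TopicArn' --output text)
aws sns subscribe --topic-arn $TOPIC_ARN --protocol email --notification-endpoint user@example.com
# Publish a message; every confirmed subscriber receives it
aws sns publish --topic-arn $TOPIC_ARN --subject "Update" --message "Object updated in Service B"
```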
## SES
Amazon Simple Email Service (SES) is a cost-effective, flexible, and scalable email service that enables developers to send mail from within any application. You can configure Amazon SES quickly to support several email use cases, including transactional, marketing, or mass email communications. Amazon SES's flexible IP deployment and email authentication options help drive higher deliverability and protect sender reputation, while sending analytics measure the impact of each email. With Amazon SES, you can send email securely, globally, and at scale.
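A minimal sketch of sending a transactional email with the AWS CLI (both addresses are placeholders, and the sender must be a verified identity or the account must be out of the SES sandbox):
```bash
# Verify the sender identity once (SES sends a confirmation email)
aws ses verify-email-identity --email-address sender@example.com
# Send a simple transactional email
aws ses send-email \
  --from sender@example.com \
  --destination "ToAddresses=customer@example.com" \
  --message "Subject={Data=Your order has shipped,Charset=utf8},Body={Text={Data=Order 12345 is on its way,Charset=utf8}}"
```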
> The core use case for SES is sending email. There are two buckets of outgoing email. The first is transactional email. Transactional email is when you send an automated message to a specific person. For example, you could send a customer an email that their order has shipped, or you can alert a user about the subscription ending.
> The second bucket of outgoing email is marketing email. This is when you send the same email to a large number of users at the same time. This could be notifying your email list of a large upcoming sale or making an announcement of a new product.<file_sep>+++
title = "AWS: EKS - 3"
description = "Cluster Autoscaler, Horizontal Pod Autoscaler, Vertical Pod Autoscaler"
weight=13
+++
## EKS - Part 3
### Cluster Autoscaler
The Kubernetes Cluster Autoscaler automatically adjusts the number of nodes in your cluster when pods fail to launch due to lack of resources or when nodes in the cluster are underutilized and their pods can be rescheduled onto other nodes in the cluster.
### Strategy of auto scaling
* Stateful application
If you are running a stateful application across multiple Availability Zones that is backed by Amazon EBS volumes and using the Kubernetes Cluster Autoscaler, you should configure multiple node groups, each scoped to a single Availability Zone.
* Other option
Create a single node group that spans multiple Availability Zones.
#### Single managed node group
Create an Amazon EKS cluster with a single managed node group
eksctl create cluster --name pg-smng --version 1.15 --managed --asg-access
#### Node group per AZ
Create a cluster with a dedicated managed node group for each Availability Zone
eksctl create cluster --name pg-ngaz --version 1.15 --without-nodegroup
For each Availability Zone in your cluster, use the following eksctl command to create a node group.
eksctl create nodegroup \
--cluster pg-ngaz \
--node-zones ap-southeast-2a \
--name ap-southeast-2a \
--asg-access \
--node-type t3.medium \
--nodes-min 1 --nodes 1 \
--nodes-max 3 --managed
* Node Group IAM Policy
The Cluster Autoscaler requires the following IAM permissions to make calls to AWS APIs on your behalf. The tool __eksctl__ automatically provides and attaches to your worker node IAM roles, when it creates the node groups.
{
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"autoscaling:DescribeAutoScalingGroups",
"autoscaling:DescribeAutoScalingInstances",
"autoscaling:DescribeLaunchConfigurations",
"autoscaling:DescribeTags",
"autoscaling:SetDesiredCapacity",
"autoscaling:TerminateInstanceInAutoScalingGroup",
"ec2:DescribeLaunchTemplateVersions"
],
"Resource": "*",
"Effect": "Allow"
}
]
}
#### Deploy Autoscaler
Deploy the Cluster Autoscaler to your cluster with the following command.
kubectl apply -f https://raw.githubusercontent.com/kubernetes/autoscaler/master/cluster-autoscaler/cloudprovider/aws/examples/cluster-autoscaler-autodiscover.yaml
Add the cluster-autoscaler.kubernetes.io/safe-to-evict annotation to the deployment with the following command.
kubectl -n kube-system annotate deployment.apps/cluster-autoscaler cluster-autoscaler.kubernetes.io/safe-to-evict="false"
Edit the Cluster Autoscaler deployment with the following command.
kubectl -n kube-system edit deployment.apps/cluster-autoscaler
Edit the cluster-autoscaler container command to replace <YOUR CLUSTER NAME> with your cluster's name, and add the following options.
--balance-similar-node-groups
--skip-nodes-with-system-pods=false
Final change will look like below
spec:
containers:
- command:
- ./cluster-autoscaler
- --v=4
- --stderrthreshold=info
- --cloud-provider=aws
- --skip-nodes-with-local-storage=false
- --expander=least-waste
- --node-group-auto-discovery=asg:tag=k8s.io/cluster-autoscaler/enabled,k8s.io/cluster-autoscaler/<YOUR CLUSTER NAME>
- --balance-similar-node-groups
- --skip-nodes-with-system-pods=false
Set the Cluster Autoscaler image tag
kubectl -n kube-system set image deployment.apps/cluster-autoscaler cluster-autoscaler=asia.gcr.io/k8s-artifacts-prod/autoscaling/cluster-autoscaler:v1.15.6
Log Autoscaler
kubectl -n kube-system logs -f deployment.apps/cluster-autoscaler
Add scale policy
### Horizontal Pod Autoscaler
The Kubernetes Horizontal Pod Autoscaler automatically scales the number of pods in a deployment, replication controller, or replica set based on that resource's CPU utilization. This can help your applications scale out to meet increased demand or scale in when resources are not needed, thus freeing up your worker nodes for other applications. When you set a target CPU utilization percentage, the Horizontal Pod Autoscaler scales your application in or out to try to meet that target.
The Horizontal Pod Autoscaler is a standard API resource in Kubernetes that simply requires that a metrics source (such as the Kubernetes metrics server) is installed on your Amazon EKS cluster to work.
Install metrics-server with __curl__ and __jq__
DOWNLOAD_URL=$(curl -Ls "https://api.github.com/repos/kubernetes-sigs/metrics-server/releases/latest" | jq -r .tarball_url)
DOWNLOAD_VERSION=$(grep -o '[^/v]*$' <<< $DOWNLOAD_URL)
curl -Ls $DOWNLOAD_URL -o metrics-server-$DOWNLOAD_VERSION.tar.gz
mkdir metrics-server-$DOWNLOAD_VERSION
tar -xzf metrics-server-$DOWNLOAD_VERSION.tar.gz --directory metrics-server-$DOWNLOAD_VERSION --strip-components 1
kubectl apply -f metrics-server-$DOWNLOAD_VERSION/deploy/1.8+/
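Verify that the metrics server is running before testing the Horizontal Pod Autoscaler

    kubectl get deployment metrics-server -n kube-system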
#### Horizontal Autoscale Test
* Install httpd pod
kubectl run httpd \
--generator=run-pod/v1 \
--image=httpd --requests=cpu=100m \
--limits=cpu=200m --expose --port=80
* Run benchmark test
kubectl run apache-bench \
--generator=run-pod/v1 \
-i --tty --rm --image=httpd \
-- ab -n 900000 \
-c 9999 http://httpd.default.svc.cluster.local/
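* Create the HPA itself — the benchmark above only generates load. A minimal sketch (kubectl autoscale targets a Deployment, so this assumes httpd was created as a Deployment rather than the bare pod shown above):

    kubectl autoscale deployment httpd --cpu-percent=50 --min=1 --max=5
    kubectl get hpa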
### Vertical Pod Autoscaler
The Kubernetes Vertical Pod Autoscaler automatically adjusts the CPU and memory reservations for your pods to help "right size" your applications. This adjustment can improve cluster resource utilization and free up CPU and memory for other pods. This topic helps you to deploy the Vertical Pod Autoscaler to your cluster and verify that it is working.
### Deploy Vertical Autoscaler
Open a terminal window and navigate to a directory where you would like to download the Vertical Pod Autoscaler source code.
* Clone the kubernetes/autoscaler GitHub repository.
git clone https://github.com/kubernetes/autoscaler.git
* Change to the vertical-pod-autoscaler directory.
cd autoscaler/vertical-pod-autoscaler/
* (Optional) If you have already deployed another version of the Vertical Pod Autoscaler, remove it with the following command.
./hack/vpa-down.sh
* Deploy the Vertical Pod Autoscaler to your cluster with the following command.
./hack/vpa-up.sh
* Check Vertical Autoscaler pods
kubectl get pods -n kube-system | grep vpa
#### Test Vertical Autoscaler
* Deploy the hamster.yaml Vertical Pod Autoscaler example with the following command.
kubectl apply -f examples/hamster.yaml
* Get the pods from the hamster example application.
kubectl get pods -l app=hamster
# Output:
hamster-c7d89d6db-rglf5 1/1 Running 0 48s
hamster-c7d89d6db-znvz5 1/1 Running 0 48s
* Describe one of the pods to view its CPU and memory reservation.
kubectl describe pod hamster-c7d89d6db-rglf5
* Describe the hamster-vpa resource to view the new recommendation.
kubectl describe vpa/hamster-vpa
# Output
Status:
Conditions:
Last Transition Time: 2020-02-11T13:31:48Z
Status: True
Type: RecommendationProvided
Recommendation:
Container Recommendations:
Container Name: hamster
Lower Bound:
Cpu: 530m
Memory: 262144k
Target:
Cpu: 587m
Memory: 262144k
Uncapped Target:
Cpu: 587m
Memory: 262144k
Upper Bound:
Cpu: 1
Memory: 500Mi
<file_sep>+++
title = "F# Namespace, Module & Import"
description = "F# Namespace, Module & Open Declaration"
+++
## Namespace
> A namespace lets you organize code into areas of related functionality by enabling you to attach a name to a grouping of F# program elements. Namespaces are typically top-level elements in F# files.
Namespaces **cannot** directly contain values and functions. Instead, values and functions must be included in modules, and modules are included in namespaces. Namespaces can contain types and modules.
```fsharp
namespace [rec] [parent-namespaces.]identifier
```
## Modules
> A module is a grouping of F# code, such as values, types, and function values, in an F# program. Grouping code in modules helps keep related code together and helps avoid name conflicts in your program.
```fsharp
// Top-level module declaration.
module [accessibility-modifier] [qualified-namespace.]module-name
declarations
// Local module declaration.
module [accessibility-modifier] module-name =
declarations
```
## Open Declaration
> An import declaration specifies a module or namespace whose elements you can reference without using a fully qualified name.
```fsharp
open module-or-namespace-name
open type type-name
```
## Namespace Examples
- Example 1
```fsharp
namespace Widgets
type MyWidget1 =
member this.WidgetName = "Widget1"
module WidgetsModule =
let widgetName = "Widget2"
```
- Example 2
```fsharp
namespace Widgets
module WidgetModule1 =
let widgetFunction x y =
printfn "Module1 %A %A" x y
module WidgetModule2 =
let widgetFunction x y =
printfn "Module2 %A %A" x y
module useWidgets =
do
WidgetModule1.widgetFunction 10 20
WidgetModule2.widgetFunction 5 6
```
- Example 3 - Recursive namespaces
```fsharp
namespace rec MutualReferences
type Orientation = Up | Down
type PeelState = Peeled | Unpeeled
// This exception depends on the type below.
exception DontSqueezeTheBananaException of Banana
type Banana(orientation : Orientation) =
member val IsPeeled = false with get, set
member val Orientation = orientation with get, set
member val Sides: PeelState list = [ Unpeeled; Unpeeled; Unpeeled; Unpeeled] with get, set
member self.Peel() = BananaHelpers.peel self // Note the dependency on the BananaHelpers module.
member self.SqueezeJuiceOut() = raise (DontSqueezeTheBananaException self) // This member depends on the exception above.
module BananaHelpers =
let peel (b: Banana) =
let flip (banana: Banana) =
match banana.Orientation with
| Up ->
banana.Orientation <- Down
banana
| Down -> banana
let peelSides (banana: Banana) =
banana.Sides
|> List.map (function
| Unpeeled -> Peeled
| Peeled -> Peeled)
match b.Orientation with
| Up -> b |> flip |> peelSides
| Down -> b |> peelSides
```
## Module Examples
- Example 1 - Recursive
```fsharp
namespace rec MutualReferences
type Orientation = Up | Down
type PeelState = Peeled | Unpeeled
// This exception depends on the type below.
exception DontSqueezeTheBananaException of Banana
type Banana(orientation : Orientation) =
member val IsPeeled = false with get, set
member val Orientation = orientation with get, set
member val Sides: PeelState list = [ Unpeeled; Unpeeled; Unpeeled; Unpeeled] with get, set
member self.Peel() = BananaHelpers.peel self // Note the dependency on the BananaHelpers module.
member self.SqueezeJuiceOut() = raise (DontSqueezeTheBananaException self) // This member depends on the exception above.
module BananaHelpers =
let peel (b: Banana) =
let flip (banana: Banana) =
match banana.Orientation with
| Up ->
banana.Orientation <- Down
banana
| Down -> banana
let peelSides (banana: Banana) =
banana.Sides
|> List.map (function
| Unpeeled -> Peeled
| Peeled -> Peeled)
match b.Orientation with
| Up -> b |> flip |> peelSides
| Down -> b |> peelSides
```
## Open Examples
- Example 1
```fsharp
// Open a .NET Framework namespace.
open System.IO
// Now you do not have to include the full paths.
let writeToFile2 filename (text: string) =
let stream1 = new FileStream(filename, FileMode.Create)
let writer = new StreamWriter(stream1)
writer.WriteLine(text)
writeToFile2 "file1.txt" "Testing..."
```
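- Example 2 - `open type` (an added sketch, not part of the original example set; requires F# 5.0 or later)
```fsharp
// Open the static members of System.Math so they can be used unqualified.
open type System.Math

let smaller = Min(1.0, 2.0)  // instead of System.Math.Min
let root = Sqrt(2.0)         // instead of System.Math.Sqrt
```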
<file_sep>+++
title = "AWS: EKS - 6"
description = "Labs: GuestBook"
weight=16
draft=true
+++
## EKS - Part 6
### Guestbook
* Create the Redis master replication controller.
kubectl apply -f https://raw.githubusercontent.com/kubernetes/examples/master/guestbook-go/redis-master-controller.json
* Create the Redis master service.
kubectl apply -f https://raw.githubusercontent.com/kubernetes/examples/master/guestbook-go/redis-master-service.json
* Create the Redis slave replication controller.
kubectl apply -f https://raw.githubusercontent.com/kubernetes/examples/master/guestbook-go/redis-slave-controller.json
* Create the Redis slave service.
kubectl apply -f https://raw.githubusercontent.com/kubernetes/examples/master/guestbook-go/redis-slave-service.json
* Create the guestbook replication controller.
kubectl apply -f https://raw.githubusercontent.com/kubernetes/examples/master/guestbook-go/guestbook-controller.json
* Create the guestbook service.
kubectl apply -f https://raw.githubusercontent.com/kubernetes/examples/master/guestbook-go/guestbook-service.json
* Query the services in your cluster and wait until the External IP column for the guestbook service is populated.
kubectl get services -o wide
After your external IP address is available, point a web browser to that address at port 3000 to view your guest book. For example,
http://a2444a44644eb431ca8b9f7617d85aad-1238693525.ap-southeast-2.elb.amazonaws.com:3000/
<file_sep>+++
title = "Map, Function & Closure"
description="Golang Introduction: Map & Function & Closure "
weight=2
+++
### Map
- Maps are a special kind of data structure: an unordered collection of pairs of items, where one element of the pair is the key, and the other element, associated with the key, is the data or the value, hence they are also called associative arrays or dictionaries.
- The key type can be any type for which the operations == and != are defined, like string, int, float. The value type can be any type.
- Map is much faster than a linear search, but still around 100x slower than direct indexing in an array or slice; so if performance is very important try to solve the problem with slices.
#### Definition
Maps are reference types: memory is allocated with the make function. A map can also be created and populated with a literal, as shown in the example after this list.
* Initialization of a map:
var map1 map[keytype]valuetype = make(map[keytype]valuetype)
* or shorter with:
map1 := make(map[keytype]valuetype)
* mapCreated is made in this way:
mapCreated := make(map[string]float64)
* which is equivalent to:
mapCreated := map[string]float64{}
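A minimal illustration of creating a map with a literal and then adding and reading pairs (added here for clarity, not from the original text):
```go
package main

import "fmt"

func main() {
	// map literal with initial key/value pairs
	ratings := map[string]float64{"go": 4.5, "rust": 4.7}
	ratings["zig"] = 4.2 // add or update a pair
	fmt.Println(len(ratings))  // number of pairs: 3
	fmt.Println(ratings["go"]) // direct lookup: 4.5
}
```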
#### Does the key exist
- Test whether a key is present with the "comma ok" form, and delete the pair if it exists
```go
if _, ok := map1[key1]; ok {
delete(map1, key1)
}
```
### Function
- Function overloading, that is coding two or more functions in a program with the same function name but a different parameter list and/or a different return-type(s), is not allowed in Go.
- The default way in Go is to pass a variable as an argument to a function by value: a copy is made of that variable (and the data in it).
- Named variables used as result parameters are automatically initialized to their zero-value, and once they receive their value, a simple (empty) return statement is sufficient; furthermore even when there is only 1 named return variable, it has to be put inside ( )
#### Defer
- The defer keyword allows us to postpone the execution of a statement or a function until the end of the enclosing (calling) function: it executes something (a function or an expression) when the enclosing function returns (after every return and even when an error occurred in the midst of executing the function, not only a return at the end of the function), but before the }
- When many defer’s are issued in the code, they are executed at the end of the function in the inverse order (like a stack or LIFO): the last defer is first executed, and so on.
- Sample
```go
func f() {
	for i := 0; i < 5; i++ {
		defer fmt.Printf("%d ", i)
	}
}
// output : 4 3 2 1 0
```
- Defer allows us to guarantee that certain clean-up tasks are performed before we return from a function.
#### Recursive
* A function that call itself in its body is called recursive.
* An important problem when using recursive functions is stack overflow: this can occur when a large number of recursive calls are needed and the programs runs out of allocated stack memory. This can be solved by using a technique called lazy evaluation, implemented in Go with a channel and a goroutine.
```go
package main
import "fmt"
func main() {
result := 0
for i:=0; i <= 10; i++ {
result = fibonacci(i)
fmt.Printf("fibonacci(%d) is: %d\n", i, result)
}
}
func fibonacci(n int) (res int) {
if n <= 1 {
res = 1
} else {
res = fibonacci(n-1) + fibonacci(n-2)
}
return
}
```
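The lazy-evaluation variant mentioned above can be sketched with a channel and a goroutine; this is an added illustration, not code from the original text:
```go
package main

import "fmt"

// fibStream lazily produces Fibonacci numbers on a channel.
func fibStream() <-chan int {
	ch := make(chan int)
	go func() {
		a, b := 1, 1
		for {
			ch <- a
			a, b = b, a+b
		}
	}()
	return ch
}

func main() {
	fib := fibStream()
	for i := 0; i <= 10; i++ {
		fmt.Printf("fibonacci(%d) is: %d\n", i, <-fib)
	}
}
```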
#### Callback
* Functions can be used as parameters in another function, the passed function can then be called within the body of that function, that is why it is commonly called a callback.
```go
func main() {
callback(1, Add)
}
func Add(a, b int) {
fmt.Printf("The sum of %d and %d is: %d\n", a, b, a+b)
}
func callback(y int, f func(int, int)) {
f(y, 2)
// this becomes Add(1, 2)
}
```
#### Closures (function literals)
* Sample
```go
plus := func(x, y int) int { return x + y }
plus( 1,2) // 3
// invoke func immediatley
func(x, y int) int { return x + y }( 1, 2) // 3
```
#### Closures - return another function
* Use return function for Debugging
```go
where := func() {
	_, file, line, _ := runtime.Caller(1)
	log.Printf("%s:%d", file, line)
}
func Func () {
//....do sth
where ()
// ....do another thing
}
```
<file_sep>+++
title = "Blogs"
weight = 5
+++
{{%children style="card" description="true" %}}<file_sep>+++
date = "2018-01-06T14:59:31+11:00"
title = "Raspberry Pi setup"
description="How to setup Raspberry Pi as file server"
draft = false
+++
Prelude
> *This note is mainly to record how to set up a Raspberry Pi as a file server.*
## Prerequisites
* You have a Raspberry Pi with pre-installed raspbian SD card
* You are happy to get your hands dirty
* You have some basic computer concepts.
* RPi is short for Raspberry Pi
## My Raspberry Pi is a bit old
I only have the RPi 1 Model B with the pre-installed Raspbian SD card in place. It is quite outdated. If you don't know the model of your RPi, don't worry about it now; I will explain how to get that info later. I got this RPi as a gift two years ago. I left it in the garage and totally forgot about it, until I cleaned up my garage a couple of months ago. Actually I love the old model with the transparent plastic box more than the new one. I knew that if I kept leaving it in the garage it would soon become rubbish, because it is not easy to find equipment or software compatible with an old RPi. Luckily the lifespan of an RPi is much longer than that of a mobile phone, but it still took me some effort to set up the wifi adapter.
After six hours on and off, I got it up and running. Honestly I'm not a hardware guru, but I'm so happy I didn't throw this beautiful (my aesthetics are sort of quirky) box into the bin. I captured the home screen of Kodi, the media center, and its mobile control app.
Home page of Kodi

Screenshot of kodi mobile app on my android phone.

## How to start
There is no wifi or bluetooth support on this model, so I have to connect this tiny box to my switch via cable all the time. There is a small problem: my switch is far away from my laptop, monitor, keyboard, etc., and I don't have a cable long enough to connect the RPi to the switch.
First things first, I need to set up the SSH server, change the configuration to allow password login, and make it auto-start after reboot. To do so I just need a monitor and keyboard.
### Connect the RPi with monitor and keyboard
* Reset the password of `pi`
```bash
sudo passwd pi
```
### SSH server setup
```bash
sudo apt-get install openssh-server
### backup default config
sudo cp /etc/ssh/sshd_config /etc/ssh/sshd_config.ori
sudo chmod a-w /etc/ssh/sshd_config.ori
### use any editor to update sshd_config
sudo nano /etc/ssh/sshd_config
### uncomment PasswordAuthentication yes to allow remote password login
### setup ssh auto-start onboot
sudo update-rc.d ssh defaults
### reboot
sudo reboot
### Check the ssh is running after reboot
sudo service ssh status
### You should see sth as below
[ok] sshd is running
### Turn off Pi
sudo poweroff
```
### Connect RPi with the switch
After all the above is done, you can disconnect the monitor and keyboard, and connect the RPi to the switch (or modem). Once the power is on, you should be able to access the RPi from your PC or laptop.
### Find the ip address
Access the admin home page of your switch via a browser, e.g. `http://192.168.0.1/index.html` (the actual URL depends on your switch or modem; you can find it on the label stuck on the back or bottom).
If you forget the password to log in to the admin page, you can still reset your switch. If your modem is a 3-in-1 model including a switch, make sure you have the `ID and Password` to access the internet before you reset it.
After you log in successfully, expand the main menus and find a menu called `DHCP`, e.g.
```
Basic Setup
|__ ...
Advanced Setup
|__ ...
Device
|__ DHCP
|__ WAN
|__ ...
```
You will see a table like the one below.
Hostname | MAC Address | IP Address | Expires In
-----|---|----|---
PC-1 | 2f:3f:09:ff:f5:24 | 192.168.1.7 | x hours x mins
PC-2 | c0:9f:05:ff:f9:14 | 192.168.1.8 | x hours x mins
Laptop-1 | b0:f6:05:e2:f5:99 | 192.168.1.9 | x hours x mins
raspberrypi| a5:06:b2:07:c4:03 | 192.168.1.10 | x hours x mins
### Access RPi with your laptop
* From Linux or Mac
```
ssh [email protected]
### type yes
### type the password
```
* For Windows
You need to download an SSH tool. If you installed git before, you already have one on your computer. Otherwise, I recommend installing [Putty](http://www.putty.org/). It is free and quite handy.
After you install and launch Putty, just type the IP address `192.168.1.10` into the field `Host Name (or IP Address)`, then click the `Open` button.
```
### type in pi as login user
login as: pi
### type in password
[email protected]'s password:
```
## Access RPi via VNC
### Setup VNC server on RPi
```bash
sudo apt-get update
sudo apt-get install tightvncserver
```
* Launch the VNC server and set up a password
```bash
/usr/bin/tightvncserver
### Setup password for remote access.
### View only password is not necessary
### setup VNC server to auto start
sudo update-rc.d tightvncserver defaults
sudo reboot
```
### Setup VNC client on your PC
Linux: Use __xRDP__
I believe you can figure it out yourself if you use Linux as your desktop.
Windows: Install [RealVNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) as VNC client
* Launch the VNC Viewer and create a new connection

* Type in the VNC password and you can login RPi with GUI

After all the above is done, you have your RPi ready. You can choose what you want to build on it. Considering its CPU and RAM, it is not sufficient to be used as a daily desktop PC, but it is still enough to work as a server, e.g. a File Server, Web Server, Email Server, FTP Server or Media Center.
Now I want to make a file server and media center on it.
### Setup File Server via Samba
* Attach external storage to your RPi. The pre-installed SD card has only 8 GB of space, so I attached my portable hard drives to the RPi. You can attach a PC hard drive, a USB stick or another SD card via an adapter. It is really up to what you have in place.
> I want to make one a public share folder without authentication, while the other needs a password to access.
* If your hard drive or USB is `ntfs`, the RPi might not recognize your device. You can simply install a package to make it work.
```bash
sudo apt-get install ntfs-3g
```
* Get drive info after attach two hard drives
```bash
sudo lsblk
### You will see the tree structure of drives
NAME MAJ:MIN RM SIZE RO TYPE MOUNTPOINT
sda 8:0 0 1.8T 0 disk
└─sda1 8:1 0 1.8T 0 part /media/mydrive1
└─sda2 8:2 0 870G 0 part /media/mydrive2
mmcblk0 179:0 0 7.4G 0 disk
├─mmcblk0p1 179:1 0 56M 0 part /boot
└─mmcblk0p2 179:2 0 7.4G 0 part /
```
* Remount the drives with proper name
```bash
sudo su ### switch to root
cd /media
umount mydrive1
umount mydrive2
mkdir public private
mount -o rw /dev/sda1 public
mount -o rw /dev/sda2 private
```
* Change `fstab` to support read and write permission
```bash
sudo nano /etc/fstab
```
* Add the following lines to the end of the file. The file system type of my drives is `ntfs`. If you are not sure what the file system type is, you can check with `sudo lsblk -o name,fstype`
```
/dev/sda1 /media/public ntfs nofail,noatime 0 0
/dev/sda2 /media/private ntfs nofail,noatime 0 0
```
* After you complete the above changes, you will see the difference by typing the command `sudo lsblk`
```
NAME MAJ:MIN RM SIZE RO TYPE MOUNTPOINT
sda 8:0 0 1.8T 0 disk
└─sda1 8:1 0 1.8T 0 part /media/public
└─sda2 8:2 0 870G 0 part /media/private
mmcblk0 179:0 0 7.4G 0 disk
├─mmcblk0p1 179:1 0 56M 0 part /boot
└─mmcblk0p2 179:2 0 7.4G 0 part /
```
* Install Samba
```bash
sudo apt-get update
sudo apt-get upgrade
sudo apt-get install samba samba-common-bin
```
* Setup Samba configuration
## Backup original config and update the config
```bash
sudo su
cd /etc/samba
cp smb.conf smb.conf.ori
nano smb.conf
```
### Change the line below:
* `wins support = no` to `wins support = yes`
### Add the following lines to the end of the file
```
[public]
comment = Share Folder
path = /media/public
create mask = 0665
directory mask = 0775
read only = no
guest ok = yes
[private]
comment = Private Folder
path = /media/private
valid users = root,smbu
force user = smbu
create mask = 0777
directory mask = 0777
writable = yes
browsable = yes
read only = no
guest ok = yes
```
### Add a new user `smbu` for remote access to the private folder
```bash
sudo useradd smbu
sudo passwd smbu
sudo usermod -a -G root smbu
sudo smbpasswd -a smbu ## set the Samba password for remote access
```
## Access the network folder
### Linux
I had no problem accessing both network drives from Linux.
### Windows
It took me some time to make it work. Here are some bullet points which may help you with troubleshooting.
* Please use `WORKGROUP` instead of domain.
* Please keep name of workgroup as `WORKGROUP`
* Turn on the network discovery
`Control Panel > All Control Panel Items > Network and Sharing Center > Advanced sharing settings`
* Reboot the PC or laptop
## Get accurate version of RPi model
* Get the revision code
```bash
cat /proc/cpuinfo
```
* Check the table below to find your model
MODEL AND PI REVISION | MEMORY | HARDWARE REVISION CODE FROM CPUINFO
---|-----|------
Model B Revision 1.0 | 256MB | 0002
Model B Revision 1.0 + ECN0001 (no fuses, D14 removed) | 256MB | 0003
Model B Revision 2.0 Mounting holes | 256MB | 0004 0005 0006
Model A Mounting holes | 256MB | 0007,0008,0009
Model B Revision 2.0 Mounting holes | 512MB | 000d 000e 000f
Model B+ | 512MB | 0010
Compute Module | 512MB | 0011
Model A+ | 256MB | 0012
Pi 2 Model B | 1GB | a01041 (Sony, UK) a21041 (Embest, China)
PiZero | 512MB | 900092(no camera connector) 900093(camera connector)
Pi 3 Model B | 1GB | a02082 (Sony, UK) a22082 (Embest, China)
PiZero W | 512MB | 9000c1
## Setup Wifi Adapter
A wifi adapter is not necessary for a media centre, but it saves some effort when moving your RPi around your place, especially if you want to connect your RPi to different devices from time to time.
I bought a D-Link adapter, a DWA-131 with USB 2.0. This is the oldest one I could find in the store. If you are going to buy a wifi adapter for an old Unix-like system, please don't buy the latest model; you will find yourself trapped in compatibility issues between the wifi driver and the Linux kernel, and you may have to upgrade the kernel or rebuild the driver.
As you know, there is always some hiccup in finding the correct wifi driver for your portable wifi adapter. It took me a while to find the proper way to install it. If you have an RPi 2/3, it is much easier. My RPi 1 Model B comes with kernel 4.1.18. I could not find the source code of a wifi driver which supports this old kernel today, and I didn't want to upgrade and rebuild the kernel.
Finally, I found a post on the RPi forum which solved my problem: [MrEngman's post](https://www.raspberrypi.org/forums/viewtopic.php?f=45&t=103989&p=1048709&hilit=Realtek+RTL8192EU+ID+0BDA%3A818B+WiFi+drivers+for+Raspbian#p1048709). He posted an update in April about the Dropbox issue and an alternative solution.
Download and install the new version of the script with commands
```
sudo wget http://www.fars-robotics.net/install-wifi -O /usr/bin/install-wifi
sudo chmod +x /usr/bin/install-wifi
### Shows details on using it.
sudo install-wifi -h
### To install the driver on your current kernel you should just need to run command
sudo install-wifi
### Check the wifi interface after installation
ifconfig -a
```
Set up the wifi password. You can simply do it via the GUI application, or via the command line if you like. Please check out the official documentation below.
https://www.raspberrypi.org/documentation/configuration/wireless/
## Install Kodi as media centre
If you have NOOBS in place, then you have everything you need. Because I don't have it, I followed the official instructions to install Kodi. It is a simple way to convert your RPi into a media centre without scratching your head too much.
```bash
### Install kodi
sudo apt-get update
sudo apt-get install kodi
### Config kodi
sudo nano /etc/default/kodi
ENABLED=1
```
Reboot the RPi; before you reboot it, please make sure your TV's HDMI cable is plugged into the RPi. After a couple of minutes, you will see the home page as I posted above. Don't forget to install a remote control app on your mobile; I'm pretty sure you can find one for Kodi on your phone. Finally, you can enjoy your home media center.
<file_sep>+++
title = "F# Collections 1"
description = "F# Array & Slicing"
weight = 5
+++
## Array
Arrays are fixed-size, zero-based, mutable collections of consecutive data elements that are all of the same type.
### Create array
```fsharp
//
let array1 = [| 1; 2; 3 |]
// Put each element on a separate line, in which case the semicolon separator is optional.
let array1 =
[|
1
2
3
|]
// The type of the array elements is inferred from the literals used and must be consistent.
// The following code causes an error because 1.0 is a float and 2 and 3 are integers.
// Causes an error.
// let array2 = [| 1.0; 2; 3 |]
// Use sequence expressions to create arrays.
let array3 = [| for i in 1 .. 10 -> i * i |]
// use Array.zeroCreate
let arrayOfTenZeroes : int array = Array.zeroCreate 10
// use Array.empty
let myEmptyArray = Array.empty
printfn "Length of empty array: %d" myEmptyArray.Length
// Length of empty array: 0
// use Array.create
printfn "Array of floats set to 5.0: %A" (Array.create 10 5.0)
// Array of floats set to 5.0: [|5.0; 5.0; 5.0; 5.0; 5.0; 5.0; 5.0; 5.0; 5.0; 5.0|]
// Array.init
printfn "Array of squares: %A" (Array.init 10 (fun index -> index * index))
// Array of squares: [|0; 1; 4; 9; 16; 25; 36; 49; 64; 81|]
```
### Copy
```fsharp
open System.Text
let firstArray : StringBuilder array = Array.init 3 (fun index -> new StringBuilder(""))
let secondArray = Array.copy firstArray
// Reset an element of the first array to a new value.
firstArray[0] <- new StringBuilder("Test1")
// Change an element of the first array.
firstArray[1].Insert(0, "Test2") |> ignore
printfn "%A" firstArray
printfn "%A" secondArray
// output
// [|Test1; Test2; |]
// [|; Test2; |]
```
### Access
```fsharp
// Accesses elements from 0 to 2.
array1[0..2]
// Accesses elements from the beginning of the array to 2.
array1[..2]
// Accesses elements from 2 to the end of the array.
array1[2..]
```
### Functions
```fsharp
// Array.sub
let a1 = [| 0 .. 99 |]
let a2 = Array.sub a1 5 10
printfn "%A" a2
// output
// [|5; 6; 7; 8; 9; 10; 11; 12; 13; 14|]
// Array.append creates a new array by combining two existing arrays.
printfn "%A" (Array.append [| 1; 2; 3|] [| 4; 5; 6|])
// output
// [|1; 2; 3; 4; 5; 6|]
// Array.choose selects elements of an array to include in a new array.
printfn "%A" (Array.choose (fun elem -> if elem % 2 = 0 then
Some(float (elem*elem - 1))
else
None) [| 1 .. 10 |])
// output
// [|3.0; 15.0; 35.0; 63.0; 99.0|]
// Array.collect runs a specified function on each array element of an existing array and then collects the elements generated by the function and combines them into a new array.
printfn "%A" (Array.collect (fun elem -> [| 0 .. elem |]) [| 1; 5; 10|])
// output
// [|0; 1; 0; 1; 2; 3; 4; 5; 0; 1; 2; 3; 4; 5; 6; 7; 8; 9; 10|]
// Array.concat takes a sequence of arrays and combines them into a single array.
Array.concat [ [|0..3|] ; [|4|] ]
// output [|0; 1; 2; 3; 4|]
Array.concat [| [|0..3|] ; [|4|] |]
// output [|0; 1; 2; 3; 4|]
// Array.filter takes a Boolean condition function and generates a new array that contains only those elements from the input array for which the condition is true.
printfn "%A" (Array.filter (fun elem -> elem % 2 = 0) [| 1 .. 10|])
// The output
// [|2; 4; 6; 8; 10|]
// Array.rev generates a new array by reversing the order of an existing array.
let stringReverse (s: string) =
System.String(Array.rev (s.ToCharArray()))
printfn "%A" (stringReverse("!dlrow olleH"))
// The output
// "Hello world!"
// You can easily combine functions in the array module that transform arrays by using the pipeline operator (|>)
[| 1 .. 10 |]
|> Array.filter (fun elem -> elem % 2 = 0)
|> Array.choose (fun elem -> if (elem <> 8) then Some(elem*elem) else None)
|> Array.rev
|> printfn "%A"
// output
// [|100; 36; 16; 4|]
```
### Multidimensional arrays
```fsharp
let my2DArray = array2D [ [ 1; 0]; [0; 1] ]
let arrayOfArrays = [| [| 1.0; 0.0 |]; [|0.0; 1.0 |] |]
let twoDimensionalArray = Array2D.init 2 2 (fun i j -> arrayOfArrays[i][j])
```
### Slicing
```fsharp
// Get rows 1 to N from an NxM matrix (returns a matrix):
matrix[1.., *]
// Get rows 1 to 3 from a matrix (returns a matrix):
matrix[1..3, *]
// Get columns 1 to 3 from a matrix (returns a matrix):
matrix[*, 1..3]
// Get a 3x3 submatrix:
matrix[1..3, 1..3]
// Get row 3 from a matrix as a vector:
matrix[3, *]
// Get column 3 from a matrix as a vector:
matrix[*, 3]
```
### Matrix & slicing
```fsharp
type Matrix<'T>(N: int, M: int) =
let internalArray = Array2D.zeroCreate<'T> N M
member this.Item
with get(a: int, b: int) = internalArray[a, b]
and set(a: int, b: int) (value:'T) = internalArray[a, b] <- value
member this.GetSlice(rowStart: int option, rowFinish : int option, colStart: int option, colFinish : int option) =
let rowStart =
match rowStart with
| Some(v) -> v
| None -> 0
let rowFinish =
match rowFinish with
| Some(v) -> v
| None -> internalArray.GetLength(0) - 1
let colStart =
match colStart with
| Some(v) -> v
| None -> 0
let colFinish =
match colFinish with
| Some(v) -> v
| None -> internalArray.GetLength(1) - 1
internalArray[rowStart..rowFinish, colStart..colFinish]
member this.GetSlice(row: int, colStart: int option, colFinish: int option) =
let colStart =
match colStart with
| Some(v) -> v
| None -> 0
let colFinish =
match colFinish with
| Some(v) -> v
| None -> internalArray.GetLength(1) - 1
internalArray[row, colStart..colFinish]
member this.GetSlice(rowStart: int option, rowFinish: int option, col: int) =
let rowStart =
match rowStart with
| Some(v) -> v
| None -> 0
let rowFinish =
match rowFinish with
| Some(v) -> v
| None -> internalArray.GetLength(0) - 1
internalArray[rowStart..rowFinish, col]
module test =
let generateTestMatrix x y =
let matrix = new Matrix<float>(3, 3)
for i in 0..2 do
for j in 0..2 do
matrix[i, j] <- float(i) * x - float(j) * y
matrix
let test1 = generateTestMatrix 2.3 1.1
let submatrix = test1[0..1, 0..1]
printfn $"{submatrix}"
let firstRow = test1[0,*]
let secondRow = test1[1,*]
let firstCol = test1[*,0]
printfn $"{firstCol}"
```
### Search
```fsharp
// Array.find takes a Boolean function and returns the first element for which
// the function returns true, or raises a
// System.Collections.Generic.KeyNotFoundException if no element that satisfies
// the conditionis found.
// Array.findIndex is like Array.find, except that it returns the index of
// the element instead of the element itself.
// The following code uses Array.find and Array.findIndex to locate a number that
// is both a perfect square and perfect cube.
let arrayA = [| 2 .. 100 |]
let delta = 1.0e-10
let isPerfectSquare (x:int) =
let y = sqrt (float x)
abs(y - round y) < delta
let isPerfectCube (x:int) =
let y = System.Math.Pow(float x, 1.0/3.0)
abs(y - round y) < delta
let element = Array.find (fun elem -> isPerfectSquare elem && isPerfectCube elem) arrayA
let index = Array.findIndex (fun elem -> isPerfectSquare elem && isPerfectCube elem) arrayA
printfn "The first element that is both a square and a cube is %d and its index is %d." element index
// The output is as follows.
// The first element that is both a square and a cube is 64 and its index is 62.
// ----------------------------------------------------------------------------
// Array.tryFind is like Array.find, except that its result is an option type,
// and it returns None if no element is found. Array.tryFind should be used
// instead of Array.find when you do not know whether a matching element is
// in the array. Similarly, Array.tryFindIndex is like Array.findIndex except
// that the option type is the return value. If no element is found, the option is None.
let delta = 1.0e-10
let isPerfectSquare (x:int) =
let y = sqrt (float x)
abs(y - round y) < delta
let isPerfectCube (x:int) =
let y = System.Math.Pow(float x, 1.0/3.0)
abs(y - round y) < delta
let lookForCubeAndSquare array1 =
let result = Array.tryFind (fun elem -> isPerfectSquare elem && isPerfectCube elem) array1
match result with
| Some x -> printfn "Found an element: %d" x
| None -> printfn "Failed to find a matching element."
lookForCubeAndSquare [| 1 .. 10 |]
lookForCubeAndSquare [| 100 .. 1000 |]
lookForCubeAndSquare [| 2 .. 50 |]
// The output is as follows.
// Found an element: 1
// Found an element: 729
// Failed to find a matching element.
// ----------------------------------------------------------------------------
// Use Array.tryPick when you need to transform an element in addition to finding it.
// The result is the first element for which the function returns the transformed
// element as an option value, or None if no such element is found.
// The following code shows the use of Array.tryPick. In this case, instead of
// a lambda expression, several local helper functions are defined to simplify the code.
let findPerfectSquareAndCube array1 =
let delta = 1.0e-10
let isPerfectSquare (x:int) =
let y = sqrt (float x)
abs(y - round y) < delta
let isPerfectCube (x:int) =
let y = System.Math.Pow(float x, 1.0/3.0)
abs(y - round y) < delta
// intFunction : (float -> float) -> int -> int
// Allows the use of a floating point function with integers.
let intFunction function1 number = int (round (function1 (float number)))
let cubeRoot x = System.Math.Pow(x, 1.0/3.0)
// testElement: int -> (int * int * int) option
// Test an element to see whether it is a perfect square and a perfect
// cube, and, if so, return the element, square root, and cube root
// as an option value. Otherwise, return None.
let testElement elem =
if isPerfectSquare elem && isPerfectCube elem then
Some(elem, intFunction sqrt elem, intFunction cubeRoot elem)
else None
match Array.tryPick testElement array1 with
| Some (n, sqrt, cuberoot) -> printfn "Found an element %d with square root %d and cube root %d." n sqrt cuberoot
| None -> printfn "Did not find an element that is both a perfect square and a perfect cube."
findPerfectSquareAndCube [| 1 .. 10 |]
findPerfectSquareAndCube [| 2 .. 100 |]
findPerfectSquareAndCube [| 100 .. 1000 |]
findPerfectSquareAndCube [| 1000 .. 10000 |]
findPerfectSquareAndCube [| 2 .. 50 |]
// The output is as follows.
// Found an element 1 with square root 1 and cube root 1.
// Found an element 64 with square root 8 and cube root 4.
// Found an element 729 with square root 27 and cube root 9.
// Found an element 4096 with square root 64 and cube root 16.
// Did not find an element that is both a perfect square and a perfect cube.
```
### Modify arrays
Array.set sets an element to a specified value. Array.fill sets a range of elements in an array to a specified value. The following code provides an example of Array.fill.
```fsharp
let arrayFill1 = [| 1 .. 25 |]
Array.fill arrayFill1 2 20 0
printfn "%A" arrayFill1
// output
// [|1; 2; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 23; 24; 25|]
```
### Convert to and from other types
Array.ofList creates an array from a list. Array.ofSeq creates an array from a sequence. Array.toList and Array.toSeq convert to these other collection types from the array type.
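A short sketch of these conversions (added for illustration):
```fsharp
let listA = [ 1; 2; 3 ]
let arrayFromList = Array.ofList listA            // [|1; 2; 3|]
let arrayFromSeq = Array.ofSeq (seq { 4 .. 6 })   // [|4; 5; 6|]
let backToList = Array.toList arrayFromList       // [1; 2; 3]
let backToSeq = Array.toSeq arrayFromSeq          // seq [4; 5; 6]
```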
<file_sep>+++
title = "AWS: EKS - 5"
description = "Monitoring, Analytics"
weight=15
+++
## EKS - Part 5
### Metrics Server
The Kubernetes metrics server is an aggregator of resource usage data in your cluster, and it is not deployed by default in Amazon EKS clusters. The metrics server is commonly used by other Kubernetes add ons, such as the Horizontal Pod Autoscaler or the Kubernetes Dashboard.
* Deploy the metrics server
kubectl apply -f https://github.com/kubernetes-sigs/metrics-server/releases/download/v0.3.6/components.yaml
* Verify that the metrics-server deployment
kubectl get deployment metrics-server -n kube-system
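* (Optional) Confirm that node and pod metrics are being collected, for example

    kubectl top nodes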
### Prometheus
The Kubernetes API server exposes a number of metrics that are useful for monitoring and analysis. These metrics are exposed internally through a metrics endpoint that refers to the /metrics HTTP API. Like other endpoints, this endpoint is exposed on the Amazon EKS control plane.
Prometheus is an open-source systems monitoring and alerting toolkit originally built at SoundCloud. Since its inception in 2012, many companies and organizations have adopted Prometheus, and the project has a very active developer and user community. It is now a standalone open source project and maintained independently of any company. To emphasize this, and to clarify the project's governance structure, Prometheus joined the Cloud Native Computing Foundation in 2016 as the second hosted project, after Kubernetes.
#### Deploying Prometheus
* Create a Prometheus namespace.
kubectl create namespace prometheus
* Install __helm__
## MAC
brew install helm
## Linux
curl https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 > get_helm.sh
chmod 700 get_helm.sh
./get_helm.sh
## Add stable repo to helm
helm repo add stable https://kubernetes-charts.storage.googleapis.com/
* Deploy Prometheus.
helm install prometheus stable/prometheus \
--namespace prometheus \
--set alertmanager.persistentVolume.storageClass="gp2",server.persistentVolume.storageClass="gp2"
* Verify that all of the pods in the prometheus namespace are in the READY state.
kubectl get pods -n prometheus
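* (Optional) To reach the Prometheus console locally you can port-forward the server service; the service name below assumes the chart's defaults

    kubectl --namespace prometheus port-forward svc/prometheus-server 9090:80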
### Grafana
Set the storage class to gp2, set the admin password, configure the datasource to point to Prometheus, and create an external load balancer for the service.
kubectl create namespace grafana
helm install grafana stable/grafana \
--namespace grafana \
--set persistence.storageClassName="gp2" \
--set adminPassword='<PASSWORD>' \
--set datasources."datasources\.yaml".apiVersion=1 \
    --set datasources."datasources\.yaml".datasources[0].name=Prometheus \
    --set datasources."datasources\.yaml".datasources[0].type=prometheus \
    --set datasources."datasources\.yaml".datasources[0].url=http://prometheus-server.prometheus.svc.cluster.local \
    --set datasources."datasources\.yaml".datasources[0].access=proxy \
    --set datasources."datasources\.yaml".datasources[0].isDefault=true \
--set service.type=LoadBalancer
Get your 'admin' user password
kubectl get secret --namespace grafana grafana \
-o jsonpath="{.data.admin-password}" | base64 --decode ; echo
The Grafana server can be accessed via port 80 on the following DNS name from within your cluster: grafana.grafana.svc.cluster.local
Get the Grafana URL to visit by running these commands in the same shell:
export SERVICE_IP=$(kubectl get svc --namespace grafana grafana -o jsonpath='{.status.loadBalancer.ingress[0].ip}')
http://$SERVICE_IP:80
#### Import dashboard
* Cluster Monitoring Dashboard
- Click ’+’ button on left panel and select ‘Import’.
- Enter 3119 dashboard id under Grafana.com Dashboard.
- Click ‘Load’.
- Select ‘Prometheus’ as the endpoint under prometheus data sources drop down.
- Click ‘Import’.
* Pods Monitoring Dashboard
- Click ’+’ button on left panel and select ‘Import’.
- Enter 6417 dashboard id under Grafana.com Dashboard.
- Click ‘Load’.
- Enter Kubernetes Pods Monitoring as the Dashboard name.
- Click change to set the Unique identifier (uid).
- Select ‘Prometheus’ as the endpoint under prometheus data sources drop down.
- Click ‘Import’.
### Kubernetes Dashboard
* Deploy the Kubernetes Metrics Server
* Deploy the Dashboard
kubectl apply -f https://raw.githubusercontent.com/kubernetes/dashboard/v2.0.0-beta8/aio/deploy/recommended.yaml
* Create an eks-admin Service Account and Cluster Role Binding
```
cat<<EOF | kubectl apply -f -
apiVersion: v1
kind: ServiceAccount
metadata:
name: eks-admin
namespace: kube-system
---
apiVersion: rbac.authorization.k8s.io/v1beta1
kind: ClusterRoleBinding
metadata:
name: eks-admin
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: cluster-admin
subjects:
- kind: ServiceAccount
name: eks-admin
namespace: kube-system
EOF
```
* Get login token
kubectl -n kube-system describe secret $(kubectl -n kube-system get secret | grep eks-admin | awk '{print $1}')
* Start the kubectl proxy
kubectl proxy
* Access dashboard via browser
http://localhost:8001/api/v1/namespaces/kubernetes-dashboard/services/https:kubernetes-dashboard:/proxy/#!/login.
* Use token from above to login
* Expose the dashboard to public
```
cat<<EOF | kubectl apply -f -
apiVersion: v1
kind: Service
metadata:
annotations:
kubectl.kubernetes.io/last-applied-configuration: |
{"apiVersion":"v1","kind":"Service","metadata":{"annotations":{},"creationTimestamp":"2020-04-12T12:34:10Z","labels":{"k8s-app":"kubernetes-dashboard"},"name":"kubernetes-dashboard","namespace":"kubernetes-dashboard","resourceVersion":"380715","selfLink":"/api/v1/namespaces/kubernetes-dashboard/services/kubernetes-dashboard","uid":"31489c5f-2dff-4b88-9a36-46b248bf9ce2"},"spec":{"externalTrafficPolicy":"Cluster","ports":[{"port":80,"protocol":"TCP","targetPort":8443}],"selector":{"k8s-app":"kubernetes-dashboard"},"sessionAffinity":"None","type":"LoadBalancer"},"status":{"loadBalancer":{}}}
creationTimestamp: "2020-04-12T13:23:52Z"
labels:
k8s-app: kubernetes-dashboard
name: kubernetes-dashboard
namespace: kubernetes-dashboard
resourceVersion: "402339"
selfLink: /api/v1/namespaces/kubernetes-dashboard/services/kubernetes-dashboard
uid: d29bab71-d159-4c33-9ba1-00f05138ecb6
spec:
externalTrafficPolicy: Cluster
ports:
- nodePort: 30556
port: 443
protocol: TCP
targetPort: 8443
selector:
k8s-app: kubernetes-dashboard
sessionAffinity: None
type: LoadBalancer
status:
  loadBalancer: {}
```
<file_sep>+++
title = "Windows Terminal"
description = "Empower Windows with Terminal"
+++
### Windows Terminal
> The Windows Terminal is a modern, fast, efficient, powerful, and productive terminal application for users of command-line tools and shells like Command Prompt, PowerShell, and WSL. Its main features include multiple tabs, panes, Unicode and UTF-8 character support, a GPU accelerated text rendering engine, and custom themes, styles, and configurations.
#### Prerequisite
- PowerShell 7
- Install latest PowerShell (version 7.x) from Windows Store
- Scoop
- [Scoop](https://scoop.sh) can be found here or simply execute the command below
```powershell
Invoke-Expression (New-Object System.Net.WebClient).DownloadString('https://get.scoop.sh')
# or shorter
iwr -useb get.scoop.sh | iex
```
#### Install Windows Terminal
Install Windows Terminal from the Microsoft Store, or with winget: `winget install -e --id Microsoft.WindowsTerminal`. Windows Terminal is an open source project; to participate or follow development, visit https://github.com/microsoft/terminal
#### Install Nerd font
- [Nerd Font](https://www.nerdfonts.com/font-downloads)
- Download Meslo Nerd Font
- Install font file
#### Update Setting
- Settings > Startups
- Profile -> PowerShell ( Default is Windows PowerShell )
- Launch mode -> Maximised
- Update the settings file
  - Hide the profile Windows PowerShell & move the PowerShell profile to the top
```json
"list": [
{
"guid": "{574e775e-4f2a-5b96-ac1e-a2962a402336}",
"hidden": false,
"name": "PowerShell",
"source": "Windows.Terminal.PowershellCore"
},
...
{
"commandline": "powershell.exe",
"font":
{
"face": "MesloLGM NF"
},
"guid": "{61c54bbd-c2c6-5271-96e7-009a87ff44bf}",
"hidden": true,
"name": "Windows PowerShell"
}
```
- Create a customized color scheme
```json
{
"background": "#001D26",
"black": "#282C34",
"blue": "#61AFEF",
"brightBlack": "#9aabc5",
"brightBlue": "#61AFEF",
"brightCyan": "#56B6C2",
"brightGreen": "#98C379",
"brightPurple": "#C678DD",
"brightRed": "#E06C75",
"brightWhite": "#DCDFE4",
"brightYellow": "#E5C07B",
"cursorColor": "#FFFFFF",
"cyan": "#56B6C2",
"foreground": "#DCDFE4",
"green": "#98C379",
"name": "One Half Dark (mod)",
"purple": "#C678DD",
"red": "#E06C75",
"selectionBackground": "#FFFFFF",
"white": "#DCDFE4",
"yellow": "#E5C07B"
}
```
- Settings > Defaults > Appearance
- Color scheme : One Half Dark (mod)
- Font face: MesloLGM NF
- Acrylic opacity: 60%
#### Git
```
winget install -e --id Git.Git
```
#### Other tools
```
scoop install curl sudo jq yq
scoop install neovim gcc
scoop install 7zip bat
```
#### Setup User Profile
- Create user profile folder
```cmd
mkdir ~\.config\powershell
```
- Create a launch script
```powershell
New-Item -ItemType File -Force $env:USERPROFILE\.config\powershell\profile.ps1
```
- Add alias to `$env:USERPROFILE\.config\powershell\profile.ps1`
```powershell
# Alias
Set-Alias vi nvim
Set-Alias ll ls
Set-Alias g git
Set-Alias grep findstr
Set-Alias tig $env:USERPROFILE\app\git\usr\bin\tig.exe
Set-Alias less $env:USERPROFILE\app\git\usr\bin\less.exe
```
- Update built-in profile
```powershell
# Get all profiles
$PROFILE | Get-Member -Type NoteProperty
# If the file of $PROFILE.CurrentUserCurrentHost does not exist
New-Item -ItemType File -Force $PROFILE.CurrentUserCurrentHost
```
- Copy content below to `$PROFILE.CurrentUserCurrentHost`
```
. $env:USERPROFILE\.config\powershell\profile.ps1
```
#### Oh My Posh
- Installation
[Please follow latest instruction](https://ohmyposh.dev/docs/migrating)
- Create customized theme file `sample.omp.json`
```
{
"$schema": "https://raw.githubusercontent.com/JanDeDobbeleer/oh-my-posh/main/themes/schema.json",
"final_space": false,
"osc99": true,
"blocks": [
{
"type": "prompt",
"alignment": "left",
"segments": [
{
"type": "shell",
"style": "diamond",
"leading_diamond": "╭─",
"trailing_diamond": "",
"foreground": "#ffffff",
"background": "#0077c2",
"properties": {
}
},
{
"type": "root",
"style": "diamond",
"leading_diamond": "",
"trailing_diamond": "",
"foreground": "#FFFB38",
"background": "#ef5350",
"properties": {
"root_icon": "\uf292",
"prefix": "<parentBackground>\uE0B0</> "
}
},
{
"type": "path",
"style": "powerline",
"powerline_symbol": "\uE0B0",
"foreground": "#E4E4E4",
"background": "#444444",
"properties": {
"style": "full",
"enable_hyperlink": true
}
},
{
"type": "git",
"style": "powerline",
"powerline_symbol": "\uE0B0",
"foreground": "#011627",
"background": "#FFFB38",
"background_templates": [
"{{ if or (.Working.Changed) (.Staging.Changed) }}#ffeb95{{ end }}",
"{{ if and (gt .Ahead 0) (gt .Behind 0) }}#c5e478{{ end }}",
"{{ if gt .Ahead 0 }}#C792EA{{ end }}",
"{{ if gt .Behind 0 }}#C792EA{{ end }}"
],
"properties": {
"branch_icon": "\ue725 ",
"fetch_status": true,
"fetch_upstream_icon": true,
"template": "{{ .HEAD }} {{ if .Working.Changed }}{{ .Working.String }}{{ end }}{{ if and (.Working.Changed) (.Staging.Changed) }} |{{ end }}{{ if .Staging.Changed }}<#ef5350> \uF046 {{ .Staging.String }}</>{{ end }}"
}
}
]
},
{
"type": "prompt",
"alignment": "right",
"segments": [
{
"type": "node",
"style": "diamond",
"leading_diamond": " \uE0B6",
"trailing_diamond": "\uE0B4",
"foreground": "#3C873A",
"background": "#303030",
"properties": {
"prefix": "\uE718 ",
"postfix": "",
"display_package_manager": true,
"yarn_icon": " <#348cba></>",
"npm_icon": " <#cc3a3a></> "
}
},
{
"type": "time",
"style": "diamond",
"invert_powerline": true,
"leading_diamond": " \uE0B6",
"trailing_diamond": "\uE0B4",
"background": "#40c4ff",
"foreground": "#ffffff",
"properties": {
"prefix": " \uf5ef ",
"postfix": " "
}
}
]
},
{
"type": "prompt",
"alignment": "left",
"newline": true,
"segments": [
{
"type": "text",
"style": "plain",
"foreground": "#21c7c7",
"properties": {
"prefix": "",
"postfix": "",
"text": "╰─"
}
},
{
"type": "exit",
"style": "plain",
"foreground": "#e0f8ff",
"properties": {
"prefix": "\u276F",
"display_exit_code": false,
"always_enabled": true,
"error_color": "#ef5350"
}
}
]
}
]
}
```
- Update user profile
- Add content to `$env:USERPROFILE\.config\powershell\profile.ps1`
```powershell
# Prompt
Import-Module posh-git
$omp_config = "$env:USERPROFILE\sample.omp.json"
oh-my-posh --init --shell pwsh --config $omp_config | Invoke-Expression
```
#### Posh-Git
```
Install-Module posh-git -Repository PSGallery -Force
```
#### Terminal Icons
- Installation
```powershell
Install-Module -Name Terminal-Icons -Repository PSGallery -Force
```
- Update user profile
- Add content to `$env:USERPROFILE\.config\powershell\profile.ps1`
```powershell
Import-Module -Name Terminal-Icons
```
#### PSReadLine
- Installation
```powershell
Install-Module -Name PSReadLine -AllowPrerelease -Scope CurrentUser -Force -SkipPublisherCheck
```
- Update user profile
- Add content to `$env:USERPROFILE\.config\powershell\profile.ps1`
```powershell
# PSReadLine
Set-PSReadLineOption -EditMode Emacs
Set-PSReadLineOption -BellStyle None
Set-PSReadLineKeyHandler -Chord 'Ctrl+d' -Function DeleteChar
Set-PSReadLineOption -PredictionSource History
```
#### FZF
- Installation
```powershell
scoop install fzf
Install-Module -Name PSFzf -Scope CurrentUser -Force
```
- Update user profile
- Add content to `$env:USERPROFILE\.config\powershell\profile.ps1`
```powershell
# Fzf
Import-Module PSFzf
Set-PsFzfOption -PSReadlineChordProvider 'Ctrl+f' -PSReadlineChordReverseHistory 'Ctrl+r'
```
#### Z
```
Install-Module -Name z -Repository PSGallery -Force
```
#### Example of launch script
```powershell
# Alias
Set-Alias vi nvim
Set-Alias ll ls
Set-Alias g git
Set-Alias grep findstr
Set-Alias tig $env:USERPROFILE\app\git\usr\bin\tig.exe
Set-Alias less $env:USERPROFILE\app\git\usr\bin\less.exe
# Prompt
Import-Module posh-git
$omp_config = "$env:USERPROFILE\sample.omp.json"
oh-my-posh --init --shell pwsh --config $omp_config | Invoke-Expression
# Terminal Icons
Import-Module -Name Terminal-Icons
# PSReadLine
Set-PSReadLineOption -EditMode Emacs
Set-PSReadLineOption -BellStyle None
Set-PSReadLineKeyHandler -Chord 'Ctrl+d' -Function DeleteChar
Set-PSReadLineOption -PredictionSource History
# Fzf
Import-Module PSFzf
Set-PsFzfOption -PSReadlineChordProvider 'Ctrl+f' -PSReadlineChordReverseHistory 'Ctrl+r'
# Customized functions
# Add your functions here
```<file_sep>+++
title = "Scheduled task with window service"
description="How to build a configurable scheduled task on Window Service"
+++
## Problem
> *Set up some scheduled tasks running in the background to take care of data updates or syncs every 15 minutes, every day, or every week*
## Solution
### Option 1
## Windows Task Scheduler
* Click the Start button.
* Click Control Panel.
* Click System and Maintenance.
* Click Administrative Tools.
* Double-click Task Scheduler.
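A task can also be registered from the command line with `schtasks`; a minimal sketch (task name, program path and schedule are placeholders):
```
schtasks /Create /TN "MyDataSyncTask" /TR "C:\Apps\MyScheduler\SyncJob.exe" /SC MINUTE /MO 15
```
This registers a task that runs every 15 minutes without writing any service code.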
### Option 2
## Use a Windows Service as task scheduler
### Overview of design
**The design here is a simplified version of what I built for previous projects. In the real world I tailor it for each project and purpose, but the fundamental architecture is the same. IMO, the design below can support most cases that need a scheduled background task service.**
```ini
+----------------------------------+ Register as
| System.ServiceProces ----- |--------o Window Service
+----------------------------------+
| +----------------------------+ |
| | Thread (Infinite) | |
| +----------------------------+ |
| | +----------------------+ | |
| | | MySchedulerService | | |
| | +----------------------+ | |
| | | while ( true ) | | |
| | | { | | |
| | | Task.Process() | | |
| | | } | | |
| | +----------------------+ | |
| +----------------------------+ |
+----------------------------------+
+------------+
| ITask |
+------------+
| Process() |
+------------+
/|\
|
+------------------+
| BaseTask | -----------o Customized Task inherit BaseTask
+------------------+
| lastProcessTime | ------------o Last process time
| intervalTime | ------------o Customize for next process time
| IsReadyProcess() | ------------o Check taks is ready to process
+------------------+
```
### Create a customized Windows Service
### Use ServiceProcess to create a Windows Service
* The System.ServiceProcess namespace allows you to implement, install, and control Windows service applications. Services are long-running executables that run without a user interface.
* The project must have a Main method as the entry point
```cs
namespace MyScheduler
{
using System.ServiceProcess;
class Program
{
static void Main(string[] args)
{
#if DEBUG
MySchedulerService debugService = new MySchedulerService();
debugService.onDebug();
System.Threading.Thread.Sleep(System.Threading.Timeout.Infinite);
#else
ServiceBase.Run(new ServiceBase[] { new MySchedulerService() });
#endif
}
}
}
```
### Create customized ServiceBase
* Implementing a service involves inheriting from the ServiceBase class and defining specific behavior to process when start, stop, pause, and continue commands are passed in, as well as custom behavior and actions to take when the system shuts down.
```cs
namespace MyScheduler
{
    using System;
    using System.IO;
    using System.Collections.Generic;
    using System.Linq;
    using System.Reflection;
    using System.Threading;
public partial class MySchedulerService : System.ServiceProcess.ServiceBase
{
public const string START_SERVICE = "start.service";
public const string STOP_SERVICE = "stop.service";
public void onDebug()
{
OnStart(null);
}
protected override void OnStart(string[] args)
{
            System.IO.File.Create(AppDomain.CurrentDomain.BaseDirectory
                + START_SERVICE);
ThreadStart tsTask = new ThreadStart(TaskLoop);
Thread rtkTask = new Thread(tsTask);
rtkTask.Start();
}
static void TaskLoop()
{
while (true)
{
// Exceute scheduled task
ScheduledTask();
// Then, wait for certain time interval
System.Threading.Thread.Sleep(TimeSpan.FromMilliseconds(500));
}
}
        static void ScheduledTask()
        {
            // Task code which is executed periodically
            try
            {
                // Discover the customized tasks (classes deriving from BaseTask) only once.
                // 'tasks', 'taskSettings' and the TaskSetting member are class-level
                // members not shown in this excerpt.
                if (tasks.Count == 0)
                {
                    var types = Assembly.GetExecutingAssembly()
                                        .GetExportedTypes()
                                        .Where(p => typeof(ITask)
                                        .IsAssignableFrom(p.BaseType));
                    foreach (var t in types)
                    {
                        var task = (ITask)Activator.CreateInstance(t);
                        if (taskSettings.Keys.Contains(t.Name))
                        {
                            task.TaskSetting = taskSettings[t.Name];
                        }
                        tasks.Add(task);
                    }
                }
                // Give every registered task a chance to run; each task decides
                // internally whether its interval has elapsed.
                foreach (var task in tasks)
                {
                    task.Process();
                }
            }
            catch (Exception e)
            {
                // TODO: log the exception
            }
        }
protected override void OnStop()
{
// Insert code here to close all the open IO or conection.
System.IO.File.Create(AppDomain.CurrentDomain.BaseDirectory
+ STOP_SERVICE);
}
private void InitializeComponent()
{
this.ServiceName = "MySchedulerService";
}
protected override void Dispose(bool disposing)
{
OnStop();
base.Dispose(disposing);
}
}
}
```
### Task interface and class
* interface ITask has only one method
```cs
namespace MyScheduler
{
public interface ITask
{
void Process();
}
}
```
* BaseTask
```cs
namespace MyScheduler
{
using System;
public class BaseTask : ITask
{
protected DateTime? lastProcessTime = null;
        // interval time in seconds,
        // e.g. an interval of 1 minute is 60
        protected int intervalTime = 0;
protected bool IsReadyToProcess()
{
bool isReadyToProcess = true;
if (lastProcessTime.HasValue)
{
if (lastProcessTime.Value.AddSeconds(intervalTime) > DateTime.Now)
{
isReadyToProcess = false;
}
}
return isReadyToProcess;
}
public virtual void Process()
{
throw new NotImplementedException();
}
}
}
```
* Sample Task1 and Task2
**Task1**
```cs
namespace MyScheduler
{
using System;
using System.Linq;
using System.Data.Entity;
public class Task1 : BaseTask
{
public override void Process()
{
if (base.IsReadyToProcess())
{
System.IO.File.Create(AppDomain.CurrentDomain.BaseDirectory
+ "Task-1-" + DateTime.Now.ToString("dd-MM-yyyy"));
}
}
}
}
```
**Task2**
```cs
namespace MyScheduler
{
using System;
using System.Linq;
using System.Data.Entity;
public class Task2 : BaseTask
{
public override void Process()
{
if (base.IsReadyToProcess())
{
System.IO.File.Create(AppDomain.CurrentDomain.BaseDirectory
+ "Task-2-"
+ DateTime.Now.ToString("dd-MM-yyyy"));
}
}
}
}
```
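Note that the sample tasks above never set `intervalTime` or refresh `lastProcessTime`, so `IsReadyToProcess` always returns true and they run on every loop pass. A task that actually honours the interval could look like the following sketch (an added example using the `BaseTask` members shown earlier):
```cs
namespace MyScheduler
{
    using System;

    // Runs its work at most once every 15 minutes (900 seconds).
    public class SampleSyncTask : BaseTask
    {
        public SampleSyncTask()
        {
            intervalTime = 900;
        }

        public override void Process()
        {
            if (base.IsReadyToProcess())
            {
                // ... do the actual data update / sync work here ...
                lastProcessTime = DateTime.Now; // remember when we last ran
            }
        }
    }
}
```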
## Create the Windows Service installer
* In Solution Explorer, double-click MySchedulerService.cs.
* In the Code Editor window, right-click `Design View`, and then click `Properties`.
* In the `Properties` pane, click the `Add Installer` link.
* In the Properties pane for MySchedulerServiceInstaller, change the `ServiceName` property to `MySchedulerService`.
* In the Code Editor window in `Design view`, click `MySchedulerServiceProcessInstaller`.
* In the Properties pane, change the `Account` property to `LocalSystem` (the `LocalService` and `NetworkService` values are available only on Windows XP and later).
**ProjectInstaller**
* It inherits from System.Configuration.Install.Installer. This is the base class for all custom installers in the .NET Framework. Installers are components that help install applications on a computer.
```cs
namespace MyScheduler
{
[RunInstaller(true)]
public partial class ProjectInstaller : System.Configuration.Install.Installer
{
public ProjectInstaller()
{
InitializeComponent();
}
private void MySchedulerServiceInstaller_AfterInstall(
object sender, InstallEventArgs e)
{
}
}
}
```
## Install the Windows Service
```bash
C:\Windows\Microsoft.Net\Framework\v4.0.30319\InstallUtil.exe /i <app_path>\MyScheduler.exe
```
## Uninstall the Windows Service
```bash
C:\Windows\Microsoft.Net\Framework\v4.0.30319\InstallUtil.exe /u <app_path>\MyScheduler.exe
```<file_sep>+++
title = "Cloud"
weight = 3
+++
{{%children style="card" description="true" %}}
<file_sep>#!/bin/bash
VPC_ID="vpc-04c97179894199703"
AWS_REGION="ap-southeast-2"
# Create a NACL for private subnet
NACL_PRVSUB_ID=$(aws ec2 create-network-acl \
--vpc-id $VPC_ID \
--query 'NetworkAcl.{NetworkAclId:NetworkAclId}' \
--output text \
--region $AWS_REGION)
echo $NACL_PRVSUB_ID
# Create NACL rule
aws ec2 create-network-acl-entry \
--network-acl-id $NACL_PRVSUB_ID \
--ingress --rule-number 100 \
--protocol udp --port-range \
From=53,To=53 --cidr-block 0.0.0.0/0 \
--rule-action allow
# Describe NACL
aws ec2 describe-network-acls \
    --network-acl-ids $NACL_PRVSUB_ID \
--output text
<file_sep>+++
title = "AWS: S3 - 2"
description = "S3 Part 2 - Access & Management"
weight=7
+++
## S3 Part 2
### Access
#### Access status
The list buckets view shows whether your bucket is publicly accessible. Amazon S3 labels the permissions for a bucket as follows:
* Public – Everyone has access to one or more of the following: List objects, Write objects, Read and write permissions.
* Objects can be public – The bucket is not public, but anyone with the appropriate permissions can grant public access to objects.
* Buckets and objects not public – The bucket and objects do not have any public access.
* Only authorized users of this account – Access is isolated to IAM users and roles in this account and AWS service principals because there is a policy that grants public access.
### Management
Bucket and object permissions are independent of each other. An object does not inherit the permissions from its bucket. For example, if you create a bucket and grant write access to a user, you can't access that user’s objects unless the user explicitly grants you access.
Each permission you grant for a user or a group adds an entry in the ACL that is associated with the object. The ACL lists grants, which identify the grantee and the permission granted. ACLs are resource-based access policies that grant access permissions to buckets and objects.
### Block public access settings
S3 Block Public Access provides four settings. You can apply these settings in any combination to individual access points, buckets, or entire AWS accounts. If you apply a setting to an account, it applies to all buckets and access points that are owned by that account. Similarly, if you apply a setting to a bucket, it applies to all access points associated with that bucket.
#### ACLs
Amazon S3 considers a bucket or object ACL public if it grants any permissions to members of the predefined AllUsers or AuthenticatedUsers groups.
#### Policies
When evaluating a bucket policy, Amazon S3 begins by assuming that the policy is public. It then evaluates the policy to determine whether it qualifies as non-public. To be considered non-public, a bucket policy must grant access only to fixed values (values that don't contain a wildcard).
#### Access points
Amazon S3 evaluates block public access settings slightly differently for access points compared to buckets. The rules that Amazon S3 applies to determine when an access point policy is public are generally the same for access points as for buckets, except in the following situations:
* An access point that has a VPC network origin is always considered non-public, regardless of the contents of its access point policy.
* An access point policy that grants access to a set of access points using s3:DataAccessPointArn is considered public.
#### Permissions
| Operation | Required permissions
|:----|---
| GET bucket policy status | s3:GetBucketPolicyStatus
| GET bucket Block Public Access settings | s3:GetBucketPublicAccessBlock
| PUT bucket Block Public Access settings | s3:PutBucketPublicAccessBlock
| DELETE bucket Block Public Access settings | s3:PutBucketPublicAccessBlock
| GET account Block Public Access settings | s3:GetAccountPublicAccessBlock
| PUT account Block Public Access settings | s3:PutAccountPublicAccessBlock
| DELETE account Block Public Access settings | s3:PutAccountPublicAccessBlock
| PUT access point Block Public Access settings | s3:PutAccessPointPublicAccessBlock
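As a quick, hedged illustration of these operations (the bucket name and settings below are placeholders), the policy status and Block Public Access configuration can be read and written with the `s3api` commands:
```bash
# Requires the s3:GetBucketPolicyStatus / s3:*BucketPublicAccessBlock permissions listed above.
aws s3api get-bucket-policy-status --bucket my-example-bucket

# Turn on all four Block Public Access settings for the bucket.
aws s3api put-public-access-block \
  --bucket my-example-bucket \
  --public-access-block-configuration \
    BlockPublicAcls=true,IgnorePublicAcls=true,BlockPublicPolicy=true,RestrictPublicBuckets=true

# Read the current settings back.
aws s3api get-public-access-block --bucket my-example-bucket
```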
<file_sep>+++
date = "2011-03-09T10:59:31+11:00"
title = "CMMI 2.0"
description="CMMI Version 2.0"
weight=1
+++
## CMMI 2.0
### Guiding Principles
* Focus on delivering business value
* Focus on business performance and results
* Flexible, scalable architecture
* Reduce redundancy
* Use of plain language
* Increased use of graphics and iconography for understanding and adoption
### Key Improvements
* Demonstrate the value and ROI of adopting CMMI
Performance capabilities built-in at every level of the model will help organizations to:
* Understand performance needs
* Establish performance goals
* Track, measure and achieve those goals
* Improve the overall value for CMMI appraisals and lower time, effort and cost of the appraisal process
* New appraisal method intended to improve confidence and reliability of results and lower total life cycle costs of appraisals by decreasing the appraisal preparation effort for the appraised organization.
* Organizations can extend the validity of benchmark appraisals through the lighter-weight Sustainment appraisal.
* Keep CMMI current and up to date with the latest trends and methodologies used in the market
* Scalable architecture platform to include additional method guidance, such as built-in agile with Scrum guidance.
* Ability to add new content, such as Safety and Security, to address critical business needs
* Make CMMI easier to use and more user friendly
* Non-technical language makes it easier for users to read and understand the model.
* Online platform allows users to tailor the model to fit specific organizational needs.
* Tools provide guidance for the successful adoption of CMMI and the transition to V2.0
* Model, training, and usage guidance will be translated into several languages.
### Product Suite
#### Model
Clear pathway to performance improvement. Simplified for accelerated adoption.
#### Appraisal Method
A new appraisal method helps to increase reliability while reducing overall cost.
#### Training & Certification
Updated training has modular components with virtual and in-person options. The training is more learning objective oriented.
#### Systems & Tools
Redesigned system to access online models and resources.
#### Adoption Guidance
Guidance to help new adopters and users get started with CMMI V2.0. Provides a smooth transition from CMMI V1.3 to V2.0
### Transformation
CMMI V2.0 transforms to a business performance model
The addition of the intent and value statements
* The Intent statement provides insight into why the practice area is important to take into consideration in your process
* The Value statement explains the business impact achieved when the intent of the practice area and individual practices are met
### Appraisal Changes
* Simplified presentation of information
* Types of appraisals
* Benchmark
* Sustainment
* Evaluation
* Action Plan Reappraisal
* Model Views
* Benchmark
* Custom
* Using Building and Sustaining Capability practice areas (GOV, II)
* Appraisal Team Members
* ATM Qualification: CMMI Associate
* High Maturity: Complete High Maturity Concepts
* Random Sampling
* Data adequacy and sufficiency
* Characterization
* Rating Process
* Performance Report
* Validity period for appraisals
### Capability Areas
#### DOING
Doing includes Capability Areas for producing, buying, and delivering quality solutions.
Ensuring Quality (ENQ) – Helps to improve product and service quality.
Requirements Development and Management enables developing and keeping updated a common understanding of needs and expectations for the solution.
* Intent: Elicit requirements, ensure common understanding by stakeholders, and align requirements, plans, and work products.
* Value: Ensures that customer’s needs and expectations are satisfied
Process Quality Assurance ensures the process is followed and quality solutions are produced
* Intent: Identify causes of selected outcomes and take action to either prevent recurrence of undesirable outcomes or ensure recurrence of positive outcomes.
* Value: Addressing root cause issues eliminates rework and directly improves quality and productivity.
Verification and Validation ensures requirements are met and the solution functions as intended in the target environment.
* Intent: Verification and validation includes activities that confirm selected solutions and components meet their requirements and validate selected solutions and components fulfill their intended use in their target environment.
* Value: Verification and validation of selected solutions and components throughout the project increases the likelihood that the solution will satisfy the customer.
Peer Reviews identify solution defects or issues.
* Intent: Identify and address work product issues through reviews by the producer’s peers or Subject Matter Experts (SMEs).
* Value: Reduce cost and rework by uncovering issues or defects early.
Engineering and Developing Products (EDP) (Development View)
Technical Solution focuses on designing and building products and product components.
* Intent: Design and build solutions that meet customer requirements.
* Value: Provides a cost-effective design and solution that meets customer requirements and reduces rework.
Product Integration covers the assembly of the products and product components and their delivery to the customer and ensures inclusion of required functionality and quality characteristics.
* Intent: Integrate and deliver the solution that addresses functionality and quality requirements.
* Value: Increases customers’ satisfaction by giving them a solution that meets or exceeds their functionality and quality requirements.
Delivering and Managing Services (Services View)
Service Delivery Management includes delivering services in accordance with the established service level agreements.
* Intent: Deliver services and manage the service delivery system.
* Value: Increase customer satisfaction by delivering services that meet or exceed customer expectations.
Strategic Service Management includes developing and keeping a portfolio of updated standard services that are compatible with strategic needs and plans.
* Intent: Develop and deploy standard services that are compatible with strategic business needs and plans.
* Value: Increases likelihood of meeting business objectives by aligning standard services with customer needs.
Supplier Agreement Management involves: developing and keeping updated the supplier agreement and ensuring that the supplier and the buyer perform according to the terms of the supplier agreement
* Intent: Establish an agreement with selected suppliers, ensure that the supplier and the acquirer perform according to the terms over the course of the agreement, and evaluate the supplier’s deliverables.
* Value: Provides an explicit understanding between the acquirer and supplier to maximize the success of agreed-on efforts to deliver a supplier deliverable.
#### MANAGING
Managing includes capability areas for planning and managing work and the workforce.
Planning and Managing Work – involves determining the amount of work that needs to be done, planning and scheduling the work, and then ensuring the work is being done in accordance with the plans and schedules. It also ensures that resources are adequate to meet the plan and schedule.
Estimating includes forecasting the size, effort, and cost for the work required to develop, acquire, or deliver the solution
* Intent: Estimate the size, effort, duration, and cost of the work and resources needed to develop, acquire, or deliver the solution.
* Value: Estimation provides a basis for making commitments, planning, and reducing uncertainty, which allows for early corrective actions and increases the likelihood of meeting objectives.
Planning involves: Using the estimates to develop a work plan, schedule, and budget; Determining the necessary resources to accomplish the plan, within schedule and budget; Obtaining commitment to the work plan from stakeholders
* Intent: Develop plans to describe what is needed to accomplish the work within the standards and constraints of the organization, including the: Budget; Schedule; Resource demand, capacity and availability; Quality; Functionality requirements; Risks and opportunities
Plans also describe: The work to be performed; Applicable organizational set of standard processes, assets, and tailoring guidelines; Dependencies; Who performs the work; Relationships with other plans; Stakeholders and their role
* Value: Optimizes cost, functionality, and quality to increase the likelihood of meeting objectives.
Monitor and Control provides an understanding of progress so appropriate corrective actions can be taken when performance deviates significantly from the plan, schedule, and budget.
* Intent: Provide an understanding of the project progress so appropriate corrective actions can be taken when performance deviates significantly from plans.
* Value: Increases the probability of meeting objectives by taking early actions to adjust for significant performance deviations.
Managing Business Resilience addresses the ability to anticipate, prepare for, and respond to interruptions in order to continue operations. It involves identifying, evaluating, prioritizing and handling risks. It ensures timely and effective resolution and prevention of interruptions to minimize the impact on business operations and ensures the best possible level of service quality. It addresses defining a minimum set of critical functions that must continue in the event of significant interruption of normal operations.
Risk and Opportunity Management includes: Identifying threats and opportunities; Evaluating their likelihood of occurrence and impact; Mitigating potential threats; Leveraging potential opportunities
* Intent: Identify, record, analyze, and manage potential risks or opportunities.
* Value: Mitigate adverse impacts or capitalize on positive impacts to increase the likelihood of meeting objectives.
Incident Resolution and Prevention includes: Identifying actual and potential incidents that may impact delivery; Establishing the approach for addressing incidents as they occur; Analyzing incidents to prevent recurrence
* Intent: Resolve and prevent disruptions promptly to sustain service delivery levels.
* Value: Minimize the impact of disruptions to meet objectives and customer commitments more effectively.
Managing the Workforce addresses the way an organization develops and retains the human resources needed to perform current and future work.
Organizational Training provides a strategy and capability for training to support the organization’s strategic business objectives, meet common tactical needs, and deliver training across the organization.
* Intent: Develop the skills and knowledge of personnel so they perform their roles efficiently and effectively.
* Value: Enhances individuals’ skills and knowledge to improve organizational work performance.
Practice Summary
#### ENABLING
Enabling focuses on analyzing causes, making decisions, maintaining integrity of work products, and communicating to stakeholders.
Supporting Implementation involves identifying and addressing the causes of selected outcomes, creating a decision-making approach and structure, maintaining the integrity of work products, and fostering communication and coordination among stakeholders.
Causal Analysis and Resolution identifies causes of selected outcomes and acts to either prevent reoccurrence of undesirable outcomes or ensure reoccurrence of positive outcomes.
* Intent: Identify causes of selected outcomes and take action to either prevent recurrence of undesirable outcomes or ensure recurrence of positive outcomes
* Value: Addressing root cause issues eliminates rework and directly improves quality and productivity.
Decision Analysis and Resolution aids in making decisions using criteria-based evaluation of alternatives and recording the results.
* Intent: Make and record decisions using a recorded process that analyzes alternatives
* Value: Increases the objectivity of decision making and the probability of selecting the optimal solution.
Configuration Management establishes and maintains the integrity of work products using configuration identification, control, and audits.
* Intent: Manage the integrity of work products using configuration identification, version control, change control, and audits.
* Value: Reduces loss of work and increases the ability to deliver the correct version of the solution to the customer.
#### IMPROVING
Improving involves developing, managing, and improving processes and their related assets with a primary focus on improving organizational performance.
Sustaining Habit and Persistence ensures that processes are persistent and habitually performed and sustained throughout the organization and effectively contribute to meeting business performance objectives.
Governance provides guidance to senior management on their role in ensuring that work is performed in a way that is relevant and important to the business and organization.
* Intent: Provides guidance to senior management on their role in the sponsorship and governance of process activities.
* Value: Minimizes the cost of process implementation, increases the likelihood of meeting objectives, and ensures that the implemented processes support and contribute to the success of the business.
Implementation Infrastructure provides a framework that ensures the processes of an organization are persistently used and improved.
* Intent: Ensure that the processes important to an organization are persistently and habitually used and improved.
* Value: Sustains the ability to consistently achieve goals and objectives efficiently and effectively.
Improving Performance focuses on measuring, analyzing and understanding an organization’s or project’s capability and performance along with their process improvement priorities and infrastructure needs. Once this is understood, the organization or project can identify performance and process improvement actions and assets needed to continually improve capability and performance.
Process Management develops capabilities and improves performance though planning, implementing, and deploying improvements based on a thorough understanding of the current strengths and weaknesses of the organization’s processes and process assets.
* Intent: Manages and implements the continuous improvement of processes and infrastructure to: Support accomplishing business objectives; Identify and implement the most beneficial process improvements; Make the results of process improvement visible, accessible, and sustainable
* Value: Ensures that processes, infrastructure, and their improvement contribute to successfully meeting business objectives.
Process Asset Development develops and keeps updated a usable set of organizational processes and process assets for performing the work.
* Intent: Develop and keep updated the process assets necessary to perform the work.
* Value: Provides a capability to understand and repeat successful performance
Managing Performance and Measurement involves:
Ensuring that benefits and business objectives are the leading factors driving performance and improvement
Changing the paradigm: from "process improvement leads to performance improvement" to "performance needs are the primary drivers of process improvement"; using the results of measurement and analysis to manage and control performance at various work and business levels
* Intent: Manage performance using measurement and analysis to achieve business objectives.
* Value: Maximizes business return on investment by focusing management and improvement efforts on cost, schedule, and quality performance.
Selecting and Managing Suppliers (Supplier Management View) establishes the buyer and supplier partnership to ensure that quality solutions are delivered to the customer and end user
Supplier Source Selection involves: Selecting one or more suppliers to deliver the solution; Preparing a solicitation package; Evaluating the supplier’s solution and managing selected connections of that solution
* Intent: Develop and keep updated a package of materials used to seek proposals from potential suppliers and select one or more suppliers to deliver the solution.
* Value: Improves the ability to select the most qualified suppliers to deliver solutions.
<file_sep>+++
title = "C# - .Net Core 6"
description = ".Net Core 6 - Part 1"
draft=true
+++
## .Net Core 6
<file_sep>+++
title = "AWS : CLI - 1"
description = "AWS CLI & Sample"
weight=2
+++
### AWS CLI
The AWS Command Line Interface (AWS CLI) is a unified tool to manage your AWS services. With just one tool to download and configure, you can control multiple AWS services from the command line and automate them through scripts.
### CLI installation (Version 1)
#### Install Python 3
* Python 2.7 is no longer supported. Please install python3
sudo yum install python3
pip3 --version
### Profile Setup
A named profile is a collection of settings and credentials that you can apply to a AWS CLI command. When you specify a profile to run a command, the settings and credentials are used to run that command.
* `~/.aws/credentials`
[default]
aws_access_key_id=your_aws_access_key_id
aws_secret_access_key=your_aws_secret_access_key
* `~/.aws/config`
[default]
region=us-west-2
output=json
* The AWS CLI supports using any of multiple named profiles that are stored in the config and credentials files.
* `~/.aws/credentials` (Linux / Mac)
[default]
aws_access_key_id=your_aws_access_key_id
aws_secret_access_key=your_aws_secret_access_key
[user1]
aws_access_key_id=your_aws_access_key_id
aws_secret_access_key=your_aws_secret_access_key
* `~/.aws/config` (Linux & Mac)
[default]
region=us-west-2
output=json
[profile user1]
region=us-east-1
output=text
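As a minimal sketch of how a named profile is selected (reusing the `user1` profile defined above; the commands themselves are just examples):
```bash
# Pass the profile on a single command ...
aws s3 ls --profile user1
# ... or set it for the whole shell session.
export AWS_PROFILE=user1
aws sts get-caller-identity
```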
### Samples
#### EC2
* Get EC2 instances
aws ec2 describe-instances \
--filters "Name=instance-type,Values=t2.micro" \
--query "Reservations[].Instances[].InstanceId" \
--profile user1
* Stop EC2 instance
aws ec2 stop-instances --instance-ids i-1234567890abcdef0
* Find out all stopped micro EC2 instances
instances=($(aws ec2 describe-instances \
--filters Name=instance-type,Values=t2.micro \
Name=instance-state-name,Values=stopped \
--query "Reservations[].Instances[].InstanceId" \
--profile ad1 --output text ))
for i in "${instances[@]}"; do echo "$i"; done
* Find out all unused security group
comm -23 \
<(aws ec2 describe-security-groups \
--profile user1 --query 'SecurityGroups[*].GroupId' \
--output text \
| tr '\t' '\n'| sort) \
<(aws ec2 describe-instances \
--profile user1 --output text \
--query 'Reservations[*].Instances[*].SecurityGroups[*].GroupId' \
| tr '\t' '\n' | sort | uniq)
* Find security group by group name
aws ec2 describe-security-groups \
--filters Name=group-name,Values=AD1_Web_Pub_SG \
--query="SecurityGroups[*].{ID:GroupId,Tags:Tags[*]}" \
--profile user1
* Search security group by tags
aws ec2 describe-security-groups \
--query="SecurityGroups[*].{GroupName:GroupName,ID:GroupId,Tags:Tags[*]}" \
--profile user1 | grep "your_group_name"
* Search EC2 by specific tag value
aws ec2 describe-instances \
--filters "Name=tag-value,Values=AD1_DEV_Web_Pub_Host_A" \
--profile user1
#### S3
* List Buckets
aws s3 ls --profile user1
* List content within the bucket
aws s3 ls --profile user1 s3://your_bucket_name --recursive
* Sync everything to current directory
aws s3 sync --profile user1 s3://your_bucket_name .
* Copy local file to S3 bucket
aws s3 cp --profile user1 file_name s3://your_bucket_name/file_name
<file_sep>+++
title = "F# Collections 3"
description = "F# Seq, Slice & Map"
weight = 7
+++
## Seq
A sequence is a logical series of elements all of one type. Sequences are particularly useful when you have a large, ordered collection of data but do not necessarily expect to use all of the elements. Individual sequence elements are computed only as required, so a sequence can provide better performance than a list in situations in which not all the elements are used.
```fsharp
// Sequence that has an increment.
seq { 0 .. 10 .. 100 }
// uses the -> operator, which allows you to specify an expression
// whose value will become a part of the sequence.
seq { for i in 1 .. 10 -> i * i }
// specify the do keyword, with an optional yield that follows:
seq { for i in 1 .. 10 do yield i * i }
// The 'yield' is implicit and doesn't need to be specified in most cases.
seq { for i in 1 .. 10 do i * i }
// The following code generates a list of coordinate pairs along with
// an index into an array that represents the grid.
// Note that the first for expression requires a do to be specified.
let (height, width) = (10, 10)
let sequence = seq {
for row in 0 .. width - 1 do
for col in 0 .. height - 1 ->
(row, col, row*width + col)
}
for e in sequence do
printfn "%A" e
// (0, 0, 0)
// (0, 1, 1)
// (0, 2, 2)
// ...
// (0, 9, 9)
// (1, 0, 10)
// (1, 1, 11)
// (1, 2, 12)
// ...
// (9, 9, 99)
```
### yield! keyword
```fsharp
// Repeats '1 2 3 4 5' ten times
seq {
for _ in 1..10 do
yield! seq { 1; 2; 3; 4; 5}
}
// When yield! is used in an expression, all other single values
// must use the yield keyword:
// Combine repeated values with their values
seq {
for x in 1..10 do
yield x
yield! seq { for i in 1..x -> i}
}
// Prime number
// Recursive isprime function.
let isprime n =
let rec check i =
i > n/2 || (n % i <> 0 && check (i + 1))
check 2
let aSequence =
seq {
for n in 1..100 do
if isprime n then
n
}
for x in aSequence do
printfn "%d" x
// output
// 1 2 3 5 7 ...47 53 ... 89 97
// Following example creates a multiplication table that consists of
// tuples of three elements, each consisting of two factors and the product
let multiplicationTable =
seq {
for i in 1..9 do
for j in 1..9 ->
(i, j, i*j)
}
// Yield the values of a binary tree in a sequence.
type Tree<'a> =
| Tree of 'a * Tree<'a> * Tree<'a>
| Leaf of 'a
// inorder : Tree<'a> -> seq<'a>
let rec inorder tree =
seq {
match tree with
| Tree(x, left, right) ->
yield! inorder left
yield x
yield! inorder right
| Leaf x -> yield x
}
let mytree = Tree(6, Tree(2, Leaf(1), Leaf(3)), Leaf(9))
let seq1 = inorder mytree
printfn "%A" seq1
// output
// seq [1; 2; 3; 6; ...]
```
### Functions
```fsharp
// Using Seq.empty, or you can create a sequence of just one
// specified element by using Seq.singleton.
let seqEmpty = Seq.empty
let seqOne = Seq.singleton 10
printfn "%A" seqOne
// output: seq [10]
// use Seq.init to create a sequence for which the elements are created
// by using a function that you provide. You also provide a size for
// the sequence. This function is just like List.init, except that the
// elements are not created until you iterate through the sequence.
let seqFirst5MultiplesOf10 = Seq.init 5 (fun n -> n * 10)
Seq.iter (fun elem -> printf "%d " elem) seqFirst5MultiplesOf10
// The output is
// 0 10 20 30 40
// Convert an array to a sequence by using a cast.
let seqFromArray1 = [| 1 .. 10 |] :> seq<int>
// Convert an array to a sequence by using Seq.ofArray.
let seqFromArray2 = [| 1 .. 10 |] |> Seq.ofArray
// using Seq.cast, you can create a sequence from a weakly typed
// collection, such as those defined in System.Collections.
open System
let arr = ResizeArray<int>(10)
for i in 1 .. 10 do
arr.Add(10)
let seqCast = Seq.cast arr
// Seq.unfold generates a sequence from a computation function that takes
// a state and transforms it to produce each subsequent element in the sequence
let seq1 =
0 // Initial state
|> Seq.unfold (fun state ->
if (state > 20) then
None
else
Some(state, state + 1))
printfn "The sequence seq1 contains numbers from 0 to 20."
for x in seq1 do
printf "%d " x
// output
// The sequence seq1 contains numbers from 0 to 20.
// 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20
let fib =
(1, 1) // Initial state
|> Seq.unfold (fun state ->
if (snd state > 1000) then
None
else
Some(fst state + snd state, (snd state, fst state + snd state)))
printfn "\nThe sequence fib contains Fibonacci numbers."
for x in fib do printf "%d " x
// output
// The sequence fib contains Fibonacci numbers.
// 2 3 5 8 13 21 34 55 89 144 233 377 610 987 1597
```
##### Example
```fsharp
// generateInfiniteSequence generates sequences of floating point
// numbers. The sequences generated are computed from the fDenominator
// function, which has the type (int -> float) and computes the
// denominator of each term in the sequence from the index of that
// term. The isAlternating parameter is true if the sequence has
// alternating signs.
let generateInfiniteSequence fDenominator isAlternating =
if (isAlternating) then
Seq.initInfinite (fun index ->
1.0 /(fDenominator index) * (if (index % 2 = 0) then -1.0 else 1.0))
else
Seq.initInfinite (fun index -> 1.0 /(fDenominator index))
// The harmonic alternating series is like the harmonic series
// except that it has alternating signs.
let harmonicAlternatingSeries = generateInfiniteSequence (fun index -> float index) true
// This is the series of reciprocals of the odd numbers.
let oddNumberSeries = generateInfiniteSequence (fun index -> float (2 * index - 1)) true
// This is the series of reciprocals of the squares.
let squaresSeries = generateInfiniteSequence (fun index -> float (index * index)) false
// This function sums a sequence, up to the specified number of terms.
let sumSeq length sequence =
(0, 0.0)
|>
Seq.unfold (fun state ->
let subtotal = snd state + Seq.item (fst state + 1) sequence
if (fst state >= length) then
None
else
Some(subtotal, (fst state + 1, subtotal)))
// This function sums an infinite sequence up to a given value
// for the difference (epsilon) between subsequent terms,
// up to a maximum number of terms, whichever is reached first.
let infiniteSum infiniteSeq epsilon maxIteration =
infiniteSeq
|> sumSeq maxIteration
|> Seq.pairwise
|> Seq.takeWhile (fun elem -> abs (snd elem - fst elem) > epsilon)
|> List.ofSeq
|> List.rev
|> List.head
|> snd
// Compute the sums for three sequences that converge, and compare
// the sums to the expected theoretical values.
let result1 = infiniteSum harmonicAlternatingSeries 0.00001 100000
printfn "Result: %f ln2: %f" result1 (log 2.0)
// output
// Result: 0.693152 ln2: 0.693147
let pi = Math.PI
let result2 = infiniteSum oddNumberSeries 0.00001 10000
printfn "Result: %f pi/4: %f" result2 (pi/4.0)
// output
// Result: 0.785373 pi/4: 0.785398
// Because this is not an alternating series, a much smaller epsilon
// value and more terms are needed to obtain an accurate result.
let result3 = infiniteSum squaresSeries 0.0000001 1000000
printfn "Result: %f pi*pi/6: %f" result3 (pi*pi/6.0)
// output
// Result: 1.644618 pi*pi/6: 1.644934
```
#### Transforming
```fsharp
// Seq.pairwise creates a new sequence in which successive elements of the input
// sequence are grouped into tuples.
let printSeq seq1 = Seq.iter (printf "%A ") seq1; printfn ""
let seqPairwise = Seq.pairwise (seq { for i in 1 .. 10 -> i*i })
printSeq seqPairwise
// output
// (1, 4) (4, 9) (9, 16) (16, 25) (25, 36) (36, 49) (49, 64) (64, 81) (81, 100)
printfn ""
let seqDelta = Seq.map (fun elem -> snd elem - fst elem) seqPairwise
printSeq seqDelta
// output
// 3 5 7 9 11 13 15 17 19
// Seq.windowed is like Seq.pairwise, except that instead of producing a
// sequence of tuples, it produces a sequence of arrays that contain copies
// of adjacent elements (a window) from the sequence. You specify the number
// of adjacent elements you want in each array.
let seqNumbers = [ 1.0; 1.5; 2.0; 1.5; 1.0; 1.5 ] :> seq<float>
let seqWindows = Seq.windowed 3 seqNumbers
let seqMovingAverage = Seq.map Array.average seqWindows
printfn "Initial sequence: "
printSeq seqNumbers
// 1.0 1.5 2.0 1.5 1.0 1.5
printfn "\nWindows of length 3: "
printSeq seqWindows
// [|1.0; 1.5; 2.0|] [|1.5; 2.0; 1.5|] [|2.0; 1.5; 1.0|] [|1.5; 1.0; 1.5|]
printfn "\nMoving average: "
printSeq seqMovingAverage
// 1.5 1.666666667 1.5 1.333333333
```
#### Sorting, Comparing & Grouping
```fsharp
// Seq.compareWith function. The function compares successive elements in
// turn, and stops when it encounters the first unequal pair. Any additional
// elements do not contribute to the comparison.
let sequence1 = seq { 1 .. 10 }
let sequence2 = seq { 10 .. -1 .. 1 }
// Compare two sequences element by element.
let compareSequences =
Seq.compareWith (fun elem1 elem2 ->
if elem1 > elem2 then 1
elif elem1 < elem2 then -1
else 0)
let compareResult1 = compareSequences sequence1 sequence2
match compareResult1 with
| 1 -> printfn "Sequence1 is greater than sequence2."
| -1 -> printfn "Sequence1 is less than sequence2."
| 0 -> printfn "Sequence1 is equal to sequence2."
| _ -> failwith("Invalid comparison result.")
// val compareResult1: int = -1
// Seq.countBy takes a function that generates a value called a key for
// each element. A key is generated for each element by calling this function
// on each element. Seq.countBy then returns a sequence that contains the key
// values, and a count of the number of elements that generated each value of
// the key.
let mySeq1 = seq { 1.. 100 }
let printSeq seq1 = Seq.iter (printf "%A ") seq1
let seqResult =
mySeq1
|> Seq.countBy (fun elem ->
if elem % 3 = 0 then 0
elif elem % 3 = 1 then 1
else 2)
printSeq seqResult
// (1, 34) (2, 33) (0, 33)
// Seq.groupBy takes a sequence and a function that generates a key from an
// element. The function is executed on each element of the sequence.
// Seq.groupBy returns a sequence of tuples, where the first element of each
// tuple is the key and the second is a sequence of elements that produce
// that key.
let sequences = seq { 1 .. 100 }
let printSeq seq1 = Seq.iter (printf "%A ") seq1
let sequences3 =
sequences
|> Seq.groupBy (fun index ->
if (index % 3 = 0) then 0
elif (index % 3 = 1) then 1
else 2)
sequences3 |> printSeq
// (1, seq [1; 4; 7; 10; ...]) (2, seq [2; 5; 8; 11; ...]) (0, seq [3; 6; 9; 12; ...])
// Seq.distinct. Or Seq.distinctBy, which takes a key-generating function to
// be called on each element. The resulting sequence contains elements of the
// original sequence that have unique keys; later elements that produce a
// duplicate key to an earlier element are discarded.
let binary n =
let rec generateBinary n =
if (n / 2 = 0) then [n]
else (n % 2) :: generateBinary (n / 2)
generateBinary n
|> List.rev
|> Seq.ofList
printfn "%A" (binary 1024)
// [1; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0]
let resultSequence = Seq.distinct (binary 1024)
printfn "%A" resultSequence
// seq [1; 0]
let inputSequence = seq { -5 .. 10 }
let printSeq seq1 = Seq.iter (printf "%A ") seq1
printfn "Original sequence: "
printSeq inputSequence
// -5 -4 -3 -2 -1 0 1 2 3 4 5 6 7 8 9 10
printf ""
printfn "\nSequence with distinct absolute values: "
let seqDistinctAbsoluteValue = Seq.distinctBy (fun elem -> abs elem) inputSequence
printSeq seqDistinctAbsoluteValue
// -5 -4 -3 -2 -1 0 6 7 8 9 10
```
## Slice
In F#, a slice is a subset of any data type. Slices are similar to indexers, but instead of yielding a single value from the underlying data structure, they yield multiple ones. Slices use the .. operator syntax to select the range of specified indices in a data type.
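A minimal sketch of slicing an array and a string with the `..` syntax:
```fsharp
// Slices return a copy of the selected range; ranges are inclusive.
let arr = [| 0; 1; 2; 3; 4; 5 |]
let middle = arr.[1..3]       // [|1; 2; 3|]
let fromStart = arr.[..2]     // [|0; 1; 2|]
let toEnd = arr.[3..]         // [|3; 4; 5|]
let word = "fsharp".[0..1]    // "fs"
```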
## Map
Immutable maps based on binary trees, where keys are ordered by F# generic comparison. By default comparison is the F# structural comparison function or uses implementations of the IComparable interface on key values.
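A short example of building and querying an immutable map (the key/value data is made up for illustration):
```fsharp
// Map.ofList builds the map; Add returns a new map and leaves the original unchanged.
let prices = Map.ofList [ "apple", 1.5; "banana", 0.5 ]
let prices2 = prices.Add("cherry", 3.0)
printfn "%b" (prices2.ContainsKey "apple")    // true
match prices2.TryFind "banana" with
| Some p -> printfn "banana costs %.2f" p     // banana costs 0.50
| None -> printfn "no banana"
```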
<file_sep>+++
title = "AWS: VPC - 1"
description = "VPC - Virtual Private Cloud: Subnet, Internet Gateway, Virtual Gateway etc. "
weight=3
+++
## VPC Part 1
Amazon Virtual Private Cloud (Amazon VPC) enables you to launch AWS resources into a virtual network that you've defined. This virtual network closely resembles a traditional network that you'd operate in your own data center, with the benefits of using the scalable infrastructure of AWS.
### Key concepts
* A virtual private cloud (VPC) is a virtual network dedicated to your AWS account.
* A subnet is a range of IP addresses in your VPC.
* A route table contains a set of rules, called routes, that are used to determine where network traffic is directed.
* An internet gateway is a horizontally scaled, redundant, and highly available VPC component that allows communication between instances in your VPC and the internet. It therefore imposes no availability risks or bandwidth constraints on your network traffic.
* A VPC endpoint enables you to privately connect your VPC to supported AWS services and VPC endpoint services powered by PrivateLink without requiring an internet gateway, NAT device, VPN connection, or AWS Direct Connect connection. Instances in your VPC do not require public IP addresses to communicate with resources in the service. Traffic between your VPC and the other service does not leave the Amazon network.
### IP addressing
* Once the VPC is created, its CIDR block range can NOT be changed.
* To change CIDR size, you need to create a new VPC
* The different subnets within a VPC can NOT overlap.
* Can expand VPC by adding secondary IPv4 CIDR blocks
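As a small illustration of the last point (the VPC ID and CIDR below are placeholder values), a secondary IPv4 CIDR block can be associated with an existing VPC:
```bash
# The secondary block must not overlap the VPC's existing CIDR blocks.
aws ec2 associate-vpc-cidr-block \
  --vpc-id vpc-0123456789abcdef0 \
  --cidr-block 10.6.0.0/16
```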
### Default VPC
AWS creates a default VPC for you in each region. The default VPC will include 1 CIDR block, 1 route table, 1 DHCP options set, 1 Network ACL, 1 Security Group, 1 Internet Gateway, and 3~6 Subnets. The number of subnets depends on the number of Availability Zones in the region.
### Subnet
When you create a VPC, you must specify a range of IPv4 addresses for the VPC in the form of a Classless Inter-Domain Routing (CIDR) block; for example, 10.0.0.0/16. This is the primary CIDR block for your VPC.
#### Public subnet
A subnet's traffic is routed to an internet gateway.
#### Private subnet
A subnet doesn't have a route to an internet gateway.
#### VPN-only subnet
A subnet has traffic routed to a virtual private gateway for a Site-to-Site VPN connection.
#### Reserved IPs
The first four IP addresses and the last IP address in each subnet CIDR block are not available for you to use, and cannot be assigned to an instance. For example, in a subnet with CIDR block 10.0.1.0/24, the following five IP addresses are reserved:
* 10.0.1.0: Network address.
* 10.0.1.1: Reserved by AWS for the VPC router.
* 10.0.1.2: Reserved by AWS.DNS related. The IP address of the DNS server is the base of the VPC network range plus two. For VPCs with multiple CIDR blocks, the IP address of the DNS server is located in the primary CIDR. We also reserve the base of each subnet range plus two for all CIDR blocks in the VPC. For more information, see Amazon DNS Server.
* 10.0.1.3: Reserved by AWS for future use.
* 10.0.1.255: Network broadcast address. We do not support broadcast in a VPC, therefore we reserve this address.
### Internet Gateway
* It is the gateway through which your VPC communicates with the internet, and with other AWS services
* It is horizontally scaled, redundant, and highly available VPC component
* It performs NAT (static one-to-one) between your private and public (or Elastic) IPv4 addresses
* Every VPC can have only one Internet Gateway
### VPN Connection
VPN connection links your data center (or network) to your Amazon Virtual Private Cloud (VPC). A customer gateway device is the anchor on your side of that connection. It can be a physical or software appliance. The anchor on the AWS side of the VPN connection is called a virtual private gateway.
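As a rough sketch (the IP address, ASN, and IDs below are placeholders), both anchors of a Site-to-Site VPN connection can be created from the CLI:
```bash
# Customer gateway: your side of the connection.
aws ec2 create-customer-gateway \
  --type ipsec.1 \
  --public-ip 203.0.113.12 \
  --bgp-asn 65000
# Virtual private gateway: the AWS side, attached to your VPC.
aws ec2 create-vpn-gateway --type ipsec.1
aws ec2 attach-vpn-gateway \
  --vpn-gateway-id vgw-0123456789abcdef0 \
  --vpc-id vpc-0123456789abcdef0
```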
<file_sep>+++
title = "Error handling"
description="Rustlang Introduction: Error "
weight = 3
+++
### Error
* Rust groups errors into two major categories: recoverable and unrecoverable errors.
* Rust doesn’t have exceptions. Instead, it has the type Result<T, E> for recoverable errors and the panic! macro that stops execution when the program encounters an unrecoverable error.
#### panic! - Unrecoverable Errors
* Rust has the panic! macro. When the panic! macro executes, your program will print a failure message, unwind and clean up the stack, and then quit. This most commonly occurs when a bug of some kind has been detected and it’s not clear to the programmer how to handle the error.
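A minimal sketch of calling the macro directly (the message is arbitrary):
```rs
fn main() {
    // Prints the failure message, unwinds and cleans up the stack, then quits.
    panic!("crash and burn");
}
```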
#### Unwinding the Stack or Aborting in Response
* By default, when a panic occurs, the program starts unwinding, which means Rust walks back up the stack and cleans up the data from each function it encounters. But this walking back and cleanup is a lot of work. The alternative is to immediately abort, which ends the program without cleaning up. Memory that the program was using will then need to be cleaned up by the operating system.
* Abort on panic in release mode
```toml
[profile.release]
panic = 'abort'
```
##### Using a panic! Backtrace
* panic! call comes from a library because of a bug in our code instead of from our code calling the macro directly.
* Backtraces in Rust work as they do in other languages: the key to reading the backtrace is to start from the top and read until you see files you wrote.
#### Result - Recoverable Errors
* Most errors aren’t serious enough to require the program to stop entirely.
* Result enum is defined as having two variants, Ok and Err
```rs
enum Result<T, E> {
Ok(T),
Err(E),
}
```
* Matching on Different Errors
```rs
use std::fs::File;
use std::io::ErrorKind;
fn main() {
let f = File::open("hello.txt");
let f = match f {
Ok(file) => file,
Err(error) => match error.kind() {
ErrorKind::NotFound => match File::create("hello.txt") {
Ok(fc) => fc,
Err(e) => panic!("Problem creating the file: {:?}", e),
},
other_error => panic!("Problem opening the file: {:?}", other_error),
},
};
}
```
#### Shortcuts for Panic on Error: unwrap and expect
* If the Result value is the Ok variant, unwrap will return the value inside the Ok. If the Result is the Err variant, unwrap will call the panic! macro for us.
* Another method, expect, which is similar to unwrap, lets us also choose the panic! error message. Using expect instead of unwrap and providing good error messages can convey your intent and make tracking down the source of a panic easier.
```rs
use std::fs::File;
fn main() {
let f = File::open("hello.txt").unwrap();
let f = File::open("hello.txt")
.expect("Failed to open hello.txt");
}
```
#### Propagating Errors
* Propagating the error gives more control to the calling code, where there might be more information or logic that dictates how the error should be handled than what you have available in the context of your code.
* Example of reading file content to string
```rs
use std::io;
use std::io::Read;
use std::fs::File;
fn read_username_from_file() -> Result<String, io::Error> {
let f = File::open("hello.txt");
let mut f = match f {
Ok(file) => file,
Err(e) => return Err(e),
};
let mut s = String::new();
match f.read_to_string(&mut s) {
Ok(_) => Ok(s),
Err(e) => Err(e),
}
}
```
* This pattern of propagating errors is so common in Rust that Rust provides the question mark operator ? to make this easier.
* A Shortcut for Propagating Errors: the ? Operator
```rs
use std::io;
use std::io::Read;
use std::fs::File;
fn read_username_from_file() -> Result<String, io::Error> {
let mut f = File::open("hello.txt")?;
let mut s = String::new();
f.read_to_string(&mut s)?;
Ok(s)
}
```
* A more concise sample
```rs
use std::io;
use std::io::Read;
use std::fs::File;
fn read_username_from_file() -> Result<String, io::Error> {
let mut s = String::new();
File::open("hello.txt")?.read_to_string(&mut s)?;
Ok(s)
}
```
##### The ? Operator Can Only Be Used in Functions
* The ? operator can only be used in functions that have a return type of Result, because it is defined to work in the same way as the match expression
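For example (a minimal sketch), main itself can use the ? operator once it is declared to return a Result:
```rs
use std::error::Error;
use std::fs::File;

fn main() -> Result<(), Box<dyn Error>> {
    // `?` is allowed here because main returns a Result.
    let _f = File::open("hello.txt")?;
    Ok(())
}
```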
#### To panic! or Not to panic!
__When code panics, there’s no way to recover. You could call panic! for any error situation, whether there’s a possible way to recover or not, but then you’re making the decision on behalf of the code calling your code that a situation is unrecoverable. When you choose to return a Result value, you give the calling code options rather than making the decision for it. The calling code could choose to attempt to recover in a way that’s appropriate for its situation, or it could decide that an Err value in this case is unrecoverable, so it can call panic! and turn your recoverable error into an unrecoverable one.__
* Similarly, the unwrap and expect methods are very handy when prototyping, before you’re ready to decide how to handle errors. They leave clear markers in your code for when you’re ready to make your program more robust.
* If a method call fails in a test, you’d want the whole test to fail, even if that method isn’t the functionality under test. Because panic! is how a test is marked as a failure, calling unwrap or expect is exactly what should happen.
#### Guidelines for Error Handling
* The bad state is not something that’s expected to happen occasionally.
* Your code after this point needs to rely on not being in this bad state.
* There’s not a good way to encode this information in the types you use.
* When failure is expected, it’s more appropriate to return a Result than to make a panic! call
#### Custom Types for Validation
* Sample of validation with loop
```rs
loop {
// --snip--
let guess: i32 = match guess.trim().parse() {
Ok(num) => num,
Err(_) => continue,
};
if guess < 1 || guess > 100 {
println!("The secret number will be between 1 and 100.");
continue;
}
match guess.cmp(&secret_number) {
// --snip--
}
```
* The above is not an ideal solution. A better approach is to make a new type and put the validation in a function that creates an instance of the type; it’s then safe for functions to use the new type in their signatures and confidently use the values they receive.
```rs
pub struct Guess {
value: i32,
}
impl Guess {
pub fn new(value: i32) -> Guess {
if value < 1 || value > 100 {
panic!("Guess value must be between 1 and 100, got {}.", value);
}
Guess {
value
}
}
pub fn value(&self) -> i32 {
self.value
}
}
```
* The panic! macro signals that your program is in a state it can’t handle and lets you tell the process to stop instead of trying to proceed with invalid or incorrect values. The Result enum uses Rust’s type system to indicate that operations might fail in a way that your code could recover from.
<file_sep>+++
title = "F# Functions"
description = "F# Function, Rec Func & Inline Func"
weight = 3
+++
## F# Function
> Functions are the fundamental unit of program execution in any programming language. As in other languages, an F# function has a name, can have parameters and take arguments, and has a body. F# also supports functional programming constructs such as treating functions as values, using unnamed functions in expressions, composition of functions to form new functions, curried functions, and the implicit definition of functions by way of the partial application of function arguments.
- Syntax
```fsharp
// Non-recursive function definition.
let [inline] function-name parameter-list [ : return-type ] = function-body
// Recursive function definition.
let rec function-name parameter-list = recursive-function-body
```
### F# Recursive Function
> The rec keyword is used together with the let keyword to define a recursive function.
- Example - Bad implementation
```fsharp
let rec fib n =
match n with
| 0 | 1 -> n
| n -> fib (n-1) + fib (n-2)
```
- Example - Good implementation: Tail recursion
Tail recursion is important because it can be implemented more efficiently than general recursion. When we make a normal recursive call, we have to push the return address onto the call stack then jump to the called function. This means that we need a call stack whose size is linear in the depth of the recursive calls. When we have tail recursion we know that as soon as we return from the recursive call we're going to immediately return as well, so we can skip the entire chain of recursive functions returning and return straight to the original caller. That means we don't need a call stack at all for all of the recursive calls, and can implement the final call as a simple jump, which saves us space.
```fsharp
let fib n =
let rec loop acc1 acc2 n =
match n with
| 0 -> acc1
| 1 -> acc2
| _ ->
loop acc2 (acc1 + acc2) (n - 1)
loop 0 1 n
```
### F# Inline Function
> Inline functions are functions that are integrated directly into the calling code.
When you use static type parameters, any functions that are parameterized by type parameters must be inline. This guarantees that the compiler can resolve these type parameters.
You should avoid using inline functions for optimization unless you have tried all other optimization techniques.
```fsharp
// The following code example illustrates an inline function at the top level, an inline instance method, and an inline static method.
let inline increment x = x + 1
type WrapInt32() =
member inline this.incrementByOne(x) = x + 1
static member inline Increment(x) = x + 1
```
<file_sep>+++
title = "MySql: Getting Started"
description="Start / Stop MySql, Reset root credential"
+++
> MySQL is the world's most popular open source database. Whether you are a fast growing web property, technology ISV or large enterprise, MySQL can cost-effectively help you deliver high performance, scalable database applications.
### Prerequisite
* Install MySql 5.6+ on your PC or server
### Launch & Stop MySql server
```bash
# use systemctl
systemctl status mysql.service
systemctl restart mysql.service
# use service
service mysql status
service mysql restart
```
### Set root password
```bash
mysql_secure_installation
```
### Reset root password
```sql
-- MySql 5.6.x
mysql> UPDATE mysql.user SET password = PASSWORD('<PASSWORD>')
-> WHERE User = 'root' AND Host = 'localhost';
-- MySql 5.7+
mysql> UPDATE mysql.user SET authentication_string = PASSWORD('<PASSWORD>')
-> WHERE User = 'root' AND Host = 'localhost';
mysql> FLUSH PRIVILEGES;
```
### Connect to MySql server
```bash
mysql -u <user_name> -p -P <port> -h <host_name>
```
### Add a new user
```sql
CREATE USER 'user1'@'%' IDENTIFIED BY '<PASSWORD>';
GRANT ALL PRIVILEGES ON * . * TO 'user1'@'%';
```
### Delete a user
```sql
REVOKE ALL PRIVILEGES, GRANT OPTION FROM 'user1'@'%';
DROP USER 'user1'@'%';
```
<file_sep>+++
title = "Management"
weight = 4
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
title = "AWS : CLI - 2"
description = "AWS CLI & VPC"
weight=2
+++
### AWS CLI & VPC
Following is a sample that creates a VPC with a public subnet and a private subnet across two Availability Zones, plus an Internet Gateway and a NAT Gateway.
```bash
#!/bin/bash
#******************************************************************************
# AWS VPC CLI Script
#******************************************************************************
#
# SYNOPSIS
# Automates the creation of a custom IPv4 VPC, having both a public and a
# private subnet, and a NAT gateway.
#
#==============================================================================
#
# NOTES
# VERSION: 1.0
# AUTHOR: <NAME>
#
#==============================================================================
# MODIFY THE SETTINGS BELOW
#==============================================================================
#
AWS_REGION="ap-southeast-2"
VPC_NAME="DEV-PG-II"
VPC_CIDR="10.5.0.0/16"
SUBNET_PUBLIC_CIDR="10.5.1.0/24"
SUBNET_PUBLIC_AZ="ap-southeast-2a"
SUBNET_PUBLIC_NAME="$VPC_NAME-PubSub-AZ2a"
SUBNET_PRIVATE_CIDR="10.5.2.0/24"
SUBNET_PRIVATE_AZ="ap-southeast-2b"
SUBNET_PRIVATE_NAME="$VPC_NAME-PrvSub-AZ2b"
IGW_NAME="$VPC_NAME-IGW"
NAT_GW_NAME="$VPC_NAME-NAT-GW"
CHECK_FREQUENCY=5
#
#==============================================================================
# DO NOT MODIFY CODE BELOW
#==============================================================================
#
# Create VPC
echo "Creating VPC in preferred region..."
VPC_ID=$(aws ec2 create-vpc \
--cidr-block $VPC_CIDR \
--query 'Vpc.{VpcId:VpcId}' \
--output text \
--region $AWS_REGION)
echo " VPC ID '$VPC_ID' CREATED in '$AWS_REGION' region."
# Add Name tag to VPC
aws ec2 create-tags \
--resources $VPC_ID \
--tags "Key=Name,Value=$VPC_NAME" \
--region $AWS_REGION
echo " VPC ID '$VPC_ID' NAMED as '$VPC_NAME'."
# Create Public Subnet
echo "Creating Public Subnet..."
SUBNET_PUBLIC_ID=$(aws ec2 create-subnet \
--vpc-id $VPC_ID \
--cidr-block $SUBNET_PUBLIC_CIDR \
--availability-zone $SUBNET_PUBLIC_AZ \
--query 'Subnet.{SubnetId:SubnetId}' \
--output text \
--region $AWS_REGION)
echo " Subnet ID '$SUBNET_PUBLIC_ID' CREATED in '$SUBNET_PUBLIC_AZ'" \
"Availability Zone."
# Add Name tag to Public Subnet
aws ec2 create-tags \
--resources $SUBNET_PUBLIC_ID \
--tags "Key=Name,Value=$SUBNET_PUBLIC_NAME" \
--region $AWS_REGION
echo " Subnet ID '$SUBNET_PUBLIC_ID' NAMED as" \
"'$SUBNET_PUBLIC_NAME'."
# Create Private Subnet
echo "Creating Private Subnet..."
SUBNET_PRIVATE_ID=$(aws ec2 create-subnet \
--vpc-id $VPC_ID \
--cidr-block $SUBNET_PRIVATE_CIDR \
--availability-zone $SUBNET_PRIVATE_AZ \
--query 'Subnet.{SubnetId:SubnetId}' \
--output text \
--region $AWS_REGION)
echo " Subnet ID '$SUBNET_PRIVATE_ID' CREATED in '$SUBNET_PRIVATE_AZ'" \
"Availability Zone."
# Add Name tag to Private Subnet
aws ec2 create-tags \
--resources $SUBNET_PRIVATE_ID \
--tags "Key=Name,Value=$SUBNET_PRIVATE_NAME" \
--region $AWS_REGION
echo " Subnet ID '$SUBNET_PRIVATE_ID' NAMED as '$SUBNET_PRIVATE_NAME'."
# Create Internet gateway
echo "Creating Internet Gateway..."
IGW_ID=$(aws ec2 create-internet-gateway \
--query 'InternetGateway.{InternetGatewayId:InternetGatewayId}' \
--output text \
--region $AWS_REGION)
echo " Internet Gateway ID '$IGW_ID' CREATED."
# Add Name tag to Internet gateway
aws ec2 create-tags \
--resources $IGW_ID \
--tags "Key=Name,Value=$IGW_NAME" \
--region $AWS_REGION
echo " Internet gateway '$IGW_ID' NAMED as '$IGW_NAME'."
# Attach Internet gateway to your VPC
aws ec2 attach-internet-gateway \
--vpc-id $VPC_ID \
--internet-gateway-id $IGW_ID \
--region $AWS_REGION
echo " Internet Gateway ID '$IGW_ID' ATTACHED to VPC ID '$VPC_ID'."
# Create Route Table
echo "Creating Route Table..."
ROUTE_TABLE_ID=$(aws ec2 create-route-table \
--vpc-id $VPC_ID \
--query 'RouteTable.{RouteTableId:RouteTableId}' \
--output text \
--region $AWS_REGION)
echo " Route Table ID '$ROUTE_TABLE_ID' CREATED."
# Create route to Internet Gateway
RESULT=$(aws ec2 create-route \
--route-table-id $ROUTE_TABLE_ID \
--destination-cidr-block 0.0.0.0/0 \
--gateway-id $IGW_ID \
--region $AWS_REGION)
echo " Route to '0.0.0.0/0' via Internet Gateway ID '$IGW_ID' ADDED to" \
"Route Table ID '$ROUTE_TABLE_ID'."
# Associate Public Subnet with Route Table
RESULT=$(aws ec2 associate-route-table \
--subnet-id $SUBNET_PUBLIC_ID \
--route-table-id $ROUTE_TABLE_ID \
--region $AWS_REGION)
echo " Public Subnet ID '$SUBNET_PUBLIC_ID' ASSOCIATED with Route Table ID" \
"'$ROUTE_TABLE_ID'."
# Enable Auto-assign Public IP on Public Subnet
aws ec2 modify-subnet-attribute \
--subnet-id $SUBNET_PUBLIC_ID \
--map-public-ip-on-launch \
--region $AWS_REGION
echo " 'Auto-assign Public IP' ENABLED on Public Subnet ID" \
"'$SUBNET_PUBLIC_ID'."
# Allocate Elastic IP Address for NAT Gateway
echo "Creating NAT Gateway..."
EIP_ALLOC_ID=$(aws ec2 allocate-address \
--domain vpc \
--query '{AllocationId:AllocationId}' \
--output text \
--region $AWS_REGION)
echo " Elastic IP address ID '$EIP_ALLOC_ID' ALLOCATED."
# Create NAT Gateway
NAT_GW_ID=$(aws ec2 create-nat-gateway \
--subnet-id $SUBNET_PUBLIC_ID \
--allocation-id $EIP_ALLOC_ID \
--query 'NatGateway.{NatGatewayId:NatGatewayId}' \
--output text \
--region $AWS_REGION)
FORMATTED_MSG="Creating NAT Gateway ID '$NAT_GW_ID' and waiting for it to "
FORMATTED_MSG+="become available.\n Please BE PATIENT as this can take some "
FORMATTED_MSG+="time to complete.\n ......\n"
printf " $FORMATTED_MSG"
FORMATTED_MSG="STATUS: AVAILABLE - Total of %02d seconds elapsed for process"
FORMATTED_MSG+="\n ......\n NAT Gateway ID '%s' is now AVAILABLE.\n"
start_time="$(date -u +%s)"
aws ec2 wait nat-gateway-available \
--nat-gateway-ids $NAT_GW_ID
end_time="$(date -u +%s)"
elapsed="$(($end_time-$start_time))"
printf " $FORMATTED_MSG" $elapsed $NAT_GW_ID
# Add Name tag to NAT Gateway
aws ec2 create-tags \
--resources $NAT_GW_ID \
--tags "Key=Name,Value=$NAT_GW_NAME" \
--region $AWS_REGION
echo " Internet gateway '$NAT_GW_ID' NAMED as '$NAT_GW_NAME'."
# Create route to NAT Gateway
MAIN_ROUTE_TABLE_ID=$(aws ec2 describe-route-tables \
--filters Name=vpc-id,Values=$VPC_ID Name=association.main,Values=true \
--query 'RouteTables[*].{RouteTableId:RouteTableId}' \
--output text \
--region $AWS_REGION)
echo " Main Route Table ID is '$MAIN_ROUTE_TABLE_ID'."
RESULT=$(aws ec2 create-route \
--route-table-id $MAIN_ROUTE_TABLE_ID \
--destination-cidr-block 0.0.0.0/0 \
--gateway-id $NAT_GW_ID \
--region $AWS_REGION)
echo " Route to '0.0.0.0/0' via NAT Gateway with ID '$NAT_GW_ID' ADDED to" \
"Route Table ID '$MAIN_ROUTE_TABLE_ID'."
echo "COMPLETED"
```
<file_sep>+++
date = "2016-12-07T16:56:21+11:00"
title = "Tensorflow Note - 2"
description = "Tensorflow Note - 2"
+++
## Prerequisites
* You are using Windows 7 or higher version
* You are using Anaconda to setup the environment
## Install Anaconda
> Anaconda® is a package manager, an environment manager, a Python/R data science distribution, and a collection of over 1,500+ open source packages. Anaconda is free and easy to install, and it offers free community support.
* [Please download Anaconda from the official site](https://www.continuum.io/downloads)
## Create tensorflow virtualenv with python 3.5
* Anaconda uses python 3.6 by default. Tensorflow only supports python 3.5.
cd /path/to/envs
    conda create -n tensorflow python=3.5
## Install tensorflow
activate tensorflow
## For CPU
pip install --ignore-installed --upgrade \
https://storage.googleapis.com/tensorflow/windows/cpu/tensorflow-1.1.0-cp35-cp35m-win_amd64.whl
## Or for GPU
pip install --ignore-installed --upgrade \
https://storage.googleapis.com/tensorflow/windows/gpu/tensorflow_gpu-1.1.0-cp35-cp35m-win_amd64.whl
## Use sample code to test Tensorflow
### Save code below to file test.py
```python
import numpy as np
import tensorflow as tf
## Model parameters
W = tf.Variable([.3], tf.float32)
b = tf.Variable([-.3], tf.float32)
## Model input and output
x = tf.placeholder(tf.float32)
linear_model = W * x + b
y = tf.placeholder(tf.float32)
## loss
loss = tf.reduce_sum(tf.square(linear_model - y)) ## sum of the squares
## optimizer
optimizer = tf.train.GradientDescentOptimizer(0.01)
train = optimizer.minimize(loss)
## training data
x_train = [1,2,3,4]
y_train = [0,-1,-2,-3]
## training loop
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init) ## reset values to wrong
for i in range(1000):
sess.run(train, {x:x_train, y:y_train})
## evaluate training accuracy
curr_W, curr_b, curr_loss = sess.run([W, b, loss], {x:x_train, y:y_train})
print("W: %s b: %s loss: %s"%(curr_W, curr_b, curr_loss))
```
### Test with tensorflow-gpu (GPU enabled)
activate tensorflow
cd /ws/python/tf
python3 test.py
## You will probably see the result as follow
## ....
## name: GeForce GTX 850M
## major: 5 minor: 0 memoryClockRate (GHz) 0.9015
## pciBusID 0000:0a:00.0
## Total memory: 3.95GiB
## Free memory: 3.58GiB
## 2017-04-25 10:25:59.640621: I tensorflow/core/common_runtime/gpu/gpu_device.cc:908] DMA: 0
## 2017-04-25 10:25:59.640626: I tensorflow/core/common_runtime/gpu/gpu_device.cc:918] 0: Y
## 2017-04-25 10:25:59.640640: I tensorflow/core/common_runtime/gpu/gpu_device.cc:977]
## Creating TensorFlow device (/gpu:0) -> (device: 0, name: GeForce GTX 850M, pci ## bus id: 0000:0a:00.0)
## W: [-0.9999969] b: [ 0.99999082] loss: 5.69997e-11
<file_sep>+++
title = "F# Async"
description = "F# Async expressions"
weight = 12
+++
## Async expressions
Async expressions provide one way of performing computations asynchronously, that is, without blocking execution of other work.
### Asynchronous Binding by Using let!
The effect of let! is to enable execution to continue on other computations or threads as the computation is being performed. After the right side of the let! binding returns, the rest of the async expression resumes execution.
```fsharp
// let just stores the result as an asynchronous operation.
let (result1 : Async<byte[]>) = stream.AsyncRead(bufferSize)
// let! completes the asynchronous operation and returns the data.
let! (result2 : byte[]) = stream.AsyncRead(bufferSize)
```
`let!` can only be used to await F# async computations `Async<T>` directly. You can await other kinds of asynchronous operations indirectly:
- .NET tasks, `Task<TResult>` and the non-generic Task, by combining with `Async.AwaitTask`
- .NET value tasks, `ValueTask<TResult>` and the non-generic ValueTask, by combining with `.AsTask()` and `Async.AwaitTask`
- Any object following the "GetAwaiter" pattern specified in F# RFC FS-1097, by combining with `task { return! expr } |> Async.AwaitTask`.
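As a quick illustration (a minimal sketch added here; the URL and the use of `System.Net.Http.HttpClient` are just for the example), `Async.AwaitTask` lets `let!` await a .NET `Task<'T>` inside an async expression:
```fsharp
open System.Net.Http

let fetchLengthAsync (url: string) =
    async {
        use client = new HttpClient()
        // GetStringAsync returns Task<string>; Async.AwaitTask converts it to Async<string>
        let! body = client.GetStringAsync(url) |> Async.AwaitTask
        return body.Length
    }

fetchLengthAsync "http://www.bing.com"
|> Async.RunSynchronously
|> printfn "Downloaded %d characters"
```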
### use and use! bindings
In addition to `let!`, you can use `use!` to perform asynchronous bindings. The difference between `let!` and `use!` is the same as the difference between `let` and `use`. For `use!`, the object is disposed of at the close of the current scope. Note that in the current release of F#, `use!` does not allow a value to be initialized to null, even though `use` does.
### Asynchronous Primitives
A method that performs a single asynchronous task and returns the result is called an asynchronous primitive, and these are designed specifically for use with let!. Several asynchronous primitives are defined in the F# core library.
You use the function `Async.RunSynchronously` to execute an asynchronous operation and wait for its result. As an example, you can execute multiple asynchronous operations in parallel by using the Async.Parallel function together with the `Async.RunSynchronously` function. The `Async.Parallel` function takes a list of the Async objects, sets up the code for each Async task object to run in parallel, and returns an Async object that represents the parallel computation. Just as for a single operation, you call `Async.RunSynchronously` to start the execution.
```fsharp
open System.Net
open Microsoft.FSharp.Control.WebExtensions
let urlList = [ "Microsoft.com", "http://www.microsoft.com/"
"MSDN", "http://msdn.microsoft.com/"
"Bing", "http://www.bing.com"
]
let fetchAsync(name, url:string) =
async {
try
let uri = new System.Uri(url)
let webClient = new WebClient()
let! html = webClient.AsyncDownloadString(uri)
printfn "Read %d characters for %s" html.Length name
with
| ex -> printfn "%s" (ex.Message);
}
let runAll() =
urlList
|> Seq.map fetchAsync
|> Async.Parallel
|> Async.RunSynchronously
|> ignore
runAll()
```
<file_sep>#!/bin/bash
export SITE_URL="www.us2.io"
export SITE_SSL_PORT="443"
export SITE_URLS=(
www.applydirect.com.au
www.applydirect.co.nz
careersvic.applydirect.com.au
iworkfornsw.applydirect.com.au
live-iworkfornsw.applydirect.com.au
www.ad1holdings.com.au
mypharmacycareer.com.au
core-blue.us2.io
esi.uss.technology
www.utilitysoftwareservices.com
www.utilitiessoftwareservices.com
www.arcenergygroup1.com.au
www.planenergy1.com.au
nextconnectnbe.com.au
ws-proxy.us2.uss
uss.igeno.com.au
)
DOMAIN_LIST="domain_list.csv"
echo "Domain, Issuer, Start Date, End Date" > ${DOMAIN_LIST}
for SITE_URL in "${SITE_URLS[@]}"; do
echo $SITE_URL
data=$(true | openssl s_client -connect ${SITE_URL}:${SITE_SSL_PORT} \
-servername ${SITE_URL} 2> /dev/null | \
openssl x509 -text | grep "Issuer:" -A 3 ) # -issuer -email -startdate -enddate
if [[ ! -z $data ]]; then
data=${data/"Issuer:"/"$SITE_URL,\""}
data=${data/"Not Before:"/"\","}
data=${data/"Not After :"/","}
echo $data >> ${DOMAIN_LIST}
else
echo "$SITE_URL,N/A,N/A,N/A">> ${DOMAIN_LIST}
fi;
sleep 5
done;
cat $DOMAIN_LIST<file_sep>+++
title = "Adv Bash - 1"
description = "Reference Cards - Special Characters & Operators"
+++
### Special Characters
What makes a character special? If it has a meaning beyond its literal meaning, a meta-meaning, then we refer to it as a special character. Along with commands and keywords, special characters are building blocks of Bash scripts.
#### Special Shell Variables
Variable | Meaning
|--------|--------|
$0 | Filename of script
$1 | Positional parameter #1
$2 - $9 | Positional parameters #2 - #9
${10} | Positional parameter #10
$# | Number of positional parameters
"$*" | All the positional parameters (as a single word) *
"$@" | All the positional parameters (as separate strings)
${#*} | Number of positional parameters
${#@} | Number of positional parameters
$? | Return value
$$ | Process ID (PID) of script
$- | Flags passed to script (using set)
$_ | Last argument of previous command
$! | Process ID (PID) of last job run in background
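
A quick illustrative script (added here, not part of the original reference card) that prints several of these variables:
```bash
#!/bin/bash
# Save as demo.sh and run:  ./demo.sh one two three
echo "Script name      (\$0): $0"
echo "First argument   (\$1): $1"
echo "Argument count   (\$#): $#"
echo "All arguments   (\"\$@\"): $@"
echo "PID of script   (\$\$): $$"
sleep 1 &
echo "Last background job PID (\$!): $!"
false
echo "Return value of 'false' (\$?): $?"
```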
#### Operator Precedence
In a script, operations execute in order of precedence: the higher precedence operations execute before the lower precedence ones.
Operator | Meaning | Comments
|----|----|------
|| HIGHEST PRECEDENCE
|var++ var-- |post-increment, post-decrement |C-style operators
| ++var --var |pre-increment, pre-decrement
| ! ~ |negation| logical / bitwise, inverts sense of following operator
| ** |exponentiation| arithmetic operation
| * / % |multiplication, division, modulo| arithmetic operation
| + - |addition, subtraction| arithmetic operation
| << >> | left, right shift | bitwise
| -z -n |unary comparison| string is/is-not null
| -e -f -t -x, etc. |unary comparison| file-test
| < -lt > -gt <= -le >= -ge |compound comparison| string and integer
| -nt -ot -ef |compound comparison| file-test
| == -eq != -ne|equality / inequality | test operators, string and integer
| & |AND |bitwise
| ^ |XOR |exclusive OR, bitwise
| \| |OR |bitwise
| && -a |AND| logical, compound comparison
| \|\| -o | OR | logical, compound comparison
| ?: |trinary operator |C-style
| = |assignment| (do not confuse with equality test)
| *= /= %= += -= <<= >>= &= |combination assignment| times-equal, divide-equal, mod-equal, etc.
| , |comma| links a sequence of operations
|||LOWEST PRECEDENCE
#### TEST Operators: Binary Comparison
Operator|Meaning|Operator|Meaning
----|----|----|----
Arithmetic Comparison| |String Comparison|
-eq |Equal to| = | Equal to
||| == | Equal to
-ne | Not equal to | != | Not equal to
-lt | Less than | \< | Less than (ASCII) *
-le | Less than or equal to | |
-gt | Greater than | \> | Greater than (ASCII) *
-ge | Greater than or equal to | | |
||| -z | String is empty
||| -n | String is not empty
Arithmetic Comparison | within double parentheses (( ... )) | |
\> | Greater than | |
\>= | Greater than or equal to | |
\< | Less than | |
\<= | Less than or equal to | |
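
A short sketch (added for illustration) contrasting integer, string (ASCII), and arithmetic comparison:
```bash
#!/bin/bash
a=5; b=10
[ "$a" -lt "$b" ] && echo "-lt   : $a is numerically less than $b"
# String comparison is lexical: "5" is NOT less than "10" because '5' > '1' in ASCII
[[ "$a" < "$b" ]] || echo "<     : \"$a\" is NOT lexically less than \"$b\" (ASCII comparison)"
(( a < b ))       && echo "(( )) : a < b holds as an arithmetic comparison"
```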
<file_sep>+++
title = "JS & ES Note - 1"
description="The most unpredictable [this] in JavaScript"
+++
## The most unpredictable keyword and feature
You may have already guessed what I am talking about. Yes, the **this** keyword. It is not only a powerful feature, but also an often misinterpreted one.
In JavaScript, we also have this concept inside a function constructor when it is invoked using the "new" keyword. However, that is not the only rule, and "this" can refer to a different object depending on the execution context.
### **this** can be anything
* **this** represents different things.
```
var a = "abc"
console.log(a);
this.a = "123";
console.log(a);
console.log(this);
```
- Do you know what the output will be?
- Running in the console of Chrome / Firefox
```
abc
123
Window {postMessage: ƒ, blur: ƒ, focus: ƒ, close: ƒ, parent: Window, …}
```
- Running in the js file via node
```
# Save the content as index.js
# Run command - node index.js
abc
abc
{ a: '123' }
```
- Running in the node command prompt
```
abc
123
Object [global] {global: [Circular],...}
```
* Basically, **this** represents the context environment, a.k.a. the global object. The best way to confirm what **this** means is to print it out.
### **this** inside a function
Since 2015, ECMAScript 6 (a.k.a. ES2015) has been supported by most modern browsers. So when we talk about functions in JavaScript, we are really talking about two slightly different kinds of functions. One is defined with the keyword `function`; the other is known as the arrow function, or fat arrow. For most scenarios an arrow function behaves the same as an old-style function, but it handles the **this** keyword in a more stable and predictable way.
> Mozilla - MDN: The value of this is determined by how a function is called (runtime binding). It can't be set by assignment during execution, and it may be different each time the function is called. ES5 introduced the bind() method to set the value of a function's this regardless of how it's called. The arrow functions which don't provide their own this binding (it retains the this value of the enclosing lexical context).
According to MDN's explanation, the arrow function is more predictable and stable. Arrow functions are highly recommended as a replacement for old-style functions.
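For completeness, here is a tiny sketch (added for illustration, not taken from MDN) showing `bind()` pinning `this` for a regular function:
```js
var counter = {
  count: 0,
  increment: function () {
    this.count += 1;
    return this.count;
  }
};
// Detached reference: `this` is no longer counter when called plainly.
var unbound = counter.increment;
// bind() pins `this` to counter regardless of how the function is called.
var bound = counter.increment.bind(counter);
console.log(bound());   // 1
console.log(bound());   // 2
console.log(unbound()); // NaN in a non-strict global context (this.count is undefined); TypeError in strict mode
```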
#### **this** inside a regular function
* The samples below show how *this* differs between contexts
```js
var obj0 = {
prop: 'obj0',
func_1: function () {
console.log(' func_1-> ', this);
var func_2 = {
prop: 'func_2',
func_3: function () {
console.log(' func_3 -> ', this);
}
};
func_2.func_3();
}
};
obj0.func_1();
////////////////////////////////
var obj1 = {
prop: 'obj1',
func_1: function () {
console.log(' func_1-> ', this);
func_2 = function () {
this.prop = 'func_2';
var func_3 = function () {
this.p = 'func_3';
console.log(' func_3 -> ', this);
}
return func_3
};
func_2()();
}
};
obj1.func_1();
```
* Test (Fat Arrow)
```js
var obj2 = {
prop: 123,
func_1: () => {
console.log(' func_1-> ', this);
func_2 = {
prop: 'func_2',
func_3: () => {
this.p = 'func_3';
console.log(' func_3 -> ', this);
}
};
func_2.func_3();
}
};
obj2.func_1();
```
#### Fat arrow has no binding
* The sample below shows the different binding behavior between a regular function and an arrow function
```js
var obj3 = {
p: 'obj3',
toBeCalled: function() {
console.log(' this is toBeCalled ', this.p);
},
toBind: function(obj) {
obj.toBeCalled();
}
};
var testBind = obj3.toBind;
testBind(obj3);
/// output:
/// this is toBeCalled obj3
var obj4 = {
p: 'obj4',
toBeCalled: () => {
console.log(' this is toBeCalled ', this.p);
},
toBind: obj => {
obj.toBeCalled();
}
};
var testBind2 = obj4.toBind;
testBind2(obj4);
/// output
/// this is toBeCalled undefined
```
<file_sep>+++
title = "Azure: CAF - 3"
weight = 1
description="Strategy of CAF"
+++
## cloud adoption strategy
- Define and document your motivations: Meet with key stakeholders and executives to document the motivations behind cloud adoption.
- Document business outcomes: Engage motivated stakeholders and executives to document specific business outcomes.
- Develop a business case: Develop a business case to validate the financial model that supports your motivations and outcomes.
- Choose the right first project: Your first cloud adoption project will help align motivations with technical effort. This article can help you choose your first project wisely.
### Motivation-driven strategies
Business transformations that are supported by cloud adoption can be driven by various motivations.
#### Migration
- Cost savings.
- Reduction in vendor or technical complexity.
- Optimization of internal operations.
- Increasing business agility.
- Preparing for new technical capabilities.
- Scaling to meet market demands.
- Scaling to meet geographic demands.
#### Innovation
- Increasing business agility.
- Preparing for new technical capabilities.
- Building new technical capabilities.
- Scaling to meet market demands.
- Scaling to meet geographic demands.
- Improving customer experiences and engagements.
- Transforming products or services.
### Business Outcomes
* Fiscal outcomes - Financial or fiscal performance is the cleanest business outcome for many business leaders, but not the only one.
* Agility outcomes - Today's fast-changing business environment places a premium on time. The ability to respond to and drive market change quickly is the fundamental measure of business agility.
* Reach outcomes - In a constantly shrinking market, global reach (ability to support global customers and users) can be measured by compliance in geographies that are relevant to the business.
* Customer engagement outcomes - Social marketplaces are redefining winners and losers at an unheard-of pace. Responding to user needs is a key measure of customer engagement.
* Performance outcomes - Performance and reliability are assumed. When either falters, reputation damage can be painful and long-lasting.
### Business Justification
On a basic level, the business justification focuses on the return on investment (ROI) associated with the proposed technical change. The generic formula for ROI is:
> ROI (Return on Investment) = (Gain from Investment - Investment) / Investment
When the ROI is below 20%, consider a digital estate planning exercise, paying specific attention to rationalization.
Calculating the gain from investment often requires a second formula that's specific to the business outcomes and associated technical changes. Calculating earnings is harder than calculating cost reductions.
> Gain from Investment = Revenue deltas - Cost deltas
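
For example (illustrative numbers only, not from the CAF guidance): if a migration is expected to add $200K of new revenue and remove $300K of operating cost against a $400K investment, the gain from investment is $200K + $300K = $500K, so ROI = ($500K - $400K) / $400K = 25%, comfortably above the 20% threshold mentioned above.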
#### Revenue deltas
Revenue deltas should be forecast in partnership with business stakeholders.
#### Cost deltas
Cost deltas are the amount of increase or decrease that will be caused by the transformation.
<file_sep>+++
date = "2016-12-07T16:56:21+11:00"
title = "CNTK Note - 1"
description="AI Framework from Microsoft"
+++
## Prerequisites
* You are using Windows 7 or higher version
* You are using Anaconda to setup the environment
## Create CNTK virtual environment
### Use the following commands to remove an existing virtual environment and create a new one
conda remove -n cntk --all
conda create -n cntk
### Activate virtual environment and install CNTK
activate cntk
pip install https://cntk.ai/PythonWheel/CPU-Only/cntk-2.0rc3-cp36-cp36m-win_amd64.whl
## Test CNTK
python
>>> import cntk
>>> cntk.__version__
'2.0rc3'
<file_sep>+++
date = "2018-12-04T14:59:31+11:00"
title = "VPN VyOS setup"
description = "VPN VyOS setup"
+++
## VPN VyOS
VyOS is a fully open source network OS that runs on a wide range of hardware, virtual machines, and cloud providers and offers features for any networks, small and large.
### VyOS on AWS
#### Setup VyOS
* Launch instance with community AMI - VyOS (HVM) 1.x.x
* Customize the setup script
```bash
#!/bin/bash
source /opt/vyatta/etc/functions/script-template
AWS_PRIVATE_IP=10.104.16.128
AWS_PUBLIC_IP=172.16.31.10
AWS_NAT_SUBNET=10.104.0.0/16
REMOTE_NAT_IP=127.17.12.172
REMOTE_VPN_SUBNET=172.16.58.3/24
REMOTE_1ST_VPN_IP=192.168.127.12
# REMOTE_2ND_VPN_IP=192.168.3.11 # redundant connection not currently used
REMOTE_PRE_SHARED_KEY=Your_Remote_Key
# begin configuration
configure
# input settings using set
set system host-name vyos-vpn
# setting up NAT
set interfaces ethernet eth0 description 'aws-internal'
# create dummy ethernet device to represent REMOTE-provided private IP
set interfaces dummy dum0 address ${REMOTE_NAT_IP}/32
set interfaces dummy dum0 description 'remote-vpn-ip'
# configure SNAT
set nat source rule 100 description 'Internal to REMOTE'
set nat source rule 100 destination address ${REMOTE_VPN_SUBNET}
set nat source rule 100 outbound-interface 'any'
set nat source rule 100 source address ${AWS_NAT_SUBNET}
set nat source rule 100 translation address ${REMOTE_NAT_IP}
# setting up VPN
# set primary ethernet interface as the VPN interface
set vpn ipsec ipsec-interfaces interface 'eth0'
set vpn ipsec nat-traversal 'enable'
set vpn ipsec logging log-modes 'all'
# esp-group
set vpn ipsec esp-group vpn-nat-esp compression 'disable'
set vpn ipsec esp-group vpn-nat-esp lifetime '28800'
set vpn ipsec esp-group vpn-nat-esp mode 'tunnel'
set vpn ipsec esp-group vpn-nat-esp pfs 'dh-group2'
set vpn ipsec esp-group vpn-nat-esp proposal 1 encryption 'aes256'
set vpn ipsec esp-group vpn-nat-esp proposal 1 hash 'sha1'
# ike-group
set vpn ipsec ike-group vpn-nat-ike ikev2-reauth 'no'
set vpn ipsec ike-group vpn-nat-ike key-exchange 'ikev1'
set vpn ipsec ike-group vpn-nat-ike lifetime '28800'
set vpn ipsec ike-group vpn-nat-ike proposal 1 encryption 'aes256'
set vpn ipsec ike-group vpn-nat-ike proposal 1 hash 'sha512'
set vpn ipsec ike-group vpn-nat-ike proposal 1 dh-group '5'
set vpn ipsec ike-group vpn-nat-ike dead-peer-detection action 'restart'
set vpn ipsec ike-group vpn-nat-ike dead-peer-detection interval '30'
set vpn ipsec ike-group vpn-nat-ike dead-peer-detection timeout '30'
# site-to-site peer
edit vpn ipsec site-to-site peer ${REMOTE_1ST_VPN_IP}
set authentication mode 'pre-shared-secret'
set authentication pre-shared-secret ${REMOTE_PRE_SHARED_KEY}
set authentication id ${AWS_PUBLIC_IP}
set connection-type 'initiate'
set default-esp-group 'vpn-nat-esp'
set ike-group 'vpn-nat-ike'
set ikev2-reauth 'inherit'
set local-address ${AWS_PRIVATE_IP}
set tunnel 0 local prefix ${REMOTE_NAT_IP}/32
set tunnel 0 remote prefix ${REMOTE_VPN_SUBNET}
# commit command applies changes to VyOS device
commit
# save configuration to machine
save
# exit configuration mode
exit
# check status of VPN tunnel
show vpn ipsec sa
# commands to check VPN status/logs/information:
# monitor vpn ipsec
# show vpn debug
# show log vpn ipsec
```
#### Update VyOS config
* Manually update the key **Your_Remote_Key** or the remote IP, e.g. 192.168.127.12
```
interfaces {
dummy dum0 {
address 127.17.12.172/32
address 172.17.130.96/32
description remote-vpn-ip
}
ethernet eth0 {
address dhcp
description aws-internal
duplex auto
hw-id 06:73:3f:28:dd:68
smp_affinity auto
speed auto
}
loopback lo {
}
}
nat {
source {
rule 100 {
description "Internal to REMOTE"
destination {
address 172.16.58.3/24
}
outbound-interface any
source {
address 10.104.0.0/16
}
translation {
address 127.17.12.172
}
}
}
}
service {
ssh {
disable-password-authentication
port 22
}
}
system {
config-management {
commit-revisions 20
}
console {
device ttyS0 {
speed 9600
}
}
host-name vyos-vpn
login {
user vyos {
authentication {
encrypted-password "*"
plaintext-password ""
public-keys aws.vpn.vyos.key.io-bd:dc:ae:d6:28:b3:5f:5b:2e:43:6f:31:b8:b3:a0:58 {
key <KEY>
type ssh-rsa
}
}
level admin
}
}
ntp {
server 0.pool.ntp.org {
}
server 1.pool.ntp.org {
}
server 2.pool.ntp.org {
}
}
package {
auto-sync 1
repository community {
components main
distribution helium
password ""
url http://packages.vyos.net/vyos
username ""
}
}
syslog {
global {
facility all {
level notice
}
facility protocols {
level debug
}
}
}
time-zone UTC
}
vpn {
ipsec {
esp-group vpn-nat-esp {
compression disable
lifetime 28800
mode tunnel
pfs dh-group14
proposal 1 {
encryption aes256
hash sha256
}
}
ike-group vpn-nat-ike {
dead-peer-detection {
action restart
interval 30
timeout 30
}
ikev2-reauth no
key-exchange ikev1
lifetime 28800
proposal 1 {
dh-group 14
encryption aes256
hash sha256
}
}
ipsec-interfaces {
interface eth0
}
logging {
log-modes all
}
nat-traversal enable
site-to-site {
peer 192.168.127.12 {
authentication {
id 13.14.15.16
mode pre-shared-secret
pre-shared-secret Your_Remote_Key
}
connection-type initiate
default-esp-group vpn-nat-esp
ike-group vpn-nat-ike
ikev2-reauth inherit
local-address 10.104.16.128
tunnel 0 {
allow-nat-networks disable
allow-public-networks disable
local {
prefix 127.17.12.172/32
}
remote {
prefix 172.16.58.3/24
}
}
}
}
}
}
```
* Reboot the VyOS
```
_vyatta_op_run reboot
```<file_sep>+++
title = "Java Note - 4: Date Time "
description="Java 8 provides a comprehensive Date-Time API to work with date, time, and datetime"
+++
## Date-Time API
Through the java.time packages, Java 8 provides a comprehensive Date-Time API to work with date, time, and datetime. By default, most of the classes are based on the ISO-8601 standards. The main classes are
* Instant
* represents an instant on the timeline and it is suitable for machines, for example, as timestamps for event
* LocalDate, LocalTime, LocalDateTime
* represents human readable date, time, and datetime without a time zone.
* OffsetTime, OffsetDateTime
* It represent a time and datetime with a zone offset from UTC.
* ZonedDateTime
* It represents a datetime for a time zone with zone rules, which will adjust the time according to the daylight saving time changes in the time zone.
### ISO-8601 Standards for Datetime
* [date]T[time][zone offset]
* A date component consists of three calendar fields: year, month, and day. Two fields in a date are separated by a hyphen: year-month-day\
* Epoch is Midnight January 1, 1970 UTC
### Useful Datetime-Related Enums
* Month
* DayOfWeek
* ChronoField
* ChronoUnit
## Period
A period is an amount of time defined in terms of calendar fields years, months, and days. A duration is also an amount of time measured in terms of seconds and nanoseconds. Negative periods are supported. What is the difference between a period and a duration? A duration represents an exact number of nanoseconds, whereas a period represents an inexact amount of time. A period is for humans what a duration is for machines.
## Partials
A partial is a date, time, or datetime that does not fully specify an instant on a timeline, but still makes sense to humans. With some more information, a partial may match multiple instants on the timeline.
## Adjusting Dates
Sometimes you want to adjust a date and time to have a particular characteristic, for example, the first Monday of the month, the next Tuesday, etc. You can perform adjustments to a date and time using an instance of the `TemporalAdjuster` interface. The interface has one method, adjustInto(), that takes a Temporal and returns a `Temporal`.
## Formatting
The most important point to keep in mind is that formatting and parsing are
always performed by an object of the DateTimeFormatter class.
### DateTimeApiDemo
```java
import java.time.Duration;
import java.time.DayOfWeek;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.LocalDateTime;
import java.time.Month;
import java.time.MonthDay;
import java.time.OffsetDateTime;
import java.time.Period;
import java.time.temporal.Temporal;
import java.time.temporal.TemporalAdjusters;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
import java.time.Year;
import java.time.YearMonth;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.ZoneId;
import java.util.Locale;
import java.util.Set;
import static java.time.Month.JANUARY;
import static java.time.temporal.ChronoUnit.DAYS;
import static java.time.temporal.ChronoUnit.HOURS;
import static java.time.temporal.ChronoUnit.MINUTES;
public class DateTimeApiDemo {
public static String format(Temporal co, String pattern) {
DateTimeFormatter fmt = DateTimeFormatter.ofPattern(pattern, Locale.US);
return fmt.format(co);
}
public static void parseStr(DateTimeFormatter formatter, String text) {
try {
TemporalAccessor ta = formatter.parseBest(text, OffsetDateTime::from, LocalDateTime::from, LocalDate::from);
if (ta instanceof OffsetDateTime) {
OffsetDateTime odt = OffsetDateTime.from(ta);
System.out.println("OffsetDateTime: " + odt);
} else if (ta instanceof LocalDateTime) {
LocalDateTime ldt = LocalDateTime.from(ta);
System.out.println("LocalDateTime: " + ldt);
} else if (ta instanceof LocalDate) {
LocalDate ld = LocalDate.from(ta);
System.out.println("LocalDate: " + ld);
} else {
System.out.println("Parsing returned: " + ta);
}
} catch (DateTimeParseException e) {
System.out.println(e.getMessage());
}
}
public static void main(String[] args) {
// Get current date, time, and datetime
LocalDate dateOnly = LocalDate.now(); // 2016-03-12
LocalTime timeOnly = LocalTime.now(); // 09:17:56.200
LocalDateTime dateTime = LocalDateTime.now(); // 2016-03-12T09:17:56.200
ZonedDateTime dateTimeWithZone = ZonedDateTime.now(); // 2016-03-12T09:17:56.202+11:00[Australia/Sydney]
// ofXXX() method
LocalDate ld1 = LocalDate.of(2012, 5, 2); // 2012-05-02
LocalDate ld2 = LocalDate.of(2012, Month.JULY, 4); // 2012-07-04
LocalDate ld3 = LocalDate.ofEpochDay(2002); // 1975-06-26
LocalDate ld4 = LocalDate.ofYearDay(2014, 40); // 2014-02-09
// The plusXXX( ) and minusXXX( ) Methods
LocalDate ld = LocalDate.of(2015, 5, 2); // 2015-05-02
LocalDate ldp1 = ld.plusDays(5); // 2015-05-07
LocalDate ldp2 = ld.plusMonths(3); // 2015-08-02
LocalDate ldp3 = ld.plusWeeks(3); // 2015-05-23
LocalDate ldm1 = ld.minusMonths(7); // 2014-10-02
LocalDate ldm2 = ld.minusWeeks(3); // 2015-04-11
// Instant
Instant i1 = Instant.ofEpochSecond(20); // i1:1970-01-01T00:00:20Z
Instant i2 = Instant.ofEpochSecond(55); // i2:1970-01-01T00:00:55Z
Duration d1 = Duration.ofSeconds(55);
Duration d2 = Duration.ofSeconds(-17);
// Compare instants
System.out.println("i1.isBefore(i2):" + i1.isBefore(i2)); // i1.isBefore(i2):true
System.out.println("i1.isAfter(i2):" + i1.isAfter(i2)); // i1.isAfter(i2):false
// Add and subtract durations to instants
Instant i3 = i1.plus(d1);
Instant i4 = i2.minus(d2);
System.out.println("i1.plus(d1):" + i3); // i1.plus(d1):1970-01-01T00:01:15Z
System.out.println("i2.minus(d2):" + i4); // i2.minus(d2):1970-01-01T00:01:12Z
// Add two durations
System.out.println("d1.plus(d2):" + d1.plus(d2)); // d1.plus(d2):PT38S
// Print All Zone Id
Set<String> zoneIds = ZoneId.getAvailableZoneIds();
for (String zoneId: zoneIds) {
System.out.println(zoneId);
}
// DayOfWeek
        DayOfWeek dw1 = DayOfWeek.from(ld); // SATURDAY (2015-05-02 was a Saturday)
// Chrono
LocalDateTime now = LocalDateTime.now();
System.out.println("Year: " + now.get(ChronoField.YEAR));
System.out.println("Month: " + now.get(ChronoField.MONTH_OF_YEAR));
System.out.println("Day: " + now.get(ChronoField.DAY_OF_MONTH));
System.out.println("Hour-of-day: " + now.get(ChronoField.HOUR_OF_DAY));
System.out.println("Hour-of-AMPM: " + now.get(ChronoField.HOUR_OF_AMPM));
System.out.println("AMPM-of-day: " + now.get(ChronoField.AMPM_OF_DAY));
Period p1 = Period.of(2, 3, 5); // 2 years, 3 months, and 5 days
Period p2 = Period.ofDays(25); // 25 days
Period p3 = Period.ofMonths(-3); // -3 months
Period p4 = Period.ofWeeks(3); // 3 weeks (21 days)
// Date Adjuster
        LocalDate nextMonday = ld1.with(TemporalAdjusters.next(DayOfWeek.MONDAY));
        System.out.println(nextMonday);
        LocalDate nextOrSameTuesday = ld1.with(TemporalAdjusters.nextOrSame(DayOfWeek.TUESDAY));
        System.out.println(nextOrSameTuesday);
// Date Time Format
System.out.println(format(ld, "M/d/yyyy"));
System.out.println(format(ld, "MM/dd/yyyy"));
System.out.println(format(ld, "MMM dd, yyyy"));
System.out.println(format(ld, "MMMM dd, yyyy"));
System.out.println(format(ld, "EEEE, MMMM dd, yyyy"));
System.out.println(format(ld, "'Month' q 'in' QQQ"));
System.out.println(format(ld, "[MM-dd-yyyy][' at' HH:mm:ss]"));
// Parse date time
DateTimeFormatter parser = DateTimeFormatter.ofPattern("yyyy-MM-dd['T'HH:mm:ss[Z]]");
parseStr(parser, "2012-05-31"); // LocalDate: 2012-05-31
parseStr(parser, "2012-05-31T16:30:12"); // LocalDateTime: 2012-05-31T16:30:12
parseStr(parser, "2012-05-31T16:30:12-0500"); // OffsetDateTime: 2012-05-31T16:30:12-05:00
parseStr(parser, "2012-05-31Hello"); // Text '2012-05-31Hello' could not be parsed, unparsed text found at index 10
}
}
```
<file_sep>+++
title = "VirtualBox Notes"
description = "VirtualBox practices"
draft = false
+++
## Network
### Not attached.
In this mode, Oracle VM VirtualBox reports to the guest that a network card is present, but that there is no connection. This is as if no Ethernet cable was plugged into the card. Using this mode, it is possible to "pull" the virtual Ethernet cable and disrupt the connection, which can be useful to inform a guest operating system that no network connection is available and enforce a reconfiguration.
### Network Address Translation (NAT).
If all you want is to browse the Web, download files, and view email inside the guest, then this default mode should be sufficient for you, and you can skip the rest of this section. Please note that there are certain limitations when using Windows file sharing. See Section 6.3.3, “NAT Limitations”.
### NAT Network.
A NAT network is a type of internal network that allows outbound connections. See Section 6.4, “Network Address Translation Service”.
### Bridged networking.
This is for more advanced networking needs, such as network simulations and running servers in a guest. When enabled, Oracle VM VirtualBox connects to one of your installed network cards and exchanges network packets directly, circumventing your host operating system's network stack.
### Internal networking.
This can be used to create a different kind of software-based network which is visible to selected virtual machines, but not to applications running on the host or to the outside world.
### Host-only networking.
This can be used to create a network containing the host and a set of virtual machines, without the need for the host's physical network interface. Instead, a virtual network interface, similar to a loopback interface, is created on the host, providing connectivity among virtual machines and the host.
### Overview of networking modes
Mode|VM -> Host|VM <- Host|VM1<->VM2| VM->Net/LAN|VM<-Net/LAN
----|:---:|:---:|:----:|:---:|:-----:|
Host-only | + | + | + | – | –
Internal | – | – | + | – | –
Bridged | + | + | + | + | +
NAT | + | Port forward | – | + | Port forward
NATservice | + | Port forward | + | + | Port forward
### How to SSH between HOST and VMS
- Enable password authentication for SSH in the guest OS
- Create a new host-only network
- File > Network Manager > Host-only networks
- Create a new network `vboxnet0`
- Setup network adapter
- Adapter 1: Bridged Network
- Name: wifi
- Adapter 2: Host-only network
- Name: vboxnet0
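As an illustrative sketch (added here; the VM name `ubuntu-vm` and the host interface `en0` are placeholders for your own setup, and the VM must be powered off), the same wiring can be done from the command line:
```bash
# Create a host-only interface on the host (e.g. vboxnet0)
VBoxManage hostonlyif create
# Adapter 1: bridged to the host's physical/Wi-Fi interface
VBoxManage modifyvm "ubuntu-vm" --nic1 bridged --bridgeadapter1 en0
# Adapter 2: host-only network for host<->VM SSH
VBoxManage modifyvm "ubuntu-vm" --nic2 hostonly --hostonlyadapter2 vboxnet0
# Start the VM and connect over the host-only address
VBoxManage startvm "ubuntu-vm" --type headless
ssh user@<host-only IP of the VM>
```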
<file_sep>+++
title = "Data Types & Ownership"
description="Rustlang Introduction: Mutability, Shadowing, Data Types, Ownership, Borrowing "
weight = 1
+++
### Mutability
Rust encourages you to favor immutability. It’s important that we get compile-time errors when we attempt to change a value that we previously designated as immutable because this very situation can lead to bugs.
But mutability can be very useful. To make them mutable is simply adding mut in front of the variable name. In addition to allowing this value to change, mut conveys intent to future readers of the code by indicating that other parts of the code will be changing this variable value.
#### Shadowing
* Rustaceans say that the first variable is shadowed by the second, which means that the second variable’s value is what appears when the variable is used.
* Sample
```rs
fn main() {
let x = 5;
let x = x + 1;
let x = x * 2;
println!("The value of x is: {}", x); // 12
}
```
* Shadowing is different from marking a variable as mut, because we’ll get a compile-time error if we accidentally try to reassign to this variable without using the let keyword. By using let, we can perform a few transformations on a value but have the variable be immutable after those transformations have been completed.
* The other difference between mut and shadowing is that because we’re effectively creating a new variable when we use the let keyword again, we can change the type of the value but reuse the same name.
### Data types
Every value in Rust is of a certain data type, which tells Rust what kind of data is being specified so it knows how to work with that data. We’ll look at two data type subsets: scalar and compound.
* A scalar type represents a single value. Rust has four primary scalar types: integers, floating-point numbers, Booleans, and characters.
* Compound types can group multiple values into one type. Rust has two primitive compound types: tuples and arrays.
#### Type parse
```rs
let guess: u32 = "42".parse().expect("Not a number!");
```
#### The Tuple Type
* A tuple is a general way of grouping together some number of other values with a variety of types into one compound type.
```rs
let tup: (i32, f64, u8) = (500, 6.4, 1);
let tup = (500, 6.4, 1);
let (x, y, z) = tup;
println!("The value of y is: {}", y); //6.4
```
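For the other primitive compound type, the array, here is a minimal sketch (added for illustration):
```rs
fn main() {
    // Arrays have a fixed length and a single element type: [T; N].
    let months = ["Jan", "Feb", "Mar"];
    let zeros = [0u8; 5];      // five elements, all 0
    let first = months[0];     // index access
    println!("{} {} {}", first, months.len(), zeros[4]);
}
```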
### Ownership
Rust’s central feature is ownership. Although the feature is straightforward to explain, it has deep implications for the rest of the language.
#### Ownership Rules
* Each value in Rust has a variable that’s called its owner.
* There can only be one owner at a time.
* When the owner goes out of scope, the value will be dropped.
#### Variable Scope
* A scope is the range within a program for which an item is valid.
#### Memory and Allocation
* Rust takes a different path: the memory is automatically returned once the variable that owns it goes out of scope.
* Explanation of memory allocation and free
```rs
{
let s = String::from("hello"); // s is valid from this point forward
} // this scope is now over, and s is no longer valid
```
* There is a natural point at which we can return the memory our String needs to the operating system: when s goes out of scope. When a variable goes out of scope, Rust calls a special function for us. This function is called drop, and it’s where the author of String can put the code to return the memory. Rust calls drop automatically at the closing curly bracket.
#### Move or Clone or Copy
* Sample of move; the scalar type has no this problem
```rs
let s1 = String::from("hello");
let s2 = s1; // move value from s1 to s2
// s1 is no longer valid
println!("{}, world!", s1); // Compile error - value used here after move
```
* If we do want to deeply copy the heap data of the String, not just the stack data, we can use a common method called clone.
```rs
let s2 = s1.clone();
```
* Rust has a special annotation called the Copy trait that we can place on types like integers that are stored on the stack. If a type has the Copy trait, an older variable is still usable after assignment. Rust won’t let us annotate a type with the Copy trait if the type, or any of its parts, has implemented the Drop trait. If the type needs something special to happen when the value goes out of scope and we add the Copy annotation to that type, we’ll get a compile-time error.
* Types for copy
* All the integer types, such as u32.
* The Boolean type, bool, with values true and false.
* All the floating point types, such as f64.
* The character type, char.
* Tuples, if they only contain types that are also Copy. For example, (i32, i32) is Copy, but (i32, String) is not.
### References & Borrowing
* At any given time, you can have either one mutable reference or any number of immutable references.
* References must always be valid.
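A minimal sketch (added here) illustrating these borrowing rules:
```rs
fn calculate_length(s: &String) -> usize {
    s.len() // read through an immutable reference; no ownership is taken
}

fn append_world(s: &mut String) {
    s.push_str(", world"); // requires the one-and-only mutable reference
}

fn main() {
    let mut s = String::from("hello");
    let len = calculate_length(&s); // immutable borrow ends after this call
    append_world(&mut s);           // now a mutable borrow is allowed
    println!("{} (was {} bytes before)", s, len);
}
```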
#### Slice
* Another data type that does not have ownership is the slice. Slices let you reference a contiguous sequence of elements in a collection rather than the whole collection.
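For example (illustrative sketch):
```rs
fn main() {
    let s = String::from("hello world");
    let hello = &s[0..5];     // string slice borrowing part of s
    let world = &s[6..11];
    let nums = [1, 2, 3, 4, 5];
    let middle = &nums[1..4]; // array slice: [2, 3, 4]
    println!("{} {} {:?}", hello, world, middle);
}
```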
### Struct
* Structs are similar to tuples, which were discussed in Chapter 3. Like tuples, the pieces of a struct can be different types. Unlike with tuples, you’ll name each piece of data so it’s clear what the values mean.
* sample code of strut
```rs
struct User {
username: String,
email: String,
sign_in_count: u64,
active: bool,
}
fn build_user(email: String, username: String) -> User {
User {
email: email,
username: username,
active: true,
sign_in_count: 1,
}
}
```
* Creating Instances From Other Instances
```rs
let user2 = User {
email: String::from("<EMAIL>"),
username: String::from("anotherusername567"),
..user1
};
```
#### Unit-Like Struct
* structs that don’t have any fields! These are called unit-like structs because they behave similarly to (), the unit type.
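A minimal sketch (added for illustration):
```rs
// A unit-like struct has no fields; it is handy as a marker type to implement traits on.
struct AlwaysEqual;

fn main() {
    let _subject = AlwaysEqual;
}
```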
#### Methods
* Methods are similar to functions: they’re declared with the fn keyword and their name, they can have parameters and a return value, and they contain some code that is run when they’re called from somewhere else. However, methods are different from functions in that they’re defined within the context of a struct (or an enum or a trait object). Their first parameter is always self, which represents the instance of the struct the method is being called on.
```rs
#[derive(Debug)]
struct Rectangle {
width: u32,
height: u32,
}
impl Rectangle {
fn area(&self) -> u32 {
self.width * self.height
}
fn can_hold(&self, other: &Rectangle) -> bool {
self.width > other.width && self.height > other.height
}
}
fn main() {
let rect1 = Rectangle {
width: 30,
height: 50,
};
println!(
"The area of the rectangle is {} square pixels.",
rect1.area()
);
let rect2 = Rectangle {
width: 10,
height: 40,
};
let rect3 = Rectangle {
width: 60,
height: 45,
};
println!("Can rect1 hold rect2? {}", rect1.can_hold(&rect2));
println!("Can rect1 hold rect3? {}", rect1.can_hold(&rect3));
}
```
### Enum & Option
* Enums allow you to define a type by enumerating its possible values. First, we’ll define and use an enum to show how an enum can encode meaning along with data.
* A particularly useful enum, called Option, which expresses that a value can be either something or nothing.
* Pattern matching in the match expression makes it easy to run different code for different values of an enum.
* Sample from Rust standard library
```rs
struct Ipv4Addr {
// --snip--
}
struct Ipv6Addr {
// --snip--
}
enum IpAddr {
V4(Ipv4Addr),
V6(Ipv6Addr),
}
```
* Rust does not have nulls, but it does have an enum that can encode the concept of a value being present or absent. This enum is Option<T>
```rs
enum Option<T> {
Some(T),
None,
}
```
#### Match
* Rust has an extremely powerful control flow operator called match that allows you to compare a value against a series of patterns and then execute code based on which pattern matches. Patterns can be made up of literal values, variable names, wildcards, and many other things;
```rs
#![allow(unused_variables)]
fn main() {
enum Coin {
Penny,
Nickel,
Dime,
Quarter,
}
fn value_in_cents(coin: Coin) -> u8 {
match coin {
Coin::Penny => 1,
Coin::Nickel => 5,
Coin::Dime => 10,
Coin::Quarter => 25,
}
}
}
```
* Match with Option<T>
```rs
#![allow(unused_variables)]
fn main() {
fn plus_one(x: Option<i32>) -> Option<i32> {
match x {
None => None,
Some(i) => Some(i + 1),
}
}
let five = Some(5);
let six = plus_one(five);
let none = plus_one(None);
}
```
#### Matches Are Exhaustive
* Rust knows that we didn’t cover every possible case and even knows which pattern we forgot! Matches in Rust are exhaustive: we must exhaust every last possibility in order for the code to be valid. Especially in the case of Option<T>, when Rust prevents us from forgetting to explicitly handle the None case.
* The _ Placeholder
```rs
let some_u8_value = 0u8;
match some_u8_value {
1 => println!("one"),
3 => println!("three"),
5 => println!("five"),
7 => println!("seven"),
_ => (),
}
```
### Control Flow with if let
* The if let syntax lets you combine if and let into a less verbose way to handle values that match one pattern while ignoring the rest.
```rs
#![allow(unused_variables)]
fn main() {
let some_u8_value = Some(0u8);
match some_u8_value {
Some(3) => println!("three"),
_ => (),
}
}
```
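The same check written with `if let` is shorter (equivalent to the match above):
```rs
#![allow(unused_variables)]
fn main() {
    let some_u8_value = Some(0u8);
    // Run the block only when the value matches Some(3); all other cases are ignored.
    if let Some(3) = some_u8_value {
        println!("three");
    }
}
```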
* with else
```rs
let mut count = 0;
if let Coin::Quarter(state) = coin {
println!("State quarter from {:?}!", state);
} else {
count += 1;
}
```
<file_sep>+++
title = "Adv Bash - 2"
description = "Reference Cards - String Manipulation"
+++
### Manipulating Strings
Bash supports a surprising number of string manipulation operations. Unfortunately, these tools lack a unified focus. Some are a subset of parameter substitution, and others fall under the functionality of the UNIX expr command. This results in inconsistent command syntax and overlap of functionality, not to mention confusion.
#### String Operations
Expression | Meaning
|-----|-----
| ${#string}| Length of $string
| ${string:position}| Extract substring from $string at $position
| ${string:position:length}| Extract $length characters substring from $string at $position [zero-indexed, first character is at position 0]
| ${string#substring}| Strip shortest match of $substring from front of $string
| ${string##substring}| Strip longest match of $substring from front of $string
| ${string%substring}| Strip shortest match of $substring from back of $string
| ${string%%substring}| Strip longest match of $substring from back of $string
| ${string/substring/replacement}| Replace first match of $substring with $replacement
| ${string//substring/replacement}| Replace all matches of $substring with $replacement
| ${string/#substring/replacement}| If $substring matches front end of $string, substitute $replacement for $substring
| ${string/%substring/replacement}| If $substring matches back end of $string, substitute $replacement for $substring
|expr match "$string" '$substring' | Length of matching $substring* at beginning of $string
|expr "$string" : '$substring' | Length of matching $substring* at beginning of $string
|expr index "$string" $substring | Numerical position in $string of first character in $substring* that matches [0 if no match, first character counts as position 1]
|expr substr $string $position $length | Extract $length characters from $string starting at $position [0 if no match, first character counts as position 1]
|expr match "$string" '\($substring\)' | Extract $substring*, searching from beginning of $string
|expr "$string" : '\($substring\)' | Extract $substring*, searching from beginning of $string
|expr match "$string" '.*\($substring\)' | Extract $substring*, searching from end of $string
|expr "$string" : '.*\($substring\)' | Extract $substring*, searching from end of $string
#### Parameter Substitution and Expansion
Expression | Meaning
|------|--------
| ${var} | Value of var (same as $var)
| ${var-$DEFAULT} | If var not set, evaluate expression as $DEFAULT *
| ${var:-$DEFAULT} | If var not set or is empty, evaluate expression as $DEFAULT *
| ${var=$DEFAULT} | If var not set, evaluate expression as $DEFAULT *
| ${var:=$DEFAULT} | If var not set or is empty, evaluate expression as $DEFAULT *
| ${var+$OTHER} | If var set, evaluate expression as $OTHER, otherwise as null string
| ${var:+$OTHER} | If var set, evaluate expression as $OTHER, otherwise as null string
| ${var?$ERR_MSG} | If var not set, print $ERR_MSG and abort script with an exit status of 1.*
| ${var:?$ERR_MSG} | If var not set, print $ERR_MSG and abort script with an exit status of 1.*
| ${!varprefix*} | Matches all previously declared variables beginning with varprefix
| ${!varprefix@} | Matches all previously declared variables beginning with varprefix
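
A few of these expansions in action (illustrative sketch, added here):
```bash
unset name
echo "${name:-guest}"      # guest  (name stays unset)
echo "${name:=guest}"      # guest  (and name is now assigned)
echo "$name"               # guest
greeting="hi"
echo "${greeting:+hello}"  # hello  (greeting is set)
echo "${!gre*}"            # e.g. greeting  (names of variables starting with "gre")
```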
#### Samples
* Replace file name from abc_noteN to abc_N
```bash
touch abc_note1 abc_note2 abc_note3
for f in $(find . -type f -name *abc_note*);
do
mv -- $f "${f//note}"
done ;
ls abc*
# expected output
# abc_1 abc_2 abc_3
```
<file_sep>+++
title = "Frameworks"
weight = 7
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
title = "MySql: JSON"
description="Json Support "
+++
## Json support
As of MySQL 5.7.8, MySQL supports a native JSON data type defined by RFC 7159 that enables efficient access to data in JSON (JavaScript Object Notation) documents. The JSON data type provides these advantages over storing JSON-format strings in a string column:
* Automatic validation of JSON documents stored in JSON columns. Invalid documents produce an error.
* Optimized storage format. JSON documents stored in JSON columns are converted to an internal format that permits quick read access to document elements. When the server later must read a JSON value stored in this binary format, the value need not be parsed from a text representation. The binary format is structured to enable the server to look up subobjects or nested values directly by key or array index without reading all values before or after them in the document.
### Caveats
* The size of any JSON document stored in a JSON column is limited to the value of the max_allowed_packet system variable.
* A JSON column cannot have a non-NULL default value.
### Json functions
> Along with the JSON data type, a set of SQL functions is available to enable operations on JSON values, such as creation, manipulation, and searching.
Name | Description
:--|:--|
-> | Return value from JSON column after evaluating path; equivalent to JSON_EXTRACT().
->> | Return value from JSON column after evaluating path and unquoting the result; equivalent to JSON_UNQUOTE(JSON_EXTRACT()).
JSON_APPEND() | (deprecated 5.7.9) Append data to JSON document
JSON_ARRAY() | Create JSON array
JSON_ARRAY_APPEND() | Append data to JSON document
JSON_ARRAY_INSERT() | Insert into JSON array
JSON_CONTAINS() | Whether JSON document contains specific object at path
JSON_CONTAINS_PATH() | Whether JSON document contains any data at path
JSON_DEPTH() | Maximum depth of JSON document
JSON_EXTRACT() | Return data from JSON document
JSON_INSERT() | Insert data into JSON document
JSON_KEYS() | Array of keys from JSON document
JSON_LENGTH() | Number of elements in JSON document
JSON_MERGE() | (deprecated 5.7.22) Merge JSON documents, preserving duplicate keys. Deprecated synonym for JSON_MERGE_PRESERVE() |
JSON_MERGE_PATCH() | Merge JSON documents, replacing values of duplicate keys
JSON_MERGE_PRESERVE() | Merge JSON documents, preserving duplicate keys
JSON_OBJECT() | Create JSON object
JSON_PRETTY() | Print a JSON document in human-readable format
JSON_QUOTE() | Quote JSON document
JSON_REMOVE() | Remove data from JSON document
JSON_REPLACE() | Replace values in JSON document
JSON_SEARCH() | Path to value within JSON document
JSON_SET() | Insert data into JSON document
JSON_STORAGE_SIZE() | Space used for storage of binary representation of a JSON document
JSON_TYPE() | Type of JSON value
JSON_UNQUOTE() | Unquote JSON value
JSON_VALID() | Whether JSON value is valid
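
A quick illustration of the `->` and `->>` operators (the `events` table and its `doc` column are hypothetical, used only for this sketch):
```sql
-- Hypothetical table and data, used only for this illustration
CREATE TABLE events (
  id INT AUTO_INCREMENT PRIMARY KEY,
  doc JSON
);
INSERT INTO events (doc)
VALUES ('{"type": "login", "user": {"name": "alice", "id": 42}}');
-- -> keeps the JSON quoting, ->> unquotes the value
SELECT
  doc->'$.user.name'          AS quoted_name, -- "alice"
  doc->>'$.user.name'         AS plain_name,  -- alice
  JSON_EXTRACT(doc, '$.type') AS event_type   -- "login"
FROM events
WHERE JSON_CONTAINS_PATH(doc, 'one', '$.user.id');
```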
### Export table to json file
* Return json object
```sql
-- Return a record as JSON in a query
SELECT
json_object(
'entityId', entityId,
'categoryName', categoryName,
'description', description,
'picture', picture
) as json, 'Category'
FROM Category
WHERE entityId = 1 ;
```
* Json object result
```json
{
"picture": null,
"entityId": 1,
"description": "Soft drinks, coffees, teas, beers, and ales",
"categoryName": "Beverages"
}
```
* Return record as json array
```sql
-- Use json_object and group_concat to build a JSON array
DROP TEMPORARY TABLE IF EXISTS tmp_json_data;
CREATE TEMPORARY TABLE tmp_json_data (
jsonText TEXT
)
SELECT
json_object(
'entityId', entityId,
'categoryName', categoryName,
'description', description,
'picture', picture
    ) AS jsonText
FROM Category ;
SET SESSION group_concat_max_len = 9999;
SELECT concat('[', group_concat(jsonText),']') jsonArray
FROM tmp_json_data;
```
* Json array result
```json
[
{
"categoryName" : "Beverages",
"description" : "Soft drinks, coffees, teas, beers, and ales",
"entityId" : 1,
"picture" : null
},
{
"categoryName" : "Condiments",
"description" : "Sweet and savory sauces, relishes, spreads, and seasonings",
"entityId" : 2,
"picture" : null
},
{
"categoryName" : "Confections",
"description" : "Desserts, candies, and sweet breads",
"entityId" : 3,
"picture" : null
}
]
```
<file_sep>+++
title = "F# Pattern Matching"
description = "F# Pattern Matching"
weight = 8
+++
## Pattern
Patterns are rules for transforming input data. They are used throughout F# to compare data with a logical structure or structures, decompose data into constituent parts, or extract information from data in various ways.
Name | Description | Example
--- | --- | ---
Constant pattern | Any numeric, character, or string literal, an enumeration constant, or a defined literal identifier | 1.0, "test", 30, Color.Red
Identifier pattern | A case value of a discriminated union, an exception label, or an active pattern case | Some(x) Failure(msg)
Variable pattern | identifier | a
as pattern | pattern as identifier | (a, b) as tuple1
OR pattern | pattern1 \| pattern2 | ([h] \| [h; _])
AND pattern | pattern1 & pattern2 | (a, b) & (_, "test")
Cons pattern | identifier :: list-identifier | h :: t
List pattern | [ pattern_1; ... ; pattern_n ] | [ a; b; c ]
Array pattern | [\| pattern_1; ..; pattern_n \|] | [\| a; b; c \|]
Parenthesized pattern | ( pattern ) | ( a )
Tuple pattern | ( pattern_1, ... , pattern_n ) | ( a, b )
Record pattern | { identifier1 = pattern_1; ... ; identifier_n = pattern_n } | { Name = name; }
Wildcard pattern | _ | _
Pattern together with type annotation | pattern : type | a : int
Type test pattern | :? type [ as identifier ] | :? System.DateTime as dt
Null pattern | null | null
Nameof pattern | nameof expr | nameof str
### Constant Patterns
```fsharp
[<Literal>]
let Three = 3
let filter123 x =
match x with
// The following line contains literal patterns combined with an OR pattern.
| 1 | 2 | Three -> printfn "Found 1, 2, or 3!"
// The following line contains a variable pattern.
| var1 -> printfn "%d" var1
for x in 1..10 do filter123 x
// Found 1, 2, or 3!
// Found 1, 2, or 3!
// Found 1, 2, or 3!
// 4
// 5
// 6
// 7
// 8
// 9
// 10
type Color =
| Red = 0
| Green = 1
| Blue = 2
let printColorName (color:Color) =
match color with
| Color.Red -> printfn "Red"
| Color.Green -> printfn "Green"
| Color.Blue -> printfn "Blue"
| _ -> ()
printColorName Color.Red
printColorName Color.Green
printColorName Color.Blue
// Red
// Green
// Blue
```
### Identifier Patterns
```fsharp
let printOption (data : int option) =
match data with
| Some var1 -> printfn "%d" var1
| None -> ()
type PersonName =
| FirstOnly of string
| LastOnly of string
| FirstLast of string * string
let constructQuery personName =
match personName with
| FirstOnly(firstName) -> printfn "May I call you %s?" firstName
| LastOnly(lastName) -> printfn "Are you Mr. or Ms. %s?" lastName
| FirstLast(firstName, lastName) -> printfn "Are you %s %s?" firstName lastName
constructQuery (FirstOnly("john"))
constructQuery (LastOnly("smith"))
constructQuery (FirstLast("john","smith"))
// May I call you john?
// Are you Mr. or Ms. smith?
// Are you <NAME>?
```
### Variable Patterns
```fsharp
let function1 x =
match x with
| (var1, var2) when var1 > var2 -> printfn "%d is greater than %d" var1 var2
| (var1, var2) when var1 < var2 -> printfn "%d is less than %d" var1 var2
| (var1, var2) -> printfn "%d equals %d" var1 var2
function1 (1,2)
function1 (2, 1)
function1 (0, 0)
// 1 is less than 2
// 2 is greater than 1
// 0 equals 0
```
### as Pattern
The `as` pattern is a pattern that has an as clause appended to it.
```fsharp
let (var1, var2) as tuple1 = (1, 2)
printfn "%d %d %A" var1 var2 tuple1
// 1 2 (1, 2)
```
### OR Pattern
The OR pattern is used when input data can match multiple patterns, and you want to execute the same code as a result.
```fsharp
let detectZeroOR point =
match point with
| (0, 0) | (0, _) | (_, 0) -> printfn "Zero found."
| _ -> printfn "Both nonzero."
detectZeroOR (0, 0)
detectZeroOR (1, 0)
detectZeroOR (0, 10)
detectZeroOR (10, 15)
// output
// Zero found.
// Zero found.
// Zero found.
// Both nonzero.
```
### AND Pattern
The AND pattern requires that the input match two patterns. The types of both sides of the AND pattern must be compatible.
```fsharp
let detectZeroAND point =
match point with
| (0, 0) -> printfn "Both values zero."
| (var1, var2) & (0, _) -> printfn "First value is 0 in (%d, %d)" var1 var2
| (var1, var2) & (_, 0) -> printfn "Second value is 0 in (%d, %d)" var1 var2
| _ -> printfn "Both nonzero."
detectZeroAND (0, 0)
detectZeroAND (1, 0)
detectZeroAND (0, 10)
detectZeroAND (10, 15)
// output
// Both values zero.
// Second value is 0 in (1, 0)
// First value is 0 in (0, 10)
// Both nonzero.
```
### Cons Pattern
The cons pattern is used to decompose a list into the first element, the head, and a list that contains the remaining elements, the tail.
```fsharp
let list1 = [ 1; 2; 3; 4 ]
// This example uses a cons pattern and a list pattern.
let rec printList l =
match l with
| head :: tail -> printf "%d " head; printList tail
| [] -> printfn ""
printList list1
```
### List Pattern
The list pattern enables lists to be decomposed into a number of elements. The list pattern itself can match only lists of a specific number of elements.
```fsharp
// This example uses a list pattern.
let listLength list =
match list with
| [] -> 0
| [ _ ] -> 1
| [ _; _ ] -> 2
| [ _; _; _ ] -> 3
| _ -> List.length list
printfn "%d" (listLength [ 1 ])
printfn "%d" (listLength [ 1; 1 ])
printfn "%d" (listLength [ 1; 1; 1; ])
printfn "%d" (listLength [ ] )
// output
// 1
// 2
// 3
// 0
```
### Array Pattern
The array pattern resembles the list pattern and can be used to decompose arrays of a specific length.
```fsharp
// This example uses array patterns.
let vectorLength vec =
match vec with
| [| var1 |] -> var1
| [| var1; var2 |] -> sqrt (var1*var1 + var2*var2)
| [| var1; var2; var3 |] -> sqrt (var1*var1 + var2*var2 + var3*var3)
| _ -> failwith (sprintf "vectorLength called with an unsupported array size of %d." (vec.Length))
printfn "%f" (vectorLength [| 1. |])
printfn "%f" (vectorLength [| 1.; 1. |])
printfn "%f" (vectorLength [| 1.; 1.; 1.; |])
printfn "%f" (vectorLength [| |] )
// output
// 1.000000
// 1.414214
// 1.732051
// System.Exception: vectorLength called with an unsupported array size of 0.
// at FSI_0055.vectorLength(Double[] vec)
// at <StartupCode$FSI_0055>.$FSI_0055.main@()
// Stopped due to error
```
### Tuple Pattern
The tuple pattern matches input in tuple form and enables the tuple to be decomposed into its constituent elements by using pattern matching variables for each position in the tuple.
```fsharp
let detectZeroTuple point =
match point with
| (0, 0) -> printfn "Both values zero."
| (0, var2) -> printfn "First value is 0 in (0, %d)" var2
| (var1, 0) -> printfn "Second value is 0 in (%d, 0)" var1
| _ -> printfn "Both nonzero."
detectZeroTuple (0, 0)
detectZeroTuple (1, 0)
detectZeroTuple (0, 10)
detectZeroTuple (10, 15)
// output
// Both values zero.
// Second value is 0 in (1, 0)
// First value is 0 in (0, 10)
// Both nonzero.
```
### Record Pattern
The record pattern is used to decompose records to extract the values of fields. The pattern does not have to reference all fields of the record; any omitted fields just do not participate in matching and are not extracted.
```fsharp
// Record type assumed by the samples below (added so they compile)
type MyRecord = { Name: string; ID: int }

let IsMatchByName record1 (name: string) =
match record1 with
| { MyRecord.Name = nameFound; MyRecord.ID = _; } when nameFound = name -> true
| _ -> false
let recordX = { Name = "Parker"; ID = 10 }
let isNameMatched1 = IsMatchByName recordX "Parker"
let isNameMatched2 = IsMatchByName recordX "Hartono"
printfn "isNameMatched1 %A isNameMatched2 %A " isNameMatched1 isNameMatched2
// output
// isNameMatched1 true isNameMatched2 false
let IsMatchByNameOrId record1 (name: string, id: int) =
match record1 with
| { MyRecord.Name = nameFound; MyRecord.ID = _; } when nameFound = name -> true
| { MyRecord.ID = myid; MyRecord.Name = _ ;} when myid = id -> true
| _ -> false
let isAnyMatched1 = IsMatchByNameOrId recordX ("Parker", 2)
let isAnyMatched2 = IsMatchByNameOrId recordX ("Hartono", 10)
let isAnyMatched3 = IsMatchByNameOrId recordX ("XXX", 5)
printfn "isAnyMatched1 %A isAnyMatched2 %A isAnyMatched3 %A" isAnyMatched1 isAnyMatched2 isAnyMatched3
// output
// isAnyMatched1 true isAnyMatched2 true isAnyMatched3 false
```
### Patterns That Have Type Annotations
Patterns can have type annotations.
```fsharp
let detect1 x =
match x with
| 1 -> printfn "Found a 1!"
| (var1 : int) -> printfn "%d" var1
detect1 5
detect1 1
// output
// 5
// Found a 1!
```
### Type Test Pattern
The type test pattern is used to match the input against a type.
```fsharp
open System.Windows.Forms
let RegisterControl(control:Control) =
match control with
| :? Button as button -> button.Text <- "Registered."
| :? CheckBox as checkbox -> checkbox.Text <- "Registered."
| _ -> ()
// ------------------------------------------------------------------------
// If you're only checking if an identifier is of a particular derived type,
// you don't need the as identifier part of the pattern
type A() = class end
type B() = inherit A()
type C() = inherit A()
let m (a: A) =
match a with
| :? B -> printfn "It's a B"
| :? C -> printfn "It's a C"
| _ -> ()
```
### Null Pattern
The null pattern matches the null value that can appear when you are working with types that allow a null value.
```fsharp
let ReadFromFile (reader : System.IO.StreamReader) =
match reader.ReadLine() with
| null -> printfn "\n"; false
| line -> printfn "%s" line; true
let fs = System.IO.File.Open("./test.fs", System.IO.FileMode.Open)
let sr = new System.IO.StreamReader(fs)
while ReadFromFile(sr) = true do ()
sr.Close()
// output
// let ReadFromFile (reader : System.IO.StreamReader) =
// match reader.ReadLine() with
// | null -> printfn "\n"; false
// | line -> printfn "%s" line; true
// let fs = System.IO.File.Open("./test.fs", System.IO.FileMode.Open)
// let sr = new System.IO.StreamReader(fs)
// while ReadFromFile(sr) = true do ()
// sr.Close()
```
### Nameof pattern
The nameof pattern matches against a string when its value is equal to the expression that follows the nameof keyword.
```fsharp
let f (str: string) =
match str with
| nameof str -> "It's 'str'!"
| _ -> "It is not 'str'!"
f "str" // matches
f "asdf" // does not match
// output
// It's 'str'!
// It is not 'str'!
```
<file_sep>+++
title = "Hacks"
weight = 4
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
title = "C Lecture - 2"
description = "Exercise 32 ~ 40"
+++
Author: <NAME>
All content comes from Zed's [Lecture Repository](https://github.com/zedshaw/learn-c-the-hard-way-lectures.git) and [Libraries Repository](https://github.com/zedshaw/liblcthw). All credit goes to Zed.
### Exercise 32 Double Linked Lists
The Plan
Learn about your very first data structure:
Double Linked Lists
Creating A liblcthw Project
We'll need a project for the rest of the book called *liblcthw*.
Algorithms and Data Structures
A big step in going from amateur to professional is learning
about data structures and algorithms.
A double linked list is the easiest one.
Double Linked Lists Visually
I'll quickly draw some diagrams to show you how they work.
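The diagrams themselves are on the video; as a rough stand-in, the picture looks like this (an illustrative comment only — the real structs come in ``list.h`` below):
```c
/* Illustration only -- the real ListNode and List structs are in list.h below.
 *
 *   list->first                                  list->last
 *        |                                            |
 *        v                                            v
 *      [ A ] <---prev--- [ B ] <---prev--- [ C ]
 *      [   ] ---next---> [   ] ---next---> [   ] ---next---> NULL
 *
 * Every node points at both of its neighbors, so you can walk the list in
 * either direction and unlink a node without scanning from the front.
 */
```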
Automated Testing Demo
You can enter the code just fine, but watch me write
the test.
Code Reviews
.\ex32\list.h
```c
#ifndef lcthw_List_h
#define lcthw_List_h
#include <stdlib.h>
struct ListNode;
typedef struct ListNode {
struct ListNode *next;
struct ListNode *prev;
void *value;
} ListNode;
typedef struct List {
int count;
ListNode *first;
ListNode *last;
} List;
List *List_create();
void List_destroy(List * list);
void List_clear(List * list);
void List_clear_destroy(List * list);
#define List_count(A) ((A)->count)
#define List_first(A) ((A)->first != NULL ? (A)->first->value : NULL)
#define List_last(A) ((A)->last != NULL ? (A)->last->value : NULL)
void List_push(List * list, void *value);
void *List_pop(List * list);
void List_unshift(List * list, void *value);
void *List_shift(List * list);
void *List_remove(List * list, ListNode * node);
#define LIST_FOREACH(L, S, M, V) ListNode *_node = NULL;\
ListNode *V = NULL;\
for(V = _node = L->S; _node != NULL; V = _node = _node->M)
#endif
```
.\ex32\list.c
```c
#include <lcthw/list.h>
#include <lcthw/dbg.h>
List *List_create()
{
return calloc(1, sizeof(List));
}
void List_destroy(List * list)
{
LIST_FOREACH(list, first, next, cur) {
if (cur->prev) {
free(cur->prev);
}
}
free(list->last);
free(list);
}
void List_clear(List * list)
{
LIST_FOREACH(list, first, next, cur) {
free(cur->value);
}
}
void List_clear_destroy(List * list)
{
List_clear(list);
List_destroy(list);
}
void List_push(List * list, void *value)
{
ListNode *node = calloc(1, sizeof(ListNode));
check_mem(node);
node->value = value;
if (list->last == NULL) {
list->first = node;
list->last = node;
} else {
list->last->next = node;
node->prev = list->last;
list->last = node;
}
list->count++;
error:
return;
}
void *List_pop(List * list)
{
ListNode *node = list->last;
return node != NULL ? List_remove(list, node) : NULL;
}
void List_unshift(List * list, void *value)
{
ListNode *node = calloc(1, sizeof(ListNode));
check_mem(node);
node->value = value;
if (list->first == NULL) {
list->first = node;
list->last = node;
} else {
node->next = list->first;
list->first->prev = node;
list->first = node;
}
list->count++;
error:
return;
}
void *List_shift(List * list)
{
ListNode *node = list->first;
return node != NULL ? List_remove(list, node) : NULL;
}
void *List_remove(List * list, ListNode * node)
{
void *result = NULL;
check(list->first && list->last, "List is empty.");
check(node, "node can't be NULL");
if (node == list->first && node == list->last) {
list->first = NULL;
list->last = NULL;
} else if (node == list->first) {
list->first = node->next;
check(list->first != NULL,
"Invalid list, somehow got a first that is NULL.");
list->first->prev = NULL;
} else if (node == list->last) {
list->last = node->prev;
check(list->last != NULL,
"Invalid list, somehow got a next that is NULL.");
list->last->next = NULL;
} else {
ListNode *after = node->next;
ListNode *before = node->prev;
after->prev = before;
before->next = after;
}
list->count--;
result = node->value;
free(node);
error:
return result;
}
```
.\ex32\list_tests.c
```c
#include "minunit.h"
#include <lcthw/list.h>
#include <assert.h>
static List *list = NULL;
char *test1 = "test1 data";
char *test2 = "test2 data";
char *test3 = "test3 data";
char *test_create()
{
list = List_create();
mu_assert(list != NULL, "Failed to create list.");
return NULL;
}
char *test_destroy()
{
List_clear_destroy(list);
return NULL;
}
char *test_push_pop()
{
List_push(list, test1);
mu_assert(List_last(list) == test1, "Wrong last value.");
List_push(list, test2);
mu_assert(List_last(list) == test2, "Wrong last value");
List_push(list, test3);
mu_assert(List_last(list) == test3, "Wrong last value.");
mu_assert(List_count(list) == 3, "Wrong count on push.");
char *val = List_pop(list);
mu_assert(val == test3, "Wrong value on pop.");
val = List_pop(list);
mu_assert(val == test2, "Wrong value on pop.");
val = List_pop(list);
mu_assert(val == test1, "Wrong value on pop.");
mu_assert(List_count(list) == 0, "Wrong count after pop.");
return NULL;
}
char *test_unshift()
{
List_unshift(list, test1);
mu_assert(List_first(list) == test1, "Wrong first value.");
List_unshift(list, test2);
mu_assert(List_first(list) == test2, "Wrong first value");
List_unshift(list, test3);
mu_assert(List_first(list) == test3, "Wrong last value.");
mu_assert(List_count(list) == 3, "Wrong count on unshift.");
return NULL;
}
char *test_remove()
{
// we only need to test the middle remove case since push/shift
// already tests the other cases
char *val = List_remove(list, list->first->next);
mu_assert(val == test2, "Wrong removed element.");
mu_assert(List_count(list) == 2, "Wrong count after remove.");
mu_assert(List_first(list) == test3, "Wrong first after remove.");
mu_assert(List_last(list) == test1, "Wrong last after remove.");
return NULL;
}
char *test_shift()
{
mu_assert(List_count(list) != 0, "Wrong count before shift.");
char *val = List_shift(list);
mu_assert(val == test3, "Wrong value on shift.");
val = List_shift(list);
mu_assert(val == test1, "Wrong value on shift.");
mu_assert(List_count(list) == 0, "Wrong count after shift.");
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_create);
mu_run_test(test_push_pop);
mu_run_test(test_unshift);
mu_run_test(test_remove);
mu_run_test(test_shift);
mu_run_test(test_destroy);
return NULL;
}
RUN_TESTS(all_tests);
```
Later videos will demonstrate how to do code reviews to make the code solid.
Improving It
* You can make ``List_clear_destroy`` more efficient by using
``LIST_FOREACH`` and doing both ``free`` calls inside one
loop (a sketch follows this list).
* You can add asserts for preconditions so that the program isn't given a ``NULL``
value for the ``List *list`` parameters.
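As a rough sketch of the first suggestion above (not the book's official answer), the clear and destroy passes can collapse into a single walk that reuses ``LIST_FOREACH`` and the same free-the-previous-node trick ``List_destroy`` already uses:
```c
// Sketch only: free each value and each node in one pass.
void List_clear_destroy(List * list)
{
    LIST_FOREACH(list, first, next, cur) {
        free(cur->value);        // what List_clear did
        if (cur->prev) {
            free(cur->prev);     // what List_destroy did
        }
    }

    free(list->last);
    free(list);
}
```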
Improving It
* You can add invariants that check that the list's contents are always correct,
such as ``count`` is never ``< 0``, and if ``count > 0``, then ``first`` isn't NULL.
* You can add documentation to the header file in the form of comments before
each struct, function, and macro that describes what it does.
Extra Credit
* Research doubly vs. singly linked lists and when one is preferred over
the other.
* Research the limitations of a doubly linked list. For example, while they
are efficient for inserting and deleting elements, they are very slow for
iterating over them all.
* What operations are missing that you can imagine needing? Some examples
are copying, joining, and splitting. Implement these operations and write the
unit tests for them.
### Exercise 33 Linked List Algorithms
The Plan
Learn two sorting algorithms for double linked lists.
Watch how to conduct a simple code review.
The Code
.\ex33\list_algos.h
```c
#ifndef lcthw_List_algos_h
#define lcthw_List_algos_h
#include <lcthw/list.h>
typedef int (*List_compare) (const void *a, const void *b);
int List_bubble_sort(List * list, List_compare cmp);
List *List_merge_sort(List * list, List_compare cmp);
#endif
```
.\ex33\list_algos.c
```c
#include <lcthw/list_algos.h>
#include <lcthw/dbg.h>
inline void ListNode_swap(ListNode * a, ListNode * b)
{
void *temp = a->value;
a->value = b->value;
b->value = temp;
}
int List_bubble_sort(List * list, List_compare cmp)
{
int sorted = 1;
if (List_count(list) <= 1) {
return 0; // already sorted
}
do {
sorted = 1;
LIST_FOREACH(list, first, next, cur) {
if (cur->next) {
if (cmp(cur->value, cur->next->value) > 0) {
ListNode_swap(cur, cur->next);
sorted = 0;
}
}
}
} while (!sorted);
return 0;
}
inline List *List_merge(List * left, List * right, List_compare cmp)
{
List *result = List_create();
void *val = NULL;
while (List_count(left) > 0 || List_count(right) > 0) {
if (List_count(left) > 0 && List_count(right) > 0) {
if (cmp(List_first(left), List_first(right)) <= 0) {
val = List_shift(left);
} else {
val = List_shift(right);
}
List_push(result, val);
} else if (List_count(left) > 0) {
val = List_shift(left);
List_push(result, val);
} else if (List_count(right) > 0) {
val = List_shift(right);
List_push(result, val);
}
}
return result;
}
List *List_merge_sort(List * list, List_compare cmp)
{
List *result = NULL;
if (List_count(list) <= 1) {
return list;
}
List *left = List_create();
List *right = List_create();
int middle = List_count(list) / 2;
LIST_FOREACH(list, first, next, cur) {
if (middle > 0) {
List_push(left, cur->value);
} else {
List_push(right, cur->value);
}
middle--;
}
List *sort_left = List_merge_sort(left, cmp);
List *sort_right = List_merge_sort(right, cmp);
if (sort_left != left)
List_destroy(left);
if (sort_right != right)
List_destroy(right);
result = List_merge(sort_left, sort_right, cmp);
List_destroy(sort_left);
List_destroy(sort_right);
return result;
}
```
.\ex33\list_algos_tests.c
```c
#include "minunit.h"
#include <lcthw/list_algos.h>
#include <assert.h>
#include <string.h>
char *values[] = { "XXXX", "1234", "abcd", "xjvef", "NDSS" };
#define NUM_VALUES 5
List *create_words()
{
int i = 0;
List *words = List_create();
for (i = 0; i < NUM_VALUES; i++) {
List_push(words, values[i]);
}
return words;
}
int is_sorted(List * words)
{
LIST_FOREACH(words, first, next, cur) {
if (cur->next && strcmp(cur->value, cur->next->value) > 0) {
debug("%s %s", (char *)cur->value,
(char *)cur->next->value);
return 0;
}
}
return 1;
}
char *test_bubble_sort()
{
List *words = create_words();
// should work on a list that needs sorting
int rc = List_bubble_sort(words, (List_compare) strcmp);
mu_assert(rc == 0, "Bubble sort failed.");
mu_assert(is_sorted(words),
"Words are not sorted after bubble sort.");
// should work on an already sorted list
rc = List_bubble_sort(words, (List_compare) strcmp);
mu_assert(rc == 0, "Bubble sort of already sorted failed.");
mu_assert(is_sorted(words),
"Words should be sort if already bubble sorted.");
List_destroy(words);
// should work on an empty list
words = List_create(words);
rc = List_bubble_sort(words, (List_compare) strcmp);
mu_assert(rc == 0, "Bubble sort failed on empty list.");
mu_assert(is_sorted(words), "Words should be sorted if empty.");
List_destroy(words);
return NULL;
}
char *test_merge_sort()
{
List *words = create_words();
// should work on a list that needs sorting
List *res = List_merge_sort(words, (List_compare) strcmp);
mu_assert(is_sorted(res), "Words are not sorted after merge sort.");
List *res2 = List_merge_sort(res, (List_compare) strcmp);
mu_assert(is_sorted(res),
"Should still be sorted after merge sort.");
List_destroy(res2);
List_destroy(res);
List_destroy(words);
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_bubble_sort);
mu_run_test(test_merge_sort);
return NULL;
}
RUN_TESTS(all_tests);
```
You should be able to create this and figure out how it works.
I will assume you've done that, so now we move on to the code review.
Bubble Sort
Code review of bubble sort.
Start with the unit test and move from there.
Merge Sort
Code review of merge sort.
Improving It
* The merge sort does a crazy amount of copying and creating lists, so find ways to reduce this.
* The bubble sort description in Wikipedia mentions a few optimizations. Try to implement them.
* Can you use the ``List_split`` and ``List_join`` (if you implemented them) to improve merge sort?
* Go through all of the defensive programming checks and improve the robustness of
this implementation, protecting against bad ``NULL`` pointers, and then create
an optional debug level invariant that works like ``is_sorted`` does
after a sort.
Breaking It
* Overload the data structure to hit the worst case time complexity.
* Give it a bad data structure.
Extra Credit
* Create a unit test that compares the performance of the two algorithms. You'll want to look at ``man 3 time`` for a basic timer function,
and run enough iterations to at least have a few seconds of samples.
* Play with the amount of data in the lists that need to be sorted and see if that changes your timing.
* Find a way to simulate filling different sized random lists, measuring how long they take. Then, graph the result to see how it compares to the
description of the algorithm.
Extra Credit
* Try to explain why sorting linked lists is a really bad idea.
* Implement a ``List_insert_sorted`` that will take a given value, and using the ``List_compare``, insert the element at the
right position so that the list is always sorted. How does using this method compare to sorting a list after you've built it?
* Try implementing the bottom-up merge sort described on the Wikipedia page. The code there is already C, so it should be easy to
recreate, but try to understand how it's working compared to the slower one I have here.
### Exercise 34 Dynamic Array
The Plan
Learn about dynamic arrays, a very useful data structure.
Code Review
.\ex34\darray.h
```c
#ifndef _DArray_h
#define _DArray_h
#include <stdlib.h>
#include <assert.h>
#include <lcthw/dbg.h>
typedef struct DArray {
int end;
int max;
size_t element_size;
size_t expand_rate;
void **contents;
} DArray;
DArray *DArray_create(size_t element_size, size_t initial_max);
void DArray_destroy(DArray * array);
void DArray_clear(DArray * array);
int DArray_expand(DArray * array);
int DArray_contract(DArray * array);
int DArray_push(DArray * array, void *el);
void *DArray_pop(DArray * array);
void DArray_clear_destroy(DArray * array);
#define DArray_last(A) ((A)->contents[(A)->end - 1])
#define DArray_first(A) ((A)->contents[0])
#define DArray_end(A) ((A)->end)
#define DArray_count(A) DArray_end(A)
#define DArray_max(A) ((A)->max)
#define DEFAULT_EXPAND_RATE 300
static inline void DArray_set(DArray * array, int i, void *el)
{
check(i < array->max, "darray attempt to set past max");
if (i > array->end)
array->end = i;
array->contents[i] = el;
error:
return;
}
static inline void *DArray_get(DArray * array, int i)
{
check(i < array->max, "darray attempt to get past max");
return array->contents[i];
error:
return NULL;
}
static inline void *DArray_remove(DArray * array, int i)
{
void *el = array->contents[i];
array->contents[i] = NULL;
return el;
}
static inline void *DArray_new(DArray * array)
{
check(array->element_size > 0,
"Can't use DArray_new on 0 size darrays.");
return calloc(1, array->element_size);
error:
return NULL;
}
#define DArray_free(E) free((E))
#endif
```
.\ex34\darray.c
```c
#include <lcthw/darray.h>
#include <assert.h>
DArray *DArray_create(size_t element_size, size_t initial_max)
{
DArray *array = malloc(sizeof(DArray));
check_mem(array);
array->max = initial_max;
check(array->max > 0, "You must set an initial_max > 0.");
array->contents = calloc(initial_max, sizeof(void *));
check_mem(array->contents);
array->end = 0;
array->element_size = element_size;
array->expand_rate = DEFAULT_EXPAND_RATE;
return array;
error:
if (array)
free(array);
return NULL;
}
void DArray_clear(DArray * array)
{
int i = 0;
if (array->element_size > 0) {
for (i = 0; i < array->max; i++) {
if (array->contents[i] != NULL) {
free(array->contents[i]);
}
}
}
}
static inline int DArray_resize(DArray * array, size_t newsize)
{
array->max = newsize;
check(array->max > 0, "The newsize must be > 0.");
void *contents = realloc(
array->contents, array->max * sizeof(void *));
// check contents and assume realloc doesn't harm the original on error
check_mem(contents);
array->contents = contents;
return 0;
error:
return -1;
}
int DArray_expand(DArray * array)
{
size_t old_max = array->max;
check(DArray_resize(array, array->max + array->expand_rate) == 0,
"Failed to expand array to new size: %d",
array->max + (int)array->expand_rate);
memset(array->contents + old_max, 0, array->expand_rate + 1);
return 0;
error:
return -1;
}
int DArray_contract(DArray * array)
{
int new_size = array->end < (int)array->expand_rate ?
(int)array->expand_rate : array->end;
return DArray_resize(array, new_size + 1);
}
void DArray_destroy(DArray * array)
{
if (array) {
if (array->contents)
free(array->contents);
free(array);
}
}
void DArray_clear_destroy(DArray * array)
{
DArray_clear(array);
DArray_destroy(array);
}
int DArray_push(DArray * array, void *el)
{
array->contents[array->end] = el;
array->end++;
if (DArray_end(array) >= DArray_max(array)) {
return DArray_expand(array);
} else {
return 0;
}
}
void *DArray_pop(DArray * array)
{
check(array->end - 1 >= 0, "Attempt to pop from empty array.");
void *el = DArray_remove(array, array->end - 1);
array->end--;
if (DArray_end(array) > (int)array->expand_rate
&& DArray_end(array) % array->expand_rate) {
DArray_contract(array);
}
return el;
error:
return NULL;
}
```
.\ex34\darray_tests.c
```c
#include "minunit.h"
#include <lcthw/darray.h>
static DArray *array = NULL;
static int *val1 = NULL;
static int *val2 = NULL;
char *test_create()
{
array = DArray_create(sizeof(int), 100);
mu_assert(array != NULL, "DArray_create failed.");
mu_assert(array->contents != NULL, "contents are wrong in darray");
mu_assert(array->end == 0, "end isn't at the right spot");
mu_assert(array->element_size == sizeof(int),
"element size is wrong.");
mu_assert(array->max == 100, "wrong max length on initial size");
return NULL;
}
char *test_destroy()
{
DArray_destroy(array);
return NULL;
}
char *test_new()
{
val1 = DArray_new(array);
mu_assert(val1 != NULL, "failed to make a new element");
val2 = DArray_new(array);
mu_assert(val2 != NULL, "failed to make a new element");
return NULL;
}
char *test_set()
{
DArray_set(array, 0, val1);
DArray_set(array, 1, val2);
return NULL;
}
char *test_get()
{
mu_assert(DArray_get(array, 0) == val1, "Wrong first value.");
mu_assert(DArray_get(array, 1) == val2, "Wrong second value.");
return NULL;
}
char *test_remove()
{
int *val_check = DArray_remove(array, 0);
mu_assert(val_check != NULL, "Should not get NULL.");
mu_assert(*val_check == *val1, "Should get the first value.");
mu_assert(DArray_get(array, 0) == NULL, "Should be gone.");
DArray_free(val_check);
val_check = DArray_remove(array, 1);
mu_assert(val_check != NULL, "Should not get NULL.");
mu_assert(*val_check == *val2, "Should get the first value.");
mu_assert(DArray_get(array, 1) == NULL, "Should be gone.");
DArray_free(val_check);
return NULL;
}
char *test_expand_contract()
{
int old_max = array->max;
DArray_expand(array);
mu_assert((unsigned int)array->max == old_max + array->expand_rate,
"Wrong size after expand.");
DArray_contract(array);
mu_assert((unsigned int)array->max == array->expand_rate + 1,
"Should stay at the expand_rate at least.");
DArray_contract(array);
mu_assert((unsigned int)array->max == array->expand_rate + 1,
"Should stay at the expand_rate at least.");
return NULL;
}
char *test_push_pop()
{
int i = 0;
for (i = 0; i < 1000; i++) {
int *val = DArray_new(array);
*val = i * 333;
DArray_push(array, val);
}
mu_assert(array->max == 1201, "Wrong max size.");
for (i = 999; i >= 0; i--) {
int *val = DArray_pop(array);
mu_assert(val != NULL, "Shouldn't get a NULL.");
mu_assert(*val == i * 333, "Wrong value.");
DArray_free(val);
}
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_create);
mu_run_test(test_new);
mu_run_test(test_set);
mu_run_test(test_get);
mu_run_test(test_remove);
mu_run_test(test_expand_contract);
mu_run_test(test_push_pop);
mu_run_test(test_destroy);
return NULL;
}
RUN_TESTS(all_tests);
```
Starting with the header file to implement, then the test, then the implementation.
The Analysis
DArray Advantages
* Iteration: You can just use a basic for-loop and ``DArray_count``
with ``DArray_get``, and you're done. No special macros needed, and
it's faster because you aren't walking through pointers (see the sketch after this list).
* Indexing: You can use ``DArray_get`` and ``DArray_set`` to
access any element at random, but with a ``List`` you have to go
through N elements to get to N+1.
* Destroying: You can just free the struct and the ``contents`` in
two operations. A ``List`` requires a series of ``free`` calls
and walking every element.
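To make the iteration and indexing points concrete, here is a minimal sketch against the ``darray.h`` shown above (the function name is made up for illustration):
```c
#include <stdio.h>
#include <lcthw/darray.h>

// Sketch only: a plain for-loop is all you need -- no LIST_FOREACH style macro.
void DArray_print_ints(DArray * array)
{
    int i = 0;
    for (i = 0; i < DArray_count(array); i++) {
        int *val = DArray_get(array, i);    // O(1) random access by index
        if (val) {
            printf("%d: %d\n", i, *val);
        }
    }
}
```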
DArray Advantages
* Cloning: You can also clone it in just two operations (plus whatever
it's storing) by copying the struct and ``contents``. A list
again requires walking through the whole thing and copying every ``ListNode``
plus its value.
* Sorting: As you saw, ``List`` is horrible if you need to keep the
data sorted. A ``DArray`` opens up a whole class of great sorting
algorithms, because now you can access elements randomly.
* Large Data: If you need to keep around a lot of data, then a ``DArray``
wins since its base, ``contents``, takes up less memory than the same
number of ``ListNode`` structs.
DArray Disadvantages
* Insert and remove on the front (what I called shift). A ``DArray``
needs special treatment to be able to do this efficiently, and usually it
has to do some copying.
* Splitting or joining: A ``List`` can just copy some pointers and
it's done, but with a ``DArray``, you have copy all of the
arrays involved.
DArray Disadvantages
* Small Data. If you only need to store a few elements, then typically the
storage will be less in a ``List`` than a generic ``DArray``. This is because
the ``DArray`` needs to expand the backing store to accommodate future
inserts, while a ``List`` only makes what it needs.
Breaking It
* Forget to check the return value from malloc and then use the buffer.
* Getting the end and start count of the buffer wrong. Easy to do an off-by-one here.
* Exploit the insert and delete costs to cause a denial of service.
Extra Credit
* Improve the unit tests to cover more of the operations, and test them
using a for-loop to ensure that they work.
* Research what it would take to implement bubble sort and merge sort
for DArray, but don't do it yet. I'll be implementing DArray algorithms
next, so you'll do this then.
Extra Credit
* Write some performance tests for common operations and compare them
to the same operations in ``List``. You did some of this already, but this
time, write a unit test that repeatedly does the operation in question, and
then in the main runner, do the timing.
Extra Credit
* Look at how the ``DArray_expand`` is implemented using a constant increase (size + 300).
Typically, dynamic arrays are implemented with a multiplicative increase (size * 2), but I've
found this to cost needless memory for no real performance gain. Test my assertion
and see when you'd want a multiplicative increase instead of a constant increase.
### Exercise 35 Sorting and Searching
The Plan
* Make a simple DArray sorting library using existing functions.
* Implement a new structure and algorithm called a "Radix Map".
* Create a binary search algorithm for the RadixMap.
The DArray Code
.\ex35\darray_algos.h
```c
#ifndef darray_algos_h
#define darray_algos_h
#include <lcthw/darray.h>
typedef int (*DArray_compare) (const void *a, const void *b);
int DArray_qsort(DArray * array, DArray_compare cmp);
int DArray_heapsort(DArray * array, DArray_compare cmp);
int DArray_mergesort(DArray * array, DArray_compare cmp);
#endif
```
.\ex35\darray_algos.c
```c
#include <lcthw/darray_algos.h>
#include <stdlib.h>
int DArray_qsort(DArray * array, DArray_compare cmp)
{
qsort(array->contents, DArray_count(array), sizeof(void *), cmp);
return 0;
}
int DArray_heapsort(DArray * array, DArray_compare cmp)
{
return heapsort(array->contents, DArray_count(array),
sizeof(void *), cmp);
}
int DArray_mergesort(DArray * array, DArray_compare cmp)
{
return mergesort(array->contents, DArray_count(array),
sizeof(void *), cmp);
}
```
.\ex35\darray_algos_tests.c
```c
#include "minunit.h"
#include <lcthw/darray_algos.h>
int testcmp(char **a, char **b)
{
return strcmp(*a, *b);
}
DArray *create_words()
{
DArray *result = DArray_create(0, 5);
char *words[] = { "asdfasfd",
"werwar", "13234", "asdfasfd", "oioj" };
int i = 0;
for (i = 0; i < 5; i++) {
DArray_push(result, words[i]);
}
return result;
}
int is_sorted(DArray * array)
{
int i = 0;
for (i = 0; i < DArray_count(array) - 1; i++) {
if (strcmp(DArray_get(array, i), DArray_get(array, i + 1)) > 0) {
return 0;
}
}
return 1;
}
char *run_sort_test(int (*func) (DArray *, DArray_compare),
const char *name)
{
DArray *words = create_words();
mu_assert(!is_sorted(words), "Words should start not sorted.");
debug("--- Testing %s sorting algorithm", name);
int rc = func(words, (DArray_compare) testcmp);
mu_assert(rc == 0, "sort failed");
mu_assert(is_sorted(words), "didn't sort it");
DArray_destroy(words);
return NULL;
}
char *test_qsort()
{
return run_sort_test(DArray_qsort, "qsort");
}
char *test_heapsort()
{
return run_sort_test(DArray_heapsort, "heapsort");
}
char *test_mergesort()
{
return run_sort_test(DArray_mergesort, "mergesort");
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_qsort);
mu_run_test(test_heapsort);
mu_run_test(test_mergesort);
return NULL;
}
RUN_TESTS(all_tests);
```
Continuing the code review method with a part of DArray.
The RadixMap Code
.\ex35\radixmap.h
```c
#ifndef _radixmap_h
#define _radixmap_h
#include <stddef.h>
#include <stdint.h>
typedef union RMElement {
uint64_t raw;
struct {
uint32_t key;
uint32_t value;
} data;
} RMElement;
typedef struct RadixMap {
size_t max;
size_t end;
uint32_t counter;
RMElement *contents;
RMElement *temp;
} RadixMap;
RadixMap *RadixMap_create(size_t max);
void RadixMap_destroy(RadixMap * map);
void RadixMap_sort(RadixMap * map);
RMElement *RadixMap_find(RadixMap * map, uint32_t key);
int RadixMap_add(RadixMap * map, uint32_t key, uint32_t value);
int RadixMap_delete(RadixMap * map, RMElement * el);
#endif
```
.\ex35\radixmap.c
```c
/*
* Based on code by <NAME> then heavily modified by <NAME>.
*/
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>
#include <lcthw/radixmap.h>
#include <lcthw/dbg.h>
RadixMap *RadixMap_create(size_t max)
{
RadixMap *map = calloc(sizeof(RadixMap), 1);
check_mem(map);
map->contents = calloc(sizeof(RMElement), max + 1);
check_mem(map->contents);
map->temp = calloc(sizeof(RMElement), max + 1);
check_mem(map->temp);
map->max = max;
map->end = 0;
return map;
error:
return NULL;
}
void RadixMap_destroy(RadixMap * map)
{
if (map) {
free(map->contents);
free(map->temp);
free(map);
}
}
#define ByteOf(x,y) (((uint8_t *)x)[(y)])
static inline void radix_sort(short offset, uint64_t max,
uint64_t * source, uint64_t * dest)
{
uint64_t count[256] = { 0 };
uint64_t *cp = NULL;
uint64_t *sp = NULL;
uint64_t *end = NULL;
uint64_t s = 0;
uint64_t c = 0;
// count occurences of every byte value
for (sp = source, end = source + max; sp < end; sp++) {
count[ByteOf(sp, offset)]++;
}
// transform count into index by summing
// elements and storing into same array
for (s = 0, cp = count, end = count + 256; cp < end; cp++) {
c = *cp;
*cp = s;
s += c;
}
// fill dest with the right values in the right place
for (sp = source, end = source + max; sp < end; sp++) {
cp = count + ByteOf(sp, offset);
dest[*cp] = *sp;
++(*cp);
}
}
void RadixMap_sort(RadixMap * map)
{
uint64_t *source = &map->contents[0].raw;
uint64_t *temp = &map->temp[0].raw;
radix_sort(0, map->end, source, temp);
radix_sort(1, map->end, temp, source);
radix_sort(2, map->end, source, temp);
radix_sort(3, map->end, temp, source);
}
RMElement *RadixMap_find(RadixMap * map, uint32_t to_find)
{
int low = 0;
int high = map->end - 1;
RMElement *data = map->contents;
while (low <= high) {
int middle = low + (high - low) / 2;
uint32_t key = data[middle].data.key;
if (to_find < key) {
high = middle - 1;
} else if (to_find > key) {
low = middle + 1;
} else {
return &data[middle];
}
}
return NULL;
}
int RadixMap_add(RadixMap * map, uint32_t key, uint32_t value)
{
check(key < UINT32_MAX, "Key can't be equal to UINT32_MAX.");
RMElement element = {.data = {.key = key,.value = value} };
check(map->end + 1 < map->max, "RadixMap is full.");
map->contents[map->end++] = element;
RadixMap_sort(map);
return 0;
error:
return -1;
}
int RadixMap_delete(RadixMap * map, RMElement * el)
{
check(map->end > 0, "There is nothing to delete.");
check(el != NULL, "Can't delete a NULL element.");
el->data.key = UINT32_MAX;
if (map->end > 1) {
// don't bother resorting a map of 1 length
RadixMap_sort(map);
}
map->end--;
return 0;
error:
return -1;
}
```
.\ex35\radixmap_tests.c
```c
#include "minunit.h"
#include <lcthw/radixmap.h>
#include <time.h>
static int make_random(RadixMap * map)
{
size_t i = 0;
for (i = 0; i < map->max - 1; i++) {
uint32_t key = (uint32_t) (rand() | (rand() << 16));
check(RadixMap_add(map, key, i) == 0, "Failed to add key %u.",
key);
}
return i;
error:
return 0;
}
static int check_order(RadixMap * map)
{
RMElement d1, d2;
unsigned int i = 0;
// only signal errors if any (should not be)
for (i = 0; map->end > 0 && i < map->end - 1; i++) {
d1 = map->contents[i];
d2 = map->contents[i + 1];
if (d1.data.key > d2.data.key) {
debug("FAIL:i=%u, key: %u, value: %u, equals max? %d\n", i,
d1.data.key, d1.data.value,
d2.data.key == UINT32_MAX);
return 0;
}
}
return 1;
}
static int test_search(RadixMap * map)
{
unsigned i = 0;
RMElement *d = NULL;
RMElement *found = NULL;
for (i = map->end / 2; i < map->end; i++) {
d = &map->contents[i];
found = RadixMap_find(map, d->data.key);
check(found != NULL, "Didn't find %u at %u.", d->data.key, i);
check(found->data.key == d->data.key,
"Got the wrong result: %p:%u looking for %u at %u", found,
found->data.key, d->data.key, i);
}
return 1;
error:
return 0;
}
// test for big number of elements
static char *test_operations()
{
size_t N = 200;
RadixMap *map = RadixMap_create(N);
mu_assert(map != NULL, "Failed to make the map.");
mu_assert(make_random(map), "Didn't make a random fake radix map.");
RadixMap_sort(map);
mu_assert(check_order(map),
"Failed to properly sort the RadixMap.");
mu_assert(test_search(map), "Failed the search test.");
mu_assert(check_order(map),
"RadixMap didn't stay sorted after search.");
while (map->end > 0) {
RMElement *el = RadixMap_find(map,
map->contents[map->end / 2].data.key);
mu_assert(el != NULL, "Should get a result.");
size_t old_end = map->end;
mu_assert(RadixMap_delete(map, el) == 0, "Didn't delete it.");
mu_assert(old_end - 1 == map->end, "Wrong size after delete.");
// test that the end is now the old value,
// but uint32 max so it trails off
mu_assert(check_order(map),
"RadixMap didn't stay sorted after delete.");
}
RadixMap_destroy(map);
return NULL;
}
char *all_tests()
{
mu_suite_start();
srand(time(NULL));
mu_run_test(test_operations);
return NULL;
}
RUN_TESTS(all_tests);
```
Code review this code next.
The Binary Search Code
Finally, a code review of the binary search inside ``RadixMap_find``.
Improving It
* Use a binary search to find the minimum position for the
new element, then only sort from there to the end. You find the
minimum, put the new element on the end, and then just sort from
the minimum on. This will cut your sort space down
considerably most of the time (a sketch follows this list).
* Keep track of the biggest key currently being used, and then only
sort enough digits to handle that key. You can also keep track
of the smallest number, and then only sort the digits necessary
for the range. To do this, you'll have to start caring about
CPU integer ordering (endianness).
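Here is a rough sketch of the first idea: binary search for where the new key belongs, append the element, and then radix sort only the tail. ``RadixMap_add_fast`` and ``RadixMap_find_min_pos`` are names made up for this sketch, and it would have to live inside ``radixmap.c`` so it can reach the static ``radix_sort``:
```c
// Sketch only: lowest index whose key is >= the new key (lower bound).
static size_t RadixMap_find_min_pos(RadixMap * map, uint32_t key)
{
    size_t low = 0;
    size_t high = map->end;

    while (low < high) {
        size_t middle = low + (high - low) / 2;
        if (map->contents[middle].data.key < key) {
            low = middle + 1;
        } else {
            high = middle;
        }
    }

    return low;
}

// Sketch only: append the element, then run the same four byte passes
// as RadixMap_sort, but only over contents[pos .. end-1].
int RadixMap_add_fast(RadixMap * map, uint32_t key, uint32_t value)
{
    check(key < UINT32_MAX, "Key can't be equal to UINT32_MAX.");
    check(map->end + 1 < map->max, "RadixMap is full.");

    size_t pos = RadixMap_find_min_pos(map, key);
    RMElement element = {.data = {.key = key,.value = value} };
    map->contents[map->end++] = element;

    uint64_t *source = &map->contents[pos].raw;
    uint64_t *temp = &map->temp[pos].raw;
    uint64_t len = map->end - pos;

    radix_sort(0, len, source, temp);
    radix_sort(1, len, temp, source);
    radix_sort(2, len, source, temp);
    radix_sort(3, len, temp, source);

    return 0;
error:
    return -1;
}
```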
Extra Credit
* Implement quicksort, heapsort, and merge sort yourself, and then provide a *#define*
that lets you pick among them, or create a second set of functions
you can call. Use the technique I taught you to read the Wikipedia page
for each algorithm, and then implement it from the pseudo-code.
* Compare the performance of your optimizations to the original implementations.
Extra Credit
* Use these sorting functions to create a *DArray_sort_add* that
adds elements to the *DArray* but sorts the array after.
* Write a *DArray_find* that uses the binary search algorithm from
*RadixMap_find* and the *DArray_compare* to find elements
in a sorted *DArray*.
### Exercise 36 Safer Strings
.\ex36\ex36.c
```c
void copy(char to[], char from[])
{
int i = 0;
// while loop will not end if from isn't '\0' terminated
while ((to[i] = from[i]) != '\0') {
++i;
}
}
int safercopy(int from_len, char *from, int to_len, char *to)
{
int i = 0;
int max = from_len > to_len - 1 ? to_len - 1 : from_len;
// to_len must have at least 1 byte
if (from_len < 0 || to_len <= 0)
return -1;
for (i = 0; i < max; i++) {
to[i] = from[i];
}
to[to_len - 1] = '\0';
return i;
}
```
The Plan
Learn about an alternative string implementation to avoid most C string problems.
C Strings Suck
It is impossible to safely process strings in C.
The bstrlib Library
An alternative is to use a library that provides safer APIs for working with
C strings.
The Common Functions
* ``bfromcstr``: Create a bstring from a C style constant.
* ``blk2bstr``: Do the same thing, but give the length of the buffer.
* ``bstrcpy``: Copy a bstring.
* ``bassign``: Set one bstring to another.
* ``bassigncstr``: Set a bstring to a C string's contents.
* ``bassignblk``: Set a bstring to a C string but give the length.
* ``bdestroy``: Destroy a bstring.
* ``bconcat``: Concatenate one bstring onto another.
* ``bstricmp``: Compare two bstrings returning the same result as strcmp.
* ``biseq``: Tests if two bstrings are equal.
The Common Functions
* ``binstr``: Tells if one bstring is in another.
* ``bfindreplace``: Find one bstring in another, then replace it with a third.
* ``bsplit``: Split a bstring into a bstrList.
* ``bformat``: Do a format string, which is super handy.
* ``blength``: Get the length of a bstring.
* ``bdata``: Get the data from a bstring.
* ``bchar``: Get a char from a bstring.
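Putting a few of these together, here is a small sketch of everyday bstring handling (assuming the header lives at ``<lcthw/bstrlib.h>``, as it does elsewhere in this project):
```c
#include <stdio.h>
#include <lcthw/bstrlib.h>

int main(void)
{
    bstring name = bfromcstr("Zed");                    // bstring from a C constant
    bstring greeting = bformat("Hello %s.", bdata(name));
    bstring tail = bfromcstr(" Welcome to bstrings.");

    bconcat(greeting, tail);                            // append tail onto greeting

    printf("%s (%d chars)\n", bdata(greeting), blength(greeting));
    printf("same string? %d\n", biseq(name, greeting)); // 0, they differ

    bdestroy(name);                                     // every bstring gets destroyed
    bdestroy(tail);
    bdestroy(greeting);

    return 0;
}
```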
Extra Credit
There is only one extra credit and that's to write a *tests/bstr_tests.c* file that
tests all of these functions. The bstrlib comes with a test file that you can reference if needed.
### Exercise 37 Hashmaps
The Plan
Implement a Hashmap in C.
In Python these are called Dictionaries.
Hashmaps Visually
Hashmaps are very intuitive once you know how a DArray works.
It's all about the hashing function used.
Code Review
.\ex37\hashmap.h
```c
#ifndef _lcthw_Hashmap_h
#define _lcthw_Hashmap_h
#include <stdint.h>
#include <lcthw/darray.h>
#define DEFAULT_NUMBER_OF_BUCKETS 100
typedef int (*Hashmap_compare) (void *a, void *b);
typedef uint32_t(*Hashmap_hash) (void *key);
typedef struct Hashmap {
DArray *buckets;
Hashmap_compare compare;
Hashmap_hash hash;
} Hashmap;
typedef struct HashmapNode {
void *key;
void *data;
uint32_t hash;
} HashmapNode;
typedef int (*Hashmap_traverse_cb) (HashmapNode * node);
Hashmap *Hashmap_create(Hashmap_compare compare, Hashmap_hash);
void Hashmap_destroy(Hashmap * map);
int Hashmap_set(Hashmap * map, void *key, void *data);
void *Hashmap_get(Hashmap * map, void *key);
int Hashmap_traverse(Hashmap * map, Hashmap_traverse_cb traverse_cb);
void *Hashmap_delete(Hashmap * map, void *key);
#endif
```
.\ex37\hashmap.c
```c
#undef NDEBUG
#include <stdint.h>
#include <lcthw/hashmap.h>
#include <lcthw/dbg.h>
#include <lcthw/bstrlib.h>
static int default_compare(void *a, void *b)
{
return bstrcmp((bstring) a, (bstring) b);
}
/**
* Simple Bob Jenkins's hash algorithm taken from the
* wikipedia description.
*/
static uint32_t default_hash(void *a)
{
size_t len = blength((bstring) a);
char *key = bdata((bstring) a);
uint32_t hash = 0;
uint32_t i = 0;
for (hash = i = 0; i < len; ++i) {
hash += key[i];
hash += (hash << 10);
hash ^= (hash >> 6);
}
hash += (hash << 3);
hash ^= (hash >> 11);
hash += (hash << 15);
return hash;
}
Hashmap *Hashmap_create(Hashmap_compare compare, Hashmap_hash hash)
{
Hashmap *map = calloc(1, sizeof(Hashmap));
check_mem(map);
map->compare = compare == NULL ? default_compare : compare;
map->hash = hash == NULL ? default_hash : hash;
map->buckets = DArray_create(
sizeof(DArray *), DEFAULT_NUMBER_OF_BUCKETS);
map->buckets->end = map->buckets->max; // fake out expanding it
check_mem(map->buckets);
return map;
error:
if (map) {
Hashmap_destroy(map);
}
return NULL;
}
void Hashmap_destroy(Hashmap * map)
{
int i = 0;
int j = 0;
if (map) {
if (map->buckets) {
for (i = 0; i < DArray_count(map->buckets); i++) {
DArray *bucket = DArray_get(map->buckets, i);
if (bucket) {
for (j = 0; j < DArray_count(bucket); j++) {
free(DArray_get(bucket, j));
}
DArray_destroy(bucket);
}
}
DArray_destroy(map->buckets);
}
free(map);
}
}
static inline HashmapNode *Hashmap_node_create(int hash, void *key,
void *data)
{
HashmapNode *node = calloc(1, sizeof(HashmapNode));
check_mem(node);
node->key = key;
node->data = data;
node->hash = hash;
return node;
error:
return NULL;
}
static inline DArray *Hashmap_find_bucket(Hashmap * map, void *key,
int create,
uint32_t * hash_out)
{
uint32_t hash = map->hash(key);
int bucket_n = hash % DEFAULT_NUMBER_OF_BUCKETS;
check(bucket_n >= 0, "Invalid bucket found: %d", bucket_n);
// store it for the return so the caller can use it
*hash_out = hash;
DArray *bucket = DArray_get(map->buckets, bucket_n);
if (!bucket && create) {
// new bucket, set it up
bucket = DArray_create(
sizeof(void *), DEFAULT_NUMBER_OF_BUCKETS);
check_mem(bucket);
DArray_set(map->buckets, bucket_n, bucket);
}
return bucket;
error:
return NULL;
}
int Hashmap_set(Hashmap * map, void *key, void *data)
{
uint32_t hash = 0;
DArray *bucket = Hashmap_find_bucket(map, key, 1, &hash);
check(bucket, "Error can't create bucket.");
HashmapNode *node = Hashmap_node_create(hash, key, data);
check_mem(node);
DArray_push(bucket, node);
return 0;
error:
return -1;
}
static inline int Hashmap_get_node(Hashmap * map, uint32_t hash,
DArray * bucket, void *key)
{
int i = 0;
for (i = 0; i < DArray_end(bucket); i++) {
debug("TRY: %d", i);
HashmapNode *node = DArray_get(bucket, i);
if (node->hash == hash && map->compare(node->key, key) == 0) {
return i;
}
}
return -1;
}
void *Hashmap_get(Hashmap * map, void *key)
{
uint32_t hash = 0;
DArray *bucket = Hashmap_find_bucket(map, key, 0, &hash);
if (!bucket) return NULL;
int i = Hashmap_get_node(map, hash, bucket, key);
if (i == -1) return NULL;
HashmapNode *node = DArray_get(bucket, i);
check(node != NULL,
"Failed to get node from bucket when it should exist.");
return node->data;
error: // fallthrough
return NULL;
}
int Hashmap_traverse(Hashmap * map, Hashmap_traverse_cb traverse_cb)
{
int i = 0;
int j = 0;
int rc = 0;
for (i = 0; i < DArray_count(map->buckets); i++) {
DArray *bucket = DArray_get(map->buckets, i);
if (bucket) {
for (j = 0; j < DArray_count(bucket); j++) {
HashmapNode *node = DArray_get(bucket, j);
rc = traverse_cb(node);
if (rc != 0)
return rc;
}
}
}
return 0;
}
void *Hashmap_delete(Hashmap * map, void *key)
{
uint32_t hash = 0;
DArray *bucket = Hashmap_find_bucket(map, key, 0, &hash);
if (!bucket)
return NULL;
int i = Hashmap_get_node(map, hash, bucket, key);
if (i == -1)
return NULL;
HashmapNode *node = DArray_get(bucket, i);
void *data = node->data;
free(node);
HashmapNode *ending = DArray_pop(bucket);
if (ending != node) {
// alright looks like it's not the last one, swap it
DArray_set(bucket, i, ending);
}
return data;
}
```
Conducting a review of Hashmap by following the test.
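For quick reference while reviewing, here is a minimal usage sketch of the API above, using bstring keys with the default compare and hash (error checks trimmed for brevity):
```c
#include <stdio.h>
#include <lcthw/hashmap.h>
#include <lcthw/bstrlib.h>

int main(void)
{
    Hashmap *map = Hashmap_create(NULL, NULL);  // default bstring compare + hash

    bstring key = bfromcstr("answer");
    bstring value = bfromcstr("forty two");

    Hashmap_set(map, key, value);
    bstring found = Hashmap_get(map, key);
    printf("answer -> %s\n", bdata(found));

    Hashmap_delete(map, key);                   // hands back the data, frees the node

    bdestroy(key);
    bdestroy(value);
    Hashmap_destroy(map);

    return 0;
}
```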
Improving It
* You can use a sort on each bucket so that they're always sorted.
This increases your insert time but decreases your find time, because
you can then use a binary search to find each node. Right now,
it's looping through all of the nodes in a bucket just to find one.
* You can dynamically size the number of buckets, or let the caller
specify the number for each *Hashmap* created.
* You can use a better *default_hash*. There are tons of them.
Improving It
* This (and nearly every *Hashmap*) is vulnerable to someone picking
keys that will fill only one bucket, and then tricking your program
into processing them. This then makes your program run slower because
it changes from processing a *Hashmap* to effectively processing
a single *DArray*. If you sort the nodes in the bucket, this
helps, but you can also use better hashing functions, and for
the really paranoid programmer, add a random salt so that keys can't be predicted.
Improving It
* You could have it delete buckets that are empty of nodes to save space,
or put empty buckets into a cache so you can save on time lost creating and destroying
them.
* Right now, it just adds elements even if they already exist. Write an
alternative set method that only adds an element if it isn't set already.
Extra Credit
* Research the *Hashmap* implementation in your favorite programming language to see what features it has.
* Find out what the major disadvantages of a *Hashmap* are and how to avoid them. For example, it doesn't preserve order without special changes, nor does it work when you need to find things based on parts
of keys.
* Write a unit test that demonstrates the defect of filling a *Hashmap* with keys that land
in the same bucket, then test how this impacts performance. A good way to do this is to just reduce
the number of buckets to something stupid, like five.
### Exercise 38 Hashmap Algorithms
The Plan
Learn three different string hashing algorithms and make them dynamically available
to the Hashmap.
Code Review
.\ex38\hashmap_algos.h
```c
#ifndef hashmap_algos_h
#define hashmap_algos_h
#include <stdint.h>
uint32_t Hashmap_fnv1a_hash(void *data);
uint32_t Hashmap_adler32_hash(void *data);
uint32_t Hashmap_djb_hash(void *data);
#endif
```
.\ex38\hashmap_algos.c
```c
#include <lcthw/hashmap_algos.h>
#include <lcthw/bstrlib.h>
// settings taken from
// http://www.isthe.com/chongo/tech/comp/fnv/index.html#FNV-param
const uint32_t FNV_PRIME = 16777619;
const uint32_t FNV_OFFSET_BASIS = 2166136261;
uint32_t Hashmap_fnv1a_hash(void *data)
{
bstring s = (bstring) data;
uint32_t hash = FNV_OFFSET_BASIS;
int i = 0;
for (i = 0; i < blength(s); i++) {
hash ^= bchare(s, i, 0);
hash *= FNV_PRIME;
}
return hash;
}
const int MOD_ADLER = 65521;
uint32_t Hashmap_adler32_hash(void *data)
{
bstring s = (bstring) data;
uint32_t a = 1, b = 0;
int i = 0;
for (i = 0; i < blength(s); i++) {
a = (a + bchare(s, i, 0)) % MOD_ADLER;
b = (b + a) % MOD_ADLER;
}
return (b << 16) | a;
}
uint32_t Hashmap_djb_hash(void *data)
{
bstring s = (bstring) data;
uint32_t hash = 5381;
int i = 0;
for (i = 0; i < blength(s); i++) {
hash = ((hash << 5) + hash) + bchare(s, i, 0); /* hash * 33 + c */
}
return hash;
}
```
The default is the Bob Jenkins hash.
You added the FNV1a, Adler32, and DJB hashing algorithms.
Review the code for FNV1a vs. DJB.
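To see what "dynamically available" means in practice, swapping the algorithm is just a matter of passing one of these functions into ``Hashmap_create`` from Exercise 37 (a sketch; the helper name is made up):
```c
#include <lcthw/hashmap.h>
#include <lcthw/hashmap_algos.h>

// Sketch only: same Hashmap code, different hashing algorithm.
Hashmap *make_fnv_map(void)
{
    // NULL keeps the default bstring compare; only the hash is swapped out.
    return Hashmap_create(NULL, Hashmap_fnv1a_hash);
}
```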
Breaking It
In this exercise you will attempt to write the worst hashing function that can
still pass for a real one. Try to make one that either looks complicated but is
statistically way off, or is a subtle change to an existing one that makes it
worse.
Extra Credit
* Take the ``default_hash`` out of the ``hashmap.c``, make it
one of the algorithms in ``hashmap_algos.c``, and then make all
of the tests work again.
* Add the ``default_hash`` to the ``hashmap_algos_tests.c``
test and compare its statistics to the other hash functions.
* Find a few more hash functions and add them, too. You can never have too
many hash functions!
### Exercise 39 String Algorithms
The Plan
Develop a formal code review procedure.
The Code
.\ex39\string_algos.h
```c
#ifndef string_algos_h
#define string_algos_h
#include <lcthw/bstrlib.h>
#include <lcthw/darray.h>
typedef struct StringScanner {
bstring in;
const unsigned char *haystack;
ssize_t hlen;
const unsigned char *needle;
ssize_t nlen;
size_t skip_chars[UCHAR_MAX + 1];
} StringScanner;
int String_find(bstring in, bstring what);
StringScanner *StringScanner_create(bstring in);
int StringScanner_scan(StringScanner * scan, bstring tofind);
void StringScanner_destroy(StringScanner * scan);
#endif
```
.\ex39\string_algos.c
```c
#include <lcthw/string_algos.h>
#include <limits.h>
static inline void String_setup_skip_chars(size_t * skip_chars,
const unsigned char *needle,
ssize_t nlen)
{
size_t i = 0;
size_t last = nlen - 1;
for (i = 0; i < UCHAR_MAX + 1; i++) {
skip_chars[i] = nlen;
}
for (i = 0; i < last; i++) {
skip_chars[needle[i]] = last - i;
}
}
static inline const unsigned char *String_base_search(const unsigned
char *haystack,
ssize_t hlen,
const unsigned
char *needle,
ssize_t nlen,
size_t *
skip_chars)
{
size_t i = 0;
size_t last = nlen - 1;
assert(haystack != NULL && "Given bad haystack to search.");
assert(needle != NULL && "Given bad needle to search for.");
check(nlen > 0, "nlen can't be <= 0");
check(hlen > 0, "hlen can't be <= 0");
while (hlen >= nlen) {
for (i = last; haystack[i] == needle[i]; i--) {
if (i == 0) {
return haystack;
}
}
hlen -= skip_chars[haystack[last]];
haystack += skip_chars[haystack[last]];
}
error: // fallthrough
return NULL;
}
int String_find(bstring in, bstring what)
{
const unsigned char *found = NULL;
const unsigned char *haystack = (const unsigned char *)bdata(in);
ssize_t hlen = blength(in);
const unsigned char *needle = (const unsigned char *)bdata(what);
ssize_t nlen = blength(what);
size_t skip_chars[UCHAR_MAX + 1] = { 0 };
String_setup_skip_chars(skip_chars, needle, nlen);
found = String_base_search(haystack, hlen,
needle, nlen, skip_chars);
return found != NULL ? found - haystack : -1;
}
StringScanner *StringScanner_create(bstring in)
{
StringScanner *scan = calloc(1, sizeof(StringScanner));
check_mem(scan);
scan->in = in;
scan->haystack = (const unsigned char *)bdata(in);
scan->hlen = blength(in);
assert(scan != NULL && "fuck");
return scan;
error:
free(scan);
return NULL;
}
static inline void StringScanner_set_needle(StringScanner * scan,
bstring tofind)
{
scan->needle = (const unsigned char *)bdata(tofind);
scan->nlen = blength(tofind);
String_setup_skip_chars(scan->skip_chars, scan->needle, scan->nlen);
}
static inline void StringScanner_reset(StringScanner * scan)
{
scan->haystack = (const unsigned char *)bdata(scan->in);
scan->hlen = blength(scan->in);
}
int StringScanner_scan(StringScanner * scan, bstring tofind)
{
const unsigned char *found = NULL;
ssize_t found_at = 0;
if (scan->hlen <= 0) {
StringScanner_reset(scan);
return -1;
}
if ((const unsigned char *)bdata(tofind) != scan->needle) {
StringScanner_set_needle(scan, tofind);
}
found = String_base_search(scan->haystack, scan->hlen,
scan->needle, scan->nlen,
scan->skip_chars);
if (found) {
found_at = found - (const unsigned char *)bdata(scan->in);
scan->haystack = found + scan->nlen;
scan->hlen -= found_at - scan->nlen;
} else {
// done, reset the setup
StringScanner_reset(scan);
found_at = -1;
}
return found_at;
}
void StringScanner_destroy(StringScanner * scan)
{
if (scan) {
free(scan);
}
}
```
.\ex39\string_algos_tests.c
```c
#include "minunit.h"
#include <lcthw/string_algos.h>
#include <lcthw/bstrlib.h>
#include <time.h>
struct tagbstring IN_STR = bsStatic(
"I have ALPHA beta ALPHA and oranges ALPHA");
struct tagbstring ALPHA = bsStatic("ALPHA");
const int TEST_TIME = 1;
char *test_find_and_scan()
{
StringScanner *scan = StringScanner_create(&IN_STR);
mu_assert(scan != NULL, "Failed to make the scanner.");
int find_i = String_find(&IN_STR, &ALPHA);
mu_assert(find_i > 0, "Failed to find 'ALPHA' in test string.");
int scan_i = StringScanner_scan(scan, &ALPHA);
mu_assert(scan_i > 0, "Failed to find 'ALPHA' with scan.");
mu_assert(scan_i == find_i, "find and scan don't match");
scan_i = StringScanner_scan(scan, &ALPHA);
mu_assert(scan_i > find_i,
"should find another ALPHA after the first");
scan_i = StringScanner_scan(scan, &ALPHA);
mu_assert(scan_i > find_i,
"should find another ALPHA after the first");
mu_assert(StringScanner_scan(scan, &ALPHA) == -1,
"shouldn't find it");
StringScanner_destroy(scan);
return NULL;
}
char *test_binstr_performance()
{
int i = 0;
int found_at = 0;
unsigned long find_count = 0;
time_t elapsed = 0;
time_t start = time(NULL);
do {
for (i = 0; i < 1000; i++) {
found_at = binstr(&IN_STR, 0, &ALPHA);
mu_assert(found_at != BSTR_ERR, "Failed to find!");
find_count++;
}
elapsed = time(NULL) - start;
} while (elapsed <= TEST_TIME);
debug("BINSTR COUNT: %lu, END TIME: %d, OPS: %f",
find_count, (int)elapsed, (double)find_count / elapsed);
return NULL;
}
char *test_find_performance()
{
int i = 0;
int found_at = 0;
unsigned long find_count = 0;
time_t elapsed = 0;
time_t start = time(NULL);
do {
for (i = 0; i < 1000; i++) {
found_at = String_find(&IN_STR, &ALPHA);
find_count++;
}
elapsed = time(NULL) - start;
} while (elapsed <= TEST_TIME);
debug("FIND COUNT: %lu, END TIME: %d, OPS: %f",
find_count, (int)elapsed, (double)find_count / elapsed);
return NULL;
}
char *test_scan_performance()
{
int i = 0;
int found_at = 0;
unsigned long find_count = 0;
time_t elapsed = 0;
StringScanner *scan = StringScanner_create(&IN_STR);
time_t start = time(NULL);
do {
for (i = 0; i < 1000; i++) {
found_at = 0;
do {
found_at = StringScanner_scan(scan, &ALPHA);
find_count++;
} while (found_at != -1);
}
elapsed = time(NULL) - start;
} while (elapsed <= TEST_TIME);
debug("SCAN COUNT: %lu, END TIME: %d, OPS: %f",
find_count, (int)elapsed, (double)find_count / elapsed);
StringScanner_destroy(scan);
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_find_and_scan);
// this is an idiom for commenting out sections of code
#if 0
mu_run_test(test_scan_performance);
mu_run_test(test_find_performance);
mu_run_test(test_binstr_performance);
#endif
return NULL;
}
RUN_TESTS(all_tests);
```
The code is easy to implement, so it should be no problem for you
at this point. Focus on getting the unit test right.
Code Review Process
1. Start at the entry point for a piece of code that has changed.
2. For each function, confirm that its calling parameters are correct.
3. Enter that function, and confirm each line's correctness.
4. When you encounter a function, repeat up to #2 until you go no further.
5. As you exit functions, confirm the return values and their usage.
6. Continue until you are back where you started at the entry point.
7. Do a diff on your changes, and confirm any missed calls to changed functions.
Code Review Key Points
1. Check your pointer dereferences and defend against NULL.
2. Check that if-statements and while-loops will actually exit.
3. Check that return values will be valid.
4. Check that allocated memory is freed and that other resources are released.
5. Confirm all system call parameters are correct against the man pages.
Record Your Code Review
I want *you* to try to record yourself coding and reviewing your
code. What do you learn from this experience?
What if you kept track of the number of mistakes you found in
your code reviews and analyzed the data?
Extra Credit
* See if you can make the ``Scan_find`` faster. Why is my implementation
here slow?
* Try some different scan times and see if you get different numbers.
What impact does the length of time that you run the test have on
the ``scan`` times? What can you say about that result?
* Alter the unit test so that it runs each function for a short burst
in the beginning to clear out any warm up period, and then start the
timing portion. Does that change the dependence on the length of time
the test runs? Does it change how many operations per second are possible?
Extra Credit
* Make the unit test randomize the strings to find and then measure
the performance you get. One way to do this is to use the ``bsplit``
function from ``bstrlib.h`` to split the ``IN_STR`` on
spaces. Then, you can use the ``bstrList`` struct that you get to access each
string it returns. This will also teach you how to use ``bstrList``
operations for string processing.
* Try some runs with the tests in different orders to see if you get different
results.
### Exercise 40 Binary Search Trees
The Plan
Implement a Binary Search Tree, a competitor to the Hashmap.
Binary Search Trees Visually
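The diagrams are on the video; the essential picture is a tree of keyed nodes (illustrative comment only — the real structs are in ``bstree.h`` below):
```c
/* Illustration only -- the real BSTreeNode and BSTree structs are below.
 *
 *                 (root key)
 *                 /        \
 *          (child key)   (child key)
 *          /        \
 *      (leaf)      (leaf)
 *
 * Each node carries a key, a data pointer, and left/right/parent links.
 * The compare callback decides which branch a key follows on the way down,
 * so get, set, and delete each walk a single path from the root instead of
 * scanning every node the way a list would.
 */
```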
The Code
.\ex40\bstree.h
```c
#ifndef _lcthw_BSTree_h
#define _lcthw_BSTree_h
typedef int (*BSTree_compare) (void *a, void *b);
typedef struct BSTreeNode {
void *key;
void *data;
struct BSTreeNode *left;
struct BSTreeNode *right;
struct BSTreeNode *parent;
} BSTreeNode;
typedef struct BSTree {
int count;
BSTree_compare compare;
BSTreeNode *root;
} BSTree;
typedef int (*BSTree_traverse_cb) (BSTreeNode * node);
BSTree *BSTree_create(BSTree_compare compare);
void BSTree_destroy(BSTree * map);
int BSTree_set(BSTree * map, void *key, void *data);
void *BSTree_get(BSTree * map, void *key);
int BSTree_traverse(BSTree * map, BSTree_traverse_cb traverse_cb);
void *BSTree_delete(BSTree * map, void *key);
#endif
```
.\ex40\bstree.c
```c
#include <lcthw/dbg.h>
#include <lcthw/bstree.h>
#include <stdlib.h>
#include <lcthw/bstrlib.h>
static int default_compare(void *a, void *b)
{
return bstrcmp((bstring) a, (bstring) b);
}
BSTree *BSTree_create(BSTree_compare compare)
{
BSTree *map = calloc(1, sizeof(BSTree));
check_mem(map);
map->compare = compare == NULL ? default_compare : compare;
return map;
error:
if (map) {
BSTree_destroy(map);
}
return NULL;
}
static int BSTree_destroy_cb(BSTreeNode * node)
{
free(node);
return 0;
}
void BSTree_destroy(BSTree * map)
{
if (map) {
BSTree_traverse(map, BSTree_destroy_cb);
free(map);
}
}
static inline BSTreeNode *BSTreeNode_create(BSTreeNode * parent,
void *key, void *data)
{
BSTreeNode *node = calloc(1, sizeof(BSTreeNode));
check_mem(node);
node->key = key;
node->data = data;
node->parent = parent;
return node;
error:
return NULL;
}
static inline void BSTree_setnode(BSTree * map, BSTreeNode * node,
void *key, void *data)
{
int cmp = map->compare(node->key, key);
if (cmp <= 0) {
if (node->left) {
BSTree_setnode(map, node->left, key, data);
} else {
node->left = BSTreeNode_create(node, key, data);
}
} else {
if (node->right) {
BSTree_setnode(map, node->right, key, data);
} else {
node->right = BSTreeNode_create(node, key, data);
}
}
}
int BSTree_set(BSTree * map, void *key, void *data)
{
if (map->root == NULL) {
// first so just make it and get out
map->root = BSTreeNode_create(NULL, key, data);
check_mem(map->root);
} else {
BSTree_setnode(map, map->root, key, data);
}
return 0;
error:
return -1;
}
static inline BSTreeNode *BSTree_getnode(BSTree * map,
BSTreeNode * node, void *key)
{
int cmp = map->compare(node->key, key);
if (cmp == 0) {
return node;
} else if (cmp < 0) {
if (node->left) {
return BSTree_getnode(map, node->left, key);
} else {
return NULL;
}
} else {
if (node->right) {
return BSTree_getnode(map, node->right, key);
} else {
return NULL;
}
}
}
void *BSTree_get(BSTree * map, void *key)
{
if (map->root == NULL) {
return NULL;
} else {
BSTreeNode *node = BSTree_getnode(map, map->root, key);
return node == NULL ? NULL : node->data;
}
}
static inline int BSTree_traverse_nodes(BSTreeNode * node,
BSTree_traverse_cb traverse_cb)
{
int rc = 0;
if (node->left) {
rc = BSTree_traverse_nodes(node->left, traverse_cb);
if (rc != 0)
return rc;
}
if (node->right) {
rc = BSTree_traverse_nodes(node->right, traverse_cb);
if (rc != 0)
return rc;
}
return traverse_cb(node);
}
int BSTree_traverse(BSTree * map, BSTree_traverse_cb traverse_cb)
{
if (map->root) {
return BSTree_traverse_nodes(map->root, traverse_cb);
}
return 0;
}
static inline BSTreeNode *BSTree_find_min(BSTreeNode * node)
{
while (node->left) {
node = node->left;
}
return node;
}
static inline void BSTree_replace_node_in_parent(BSTree * map,
BSTreeNode * node,
BSTreeNode * new_value)
{
if (node->parent) {
if (node == node->parent->left) {
node->parent->left = new_value;
} else {
node->parent->right = new_value;
}
} else {
// this is the root so gotta change it
map->root = new_value;
}
if (new_value) {
new_value->parent = node->parent;
}
}
static inline void BSTree_swap(BSTreeNode * a, BSTreeNode * b)
{
void *temp = NULL;
temp = b->key;
b->key = a->key;
a->key = temp;
temp = b->data;
b->data = a->data;
a->data = temp;
}
static inline BSTreeNode *BSTree_node_delete(BSTree * map,
BSTreeNode * node,
void *key)
{
int cmp = map->compare(node->key, key);
if (cmp < 0) {
if (node->left) {
return BSTree_node_delete(map, node->left, key);
} else {
// not found
return NULL;
}
} else if (cmp > 0) {
if (node->right) {
return BSTree_node_delete(map, node->right, key);
} else {
// not found
return NULL;
}
} else {
if (node->left && node->right) {
// swap this node for the smallest node that is bigger than us
BSTreeNode *successor = BSTree_find_min(node->right);
BSTree_swap(successor, node);
// this leaves the old successor with possibly a right child
// so replace it with that right child
BSTree_replace_node_in_parent(map, successor,
successor->right);
// finally it's swapped, so return successor instead of node
return successor;
} else if (node->left) {
BSTree_replace_node_in_parent(map, node, node->left);
} else if (node->right) {
BSTree_replace_node_in_parent(map, node, node->right);
} else {
BSTree_replace_node_in_parent(map, node, NULL);
}
return node;
}
}
void *BSTree_delete(BSTree * map, void *key)
{
void *data = NULL;
if (map->root) {
BSTreeNode *node = BSTree_node_delete(map, map->root, key);
if (node) {
data = node->data;
free(node);
}
}
return data;
}
```
.\ex40\bstree_tests.c
```c
#include "minunit.h"
#include <lcthw/bstree.h>
#include <assert.h>
#include <lcthw/bstrlib.h>
#include <stdlib.h>
#include <time.h>
BSTree *map = NULL;
static int traverse_called = 0;
struct tagbstring test1 = bsStatic("test data 1");
struct tagbstring test2 = bsStatic("test data 2");
struct tagbstring test3 = bsStatic("xest data 3");
struct tagbstring expect1 = bsStatic("THE VALUE 1");
struct tagbstring expect2 = bsStatic("THE VALUE 2");
struct tagbstring expect3 = bsStatic("THE VALUE 3");
static int traverse_good_cb(BSTreeNode * node)
{
debug("KEY: %s", bdata((bstring) node->key));
traverse_called++;
return 0;
}
static int traverse_fail_cb(BSTreeNode * node)
{
debug("KEY: %s", bdata((bstring) node->key));
traverse_called++;
if (traverse_called == 2) {
return 1;
} else {
return 0;
}
}
char *test_create()
{
map = BSTree_create(NULL);
mu_assert(map != NULL, "Failed to create map.");
return NULL;
}
char *test_destroy()
{
BSTree_destroy(map);
return NULL;
}
char *test_get_set()
{
int rc = BSTree_set(map, &test1, &expect1);
mu_assert(rc == 0, "Failed to set &test1");
bstring result = BSTree_get(map, &test1);
mu_assert(result == &expect1, "Wrong value for test1.");
rc = BSTree_set(map, &test2, &expect2);
mu_assert(rc == 0, "Failed to set test2");
result = BSTree_get(map, &test2);
mu_assert(result == &expect2, "Wrong value for test2.");
rc = BSTree_set(map, &test3, &expect3);
mu_assert(rc == 0, "Failed to set test3");
result = BSTree_get(map, &test3);
mu_assert(result == &expect3, "Wrong value for test3.");
return NULL;
}
char *test_traverse()
{
int rc = BSTree_traverse(map, traverse_good_cb);
mu_assert(rc == 0, "Failed to traverse.");
mu_assert(traverse_called == 3, "Wrong count traverse.");
traverse_called = 0;
rc = BSTree_traverse(map, traverse_fail_cb);
mu_assert(rc == 1, "Failed to traverse.");
mu_assert(traverse_called == 2, "Wrong count traverse for fail.");
return NULL;
}
char *test_delete()
{
bstring deleted = (bstring) BSTree_delete(map, &test1);
mu_assert(deleted != NULL, "Got NULL on delete.");
mu_assert(deleted == &expect1, "Should get test1");
bstring result = BSTree_get(map, &test1);
mu_assert(result == NULL, "Should delete.");
deleted = (bstring) BSTree_delete(map, &test1);
mu_assert(deleted == NULL, "Should get NULL on delete");
deleted = (bstring) BSTree_delete(map, &test2);
mu_assert(deleted != NULL, "Got NULL on delete.");
mu_assert(deleted == &expect2, "Should get test2");
result = BSTree_get(map, &test2);
mu_assert(result == NULL, "Should delete.");
deleted = (bstring) BSTree_delete(map, &test3);
mu_assert(deleted != NULL, "Got NULL on delete.");
mu_assert(deleted == &expect3, "Should get test3");
result = BSTree_get(map, &test3);
mu_assert(result == NULL, "Should delete.");
// test deleting non-existent stuff
deleted = (bstring) BSTree_delete(map, &test3);
mu_assert(deleted == NULL, "Should get NULL");
return NULL;
}
char *test_fuzzing()
{
BSTree *store = BSTree_create(NULL);
int i = 0;
int j = 0;
bstring numbers[100] = { NULL };
bstring data[100] = { NULL };
srand((unsigned int)time(NULL));
for (i = 0; i < 100; i++) {
int num = rand();
numbers[i] = bformat("%d", num);
data[i] = bformat("data %d", num);
BSTree_set(store, numbers[i], data[i]);
}
for (i = 0; i < 100; i++) {
bstring value = BSTree_delete(store, numbers[i]);
mu_assert(value == data[i],
"Failed to delete the right number.");
mu_assert(BSTree_delete(store, numbers[i]) == NULL,
"Should get nothing.");
for (j = i + 1; j < 99 - i; j++) {
bstring value = BSTree_get(store, numbers[j]);
mu_assert(value == data[j],
"Failed to get the right number.");
}
bdestroy(value);
bdestroy(numbers[i]);
}
BSTree_destroy(store);
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_create);
mu_run_test(test_get_set);
mu_run_test(test_traverse);
mu_run_test(test_delete);
mu_run_test(test_destroy);
mu_run_test(test_fuzzing);
return NULL;
}
RUN_TESTS(all_tests);
```
There's nothing new in the code, but make sure you read the book carefully.
Code Review
I'll walk through the implementation and compare it to Hashmaps for features.
Improving It
* As usual, you should go through all of the defensive programming checks and add
*assert*s for conditions that shouldn't happen. For example, you shouldn't be getting *NULL* values for the recursion functions, so assert that.
* The traverse function walks through the tree by traversing left, then right,
and then the current node (a post-order walk). You can create traverse functions for the reverse order, or a true in-order walk, as well.
* It does a full string compare on every node, but I could use the *Hashmap*
hashing functions to speed this up. I could hash the keys, and then keep the hash in
the *BSTreeNode*. Then, in each of the set up functions, I can hash the
key ahead of time, and pass it down to the recursive function. Using this hash, I can
then compare each node much quicker in a way that's similar to what I do in *Hashmap*.
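A minimal sketch of that idea (the `HashedNode` layout and `hashed_compare` helper are names I made up to illustrate it; they are not code from the exercise):
```c
#include <stdint.h>
#include <assert.h>
#include <lcthw/bstree.h>

/* Hypothetical node layout: the same fields as BSTreeNode plus a cached hash
 * of the key, computed once when the node is created. */
typedef struct HashedNode {
    uint32_t hash;
    void *key;
    void *data;
    struct HashedNode *left;
    struct HashedNode *right;
    struct HashedNode *parent;
} HashedNode;

/* Compare the cheap hashes first and only fall back to the full key compare
 * on a hash match. The assert also covers the defensive-programming point
 * above: the recursion should never be handed a NULL node. */
static inline int hashed_compare(BSTree *map, HashedNode *node,
                                 uint32_t key_hash, void *key)
{
    assert(node != NULL);

    if (key_hash < node->hash) return -1;
    if (key_hash > node->hash) return 1;
    return map->compare(node->key, key);
}
```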
Breaking It
A big flaw in this is the use of recursion. An attacker could choose data to cause a stack overflow.
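To see the problem concretely, here is a sketch (mine, not the book's) that degenerates the tree into a long chain by inserting keys in sorted order; every recursive call in set/get/delete then descends one node per key:
```c
#include <lcthw/bstree.h>
#include <lcthw/bstrlib.h>

/* Sorted inserts always send the new node to the same side, so the "tree"
 * becomes a linked list. With a large enough count, the recursive
 * BSTree_setnode/BSTree_getnode calls can exhaust the stack. */
void degenerate_tree(BSTree *map, int count)
{
    int i = 0;

    for (i = 0; i < count; i++) {
        bstring key = bformat("%09d", i);   /* strictly increasing keys */
        BSTree_set(map, key, key);
    }

    /* looking up the last key now recurses roughly 'count' levels deep */
    BSTree_get(map, bformat("%09d", count - 1));
}
```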
Extra Credit
* There's an alternative way to do this data structure without using recursion. The Wikipedia
page shows alternatives that don't use recursion but do the same thing. Why would this
be better or worse? (A minimal iterative lookup is sketched after this list.)
* Read up on all of the different but similar trees you can find. There are AVL trees (named after Adelson-Velsky and Landis), red-black trees,
and some non-tree structures like skip lists.
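For the first point, an iterative lookup needs only a loop and constant stack space. A minimal sketch against the same BSTree API (not the book's code):
```c
#include <lcthw/bstree.h>

/* Iterative equivalent of BSTree_get: no recursion, so the stack depth no
 * longer depends on how unbalanced the tree is. */
void *BSTree_get_iterative(BSTree *map, void *key)
{
    BSTreeNode *node = map->root;

    while (node != NULL) {
        int cmp = map->compare(node->key, key);

        if (cmp == 0) {
            return node->data;
        } else if (cmp < 0) {
            node = node->left;
        } else {
            node = node->right;
        }
    }

    return NULL;
}
```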
<file_sep>+++
title = "Coding"
weight = 2
+++
{{%children style="card" description="true" %}}<file_sep>#!/bin/bash
#******************************************************************************
# AWS VPC Creation Shell Script
#******************************************************************************
#
# SYNOPSIS
# Automates the creation of a custom IPv4 VPC, having both a public and a
# private subnet, and a NAT gateway.
#
#==============================================================================
#
# NOTES
# VERSION: 1.0
# AUTHOR: <NAME>
#
#==============================================================================
# MODIFY THE SETTINGS BELOW
#==============================================================================
#
AWS_REGION="ap-southeast-2"
VPC_NAME="DEV-PG-II"
VPC_CIDR="10.5.0.0/16"
SUBNET_PUBLIC_CIDR="10.5.1.0/24"
SUBNET_PUBLIC_AZ="ap-southeast-2a"
SUBNET_PUBLIC_NAME="$VPC_NAME-PubSub-AZ2a"
SUBNET_PRIVATE_CIDR="10.5.2.0/24"
SUBNET_PRIVATE_AZ="ap-southeast-2b"
SUBNET_PRIVATE_NAME="$VPC_NAME-PrvSub-AZ2b"
IGW_NAME="$VPC_NAME-IGW"
NAT_GW_NAME="$VPC_NAME-NAT-GW"
CHECK_FREQUENCY=5
#
#==============================================================================
# DO NOT MODIFY CODE BELOW
#==============================================================================
#
# Create VPC
echo "Creating VPC in preferred region..."
VPC_ID=$(aws ec2 create-vpc \
--cidr-block $VPC_CIDR \
--query 'Vpc.{VpcId:VpcId}' \
--output text \
--region $AWS_REGION)
echo " VPC ID '$VPC_ID' CREATED in '$AWS_REGION' region."
# Add Name tag to VPC
aws ec2 create-tags \
--resources $VPC_ID \
--tags "Key=Name,Value=$VPC_NAME" \
--region $AWS_REGION
echo " VPC ID '$VPC_ID' NAMED as '$VPC_NAME'."
# Create Public Subnet
echo "Creating Public Subnet..."
SUBNET_PUBLIC_ID=$(aws ec2 create-subnet \
--vpc-id $VPC_ID \
--cidr-block $SUBNET_PUBLIC_CIDR \
--availability-zone $SUBNET_PUBLIC_AZ \
--query 'Subnet.{SubnetId:SubnetId}' \
--output text \
--region $AWS_REGION)
echo " Subnet ID '$SUBNET_PUBLIC_ID' CREATED in '$SUBNET_PUBLIC_AZ'" \
"Availability Zone."
# Add Name tag to Public Subnet
aws ec2 create-tags \
--resources $SUBNET_PUBLIC_ID \
--tags "Key=Name,Value=$SUBNET_PUBLIC_NAME" \
--region $AWS_REGION
echo " Subnet ID '$SUBNET_PUBLIC_ID' NAMED as" \
"'$SUBNET_PUBLIC_NAME'."
# Create Private Subnet
echo "Creating Private Subnet..."
SUBNET_PRIVATE_ID=$(aws ec2 create-subnet \
--vpc-id $VPC_ID \
--cidr-block $SUBNET_PRIVATE_CIDR \
--availability-zone $SUBNET_PRIVATE_AZ \
--query 'Subnet.{SubnetId:SubnetId}' \
--output text \
--region $AWS_REGION)
echo " Subnet ID '$SUBNET_PRIVATE_ID' CREATED in '$SUBNET_PRIVATE_AZ'" \
"Availability Zone."
# Add Name tag to Private Subnet
aws ec2 create-tags \
--resources $SUBNET_PRIVATE_ID \
--tags "Key=Name,Value=$SUBNET_PRIVATE_NAME" \
--region $AWS_REGION
echo " Subnet ID '$SUBNET_PRIVATE_ID' NAMED as '$SUBNET_PRIVATE_NAME'."
# Create Internet gateway
echo "Creating Internet Gateway..."
IGW_ID=$(aws ec2 create-internet-gateway \
--query 'InternetGateway.{InternetGatewayId:InternetGatewayId}' \
--output text \
--region $AWS_REGION)
echo " Internet Gateway ID '$IGW_ID' CREATED."
# Add Name tag to Internet gateway
aws ec2 create-tags \
--resources $IGW_ID \
--tags "Key=Name,Value=$IGW_NAME" \
--region $AWS_REGION
echo " Internet gateway '$IGW_ID' NAMED as '$IGW_NAME'."
# Attach Internet gateway to your VPC
aws ec2 attach-internet-gateway \
--vpc-id $VPC_ID \
--internet-gateway-id $IGW_ID \
--region $AWS_REGION
echo " Internet Gateway ID '$IGW_ID' ATTACHED to VPC ID '$VPC_ID'."
# Create Route Table
echo "Creating Route Table..."
ROUTE_TABLE_ID=$(aws ec2 create-route-table \
--vpc-id $VPC_ID \
--query 'RouteTable.{RouteTableId:RouteTableId}' \
--output text \
--region $AWS_REGION)
echo " Route Table ID '$ROUTE_TABLE_ID' CREATED."
# Create route to Internet Gateway
RESULT=$(aws ec2 create-route \
--route-table-id $ROUTE_TABLE_ID \
--destination-cidr-block 0.0.0.0/0 \
--gateway-id $IGW_ID \
--region $AWS_REGION)
echo " Route to '0.0.0.0/0' via Internet Gateway ID '$IGW_ID' ADDED to" \
"Route Table ID '$ROUTE_TABLE_ID'."
# Associate Public Subnet with Route Table
RESULT=$(aws ec2 associate-route-table \
--subnet-id $SUBNET_PUBLIC_ID \
--route-table-id $ROUTE_TABLE_ID \
--region $AWS_REGION)
echo " Public Subnet ID '$SUBNET_PUBLIC_ID' ASSOCIATED with Route Table ID" \
"'$ROUTE_TABLE_ID'."
# Enable Auto-assign Public IP on Public Subnet
aws ec2 modify-subnet-attribute \
--subnet-id $SUBNET_PUBLIC_ID \
--map-public-ip-on-launch \
--region $AWS_REGION
echo " 'Auto-assign Public IP' ENABLED on Public Subnet ID" \
"'$SUBNET_PUBLIC_ID'."
# Allocate Elastic IP Address for NAT Gateway
echo "Creating NAT Gateway..."
EIP_ALLOC_ID=$(aws ec2 allocate-address \
--domain vpc \
--query '{AllocationId:AllocationId}' \
--output text \
--region $AWS_REGION)
echo " Elastic IP address ID '$EIP_ALLOC_ID' ALLOCATED."
# Create NAT Gateway
NAT_GW_ID=$(aws ec2 create-nat-gateway \
--subnet-id $SUBNET_PUBLIC_ID \
--allocation-id $EIP_ALLOC_ID \
--query 'NatGateway.{NatGatewayId:NatGatewayId}' \
--output text \
--region $AWS_REGION)
FORMATTED_MSG="Creating NAT Gateway ID '$NAT_GW_ID' and waiting for it to "
FORMATTED_MSG+="become available.\n Please BE PATIENT as this can take some "
FORMATTED_MSG+="time to complete.\n ......\n"
printf " $FORMATTED_MSG"
FORMATTED_MSG="STATUS: AVAILABLE - Total of %02d seconds elapsed for process"
FORMATTED_MSG+="\n ......\n NAT Gateway ID '%s' is now AVAILABLE.\n"
start_time="$(date -u +%s)"
aws ec2 wait nat-gateway-available \
  --nat-gateway-ids $NAT_GW_ID \
  --region $AWS_REGION
end_time="$(date -u +%s)"
elapsed="$(($end_time-$start_time))"
printf " $FORMATTED_MSG" $elapsed $NAT_GW_ID
# Add Name tag to NAT Gateway
aws ec2 create-tags \
--resources $NAT_GW_ID \
--tags "Key=Name,Value=$NAT_GW_NAME" \
--region $AWS_REGION
echo " Internet gateway '$NAT_GW_ID' NAMED as '$NAT_GW_NAME'."
# Create route to NAT Gateway
MAIN_ROUTE_TABLE_ID=$(aws ec2 describe-route-tables \
--filters Name=vpc-id,Values=$VPC_ID Name=association.main,Values=true \
--query 'RouteTables[*].{RouteTableId:RouteTableId}' \
--output text \
--region $AWS_REGION)
echo " Main Route Table ID is '$MAIN_ROUTE_TABLE_ID'."
RESULT=$(aws ec2 create-route \
--route-table-id $MAIN_ROUTE_TABLE_ID \
--destination-cidr-block 0.0.0.0/0 \
  --nat-gateway-id $NAT_GW_ID \
--region $AWS_REGION)
echo " Route to '0.0.0.0/0' via NAT Gateway with ID '$NAT_GW_ID' ADDED to" \
"Route Table ID '$MAIN_ROUTE_TABLE_ID'."
echo "COMPLETED"
<file_sep>+++
date = "2016-03-04T14:59:31+11:00"
title = "Ubuntu 14 -- server setup"
description = "Ubuntu 14 -- server note"
draft = false
+++
## Prelude
> *This article is mainly to help experienced users install and set up an Ubuntu server. If you are not familiar with Ubuntu, please install the Ubuntu desktop version first; you can follow the [Ubuntu desktop setup](/os/ubuntu-desktop-14/) guide.*
## Prerequisites
* You are familiar with Ubuntu, or at least have some experience working on a Linux system.
* You are familiar with bash/shell scripting.
* You are going to set up an Ubuntu server for a specific purpose, e.g. a web server, file server, or data center.
## UFW setup
```bash
sudo ufw enable
sudo ufw allow 80/tcp
sudo ufw allow ssh
sudo ufw allow 443/tcp
sudo ufw allow 8000/tcp
```
## SSH server setup
### Secure SSH with CA in production
```bash
sudo apt-get install openssh-server
## backup default config
sudo cp /etc/ssh/sshd_config /etc/ssh/sshd_config.factory-defaults
sudo chmod a-w /etc/ssh/sshd_config.factory-defaults
## use any editor to update sshd_config
sudo nano /etc/ssh/sshd_config
## uncomment PasswordAuthentication yes to allow remote password login
## Password authentication is only for test environment
## setup ssh auto-start onboot
sudo update-rc.d ssh defaults
```
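For production, disable password logins in favour of key (or CA-signed certificate) authentication. A minimal sketch of the relevant `sshd_config` directives (adjust to your own CA/key setup):
```bash
## /etc/ssh/sshd_config -- key-based login only
PasswordAuthentication no
ChallengeResponseAuthentication no
PubkeyAuthentication yes
PermitRootLogin no
## reload the daemon to apply the change
sudo service ssh restart
```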
## Time Zone setup
```bash
sudo dpkg-reconfigure tzdata
```
## install software-properties-common Package
```bash
sudo apt-get install software-properties-common python-software-properties
```
## Install byobu screen
```bash
sudo apt-get install byobu screen
## Launch byobu
byobu
## F9 for help
## change the keyboard for PuTTY > Terminal > Keyboard > Function keys and keypad > Xterm R6
```
## install docker (Ubuntu 14.04 LTS)
```bash
## add GPG key
sudo apt-key adv --keyserver hkp://p80.pool.sks-keyservers.net:80 --recv-keys <KEY>
## add docker.list
sudo touch /etc/apt/sources.list.d/docker.list
## repo
sudo vi /etc/apt/sources.list.d/docker.list
## add the following repo at the end of the file
deb https://apt.dockerproject.org/repo ubuntu-trusty main
## apt update
sudo apt-get update
## verify
apt-cache policy docker-engine
## install docker engine
sudo apt-get install docker-engine
```
## build vim
* I am not a `vi` fan, but if you really want to use `vi`, I suggest spending some time digging into [`vimawesome`](http://vimawesome.com) and playing around with those plugins. Some are pretty cool, e.g. `NERD Tree`, `YouCompleteMe`, `syntastic`, etc.
```bash
sudo apt-get build-dep vim
git clone https://github.com/vim/vim.git ~/forks/vim
cd ~/forks/vim
### make distclean && make clean
### build script from this repo
make VIMRUNTIMEDIR=/usr/share/vim/vim74
sudo make install
```
## Install JDK 8
* If you are going to run a Java web application on the server, or you are going to set up a Hadoop environment.
* Setup oracle jdk ppa and install oracle jdk from ppa.
```bash
sudo add-apt-repository ppa:webupd8team/java
sudo apt-get update
sudo apt-get install oracle-java8-installer
sudo apt-get install oracle-java8-set-default
```
## Install OpenJdk
* Setup OpenJdk ppa and install it from ppa
```bash
sudo add-apt-repository ppa:openjdk-r/ppa
sudo apt-get update
sudo apt-get install openjdk-8-jdk
sudo update-alternatives --config java
```
* Type in a number to select a Java version.
* set default Java Compiler
```bash
sudo update-alternatives --config javac
java -version
```
* How to stop mysql auto startup
* Comment out the line below in the config file ( /etc/init/mysql.conf )
* Start on (net-device-up
## Install nodejs
```bash
curl -sL https://deb.nodesource.com/setup | sudo bash -
sudo apt-get install nodejs
sudo apt-get install build-essential
```
## Setup NPM
* You can use the default `npm` on your server after you install nodejs, but there is a better way to manage your `npm` prefix. It lets you easily control your global packages.
```bash
cd
mkdir .node_modules
npm config list
npm config get prefix
## /usr or /usr/share
npm config set prefix $HOME/.node_modules
cat .npmrc
## /home/hho/.node_modules
npm install -g npm
which npm
## /usr/bin/npm
```
* Open .profile and add the following to the end of the file
```bash
export PATH="$HOME/.node_modules/bin:$PATH"
```
## Install nvm
* Using `nvm` is no longer the most popular or best option. I recommend just using `npm` to manage everything you need.
```bash
sudo apt-get update
sudo apt-get install build-essential libssl-dev
curl https://raw.githubusercontent.com/creationix/nvm/v0.16.1/install.sh | sh
source ~/.profile
nvm ls-remote
nvm install 0.11.13
nvm use 0.11.13
nvm alias default 0.11.13
nvm use default
```
## Install PHP & Composer
```bash
sudo apt-get install curl php5-cli git
curl -sS https://getcomposer.org/installer | sudo php -- --install-dir=/usr/local/bin --filename=composer
```
## Install Python2, Python3
* Ubuntu has Python installed by default
```bash
sudo apt-get install python python-pip
sudo apt-get install python3 python3-pip
## Install virtualenv
sudo pip install virtualenv
sudo pip3 install virtualenv
```
## Install Go
```bash
wget https://storage.googleapis.com/golang/go1.4.linux-amd64.tar.gz
sudo tar -xzf go1.4.linux-amd64.tar.gz -C /usr/local
sudo vi /etc/profile
GOPATH="/YOUR/USER/HOME/go"
GOROOT="/usr/local/go"
PATH=$GOROOT/bin:$PATH
```
## Install R
```bash
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys E084DAB9
sudo add-apt-repository 'deb http://star-www.st-andrews.ac.uk/cran/bin/linux/ubuntu trusty/'
sudo apt-get update
sudo apt-get install r-base
```
## Install Rust
```bash
$ curl -sf -L https://static.rust-lang.org/rustup.sh | sh
```
*Uninstall Rust*
```bash
$ sudo /usr/local/lib/rustlib/uninstall.sh
```
<file_sep>+++
date = "2017-06-07T16:56:21+11:00"
title = "What is SRE"
description="SRE - Site Reliability Engineering"
draft = true
+++
<file_sep>+++
title = "Azure: CAF - 2"
weight = 1
description="Function & Team of Cloud Adoption Framework"
+++
## Function
* Cloud adoption functions deliver technical solutions.
* Cloud strategy functions align technical change with business needs.
* Cloud operations functions support and operate adopted solutions.
* Cloud center of excellence (CCoE) functions improve quality, speed, and resiliency of adoption.
* Cloud governance functions manage risk.
* Cloud platform functions operate and mature the platform.
* Cloud automation functions accelerate adoption and innovation.
* Cloud security functions manage security risks.
## Team
### cloud strategy team
To be successful, every cloud adoption journey needs to involve some level of strategic planning. This getting started guide is designed to help you establish a dedicated team or virtual team that can build and deliver on a solid cloud strategy.
As the cloud strategy team forms and gets started, the strategy facilitator is temporarily accountable for creating alignment and keeping the team aligned with business goals. The strategy facilitator is often the person most accountable for the success of the cloud adoption journey.
> Note: Not every organization requires a dedicated team or virtual team to help meet its strategic needs. In your RACI (responsible, accountable, consulted, and informed) template, list the core accountabilities of the strategy, and identify the person on your team who will be accountable for each. If one person will take on all of those accountabilities, simply replace "cloud strategy" with that person's name in the RACI template.
* Step 1: Determine whether a cloud strategy team is needed
Deliverables: Determine whether your business requires a cloud strategy team
* Step 2: Establish the cloud strategy team
Deliverables: Identify the appropriate organizations or individuals who are willing to share in the accountability and responsibility for driving the cloud adoption strategy.
* Step 3: Establish a cadence
Deliverables: Review suggested meeting cadences and schedule meetings with all strategy team participants.
* Step 4: Establish a motivation-driven strategy
Deliverables: Record motivations in the strategy and plan template.
* Step 5: Establish business outcomes
Deliverables:
- Identify at least one expected business outcome per member of the cloud strategy team.
- Refine the list of members to align expected time commitments with expected outcomes.
- Align on a set of short-term and mid-term metrics to support ongoing progress reports.
* Step 6: Decide whether to proceed or cancel based on the business justification
Deliverables:
- Kick off the business justification effort with your supporting teams.
- Meet with the supporting teams monthly (or as needed) until the strategy team can align on a go/no go decision to proceed with cloud adoption.
* Step 7: Support adoption through a regular cadence
Deliverables:
- Prioritization: When the existing digital estate is rationalized, the strategy team helps establish waves of migration or innovation priorities.
- Evaluate risks: As cloud adoption grows, new forms of adoption expose new risks. The strategy team is responsible for helping evaluate those new risks.
- Review budget and spend: As cloud adoption increases, so will budgets for various workloads in the portfolio.
- Business planning: When the adoption teams complete their migration or innovation efforts, additional business planning will be required to maximize return from the new technology solutions.
- Executive support: Cloud adoption will result in organizational change. This is most visible within the IT organization.
### cloud adoption team
Cloud adoption teams are the modern-day equivalent of technical implementation teams or project teams. The nature of the cloud might require more fluid team structures.
> Some cloud adoption teams focus exclusively on cloud migration, and others focus on innovations that take advantage of cloud technologies. Some teams include the broad technical expertise that's required to complete large adoption efforts, such as a full datacenter migration, and others have a tighter technical focus. As cloud adoption expands, customers benefit from a team that's dedicated to the cloud platform function.
* Step 1: Determine the type of adoption team you need
The types of adoption:
- Migration of existing workloads
- Modernization of existing workloads and assets
- Architectural change to existing workloads and assets
- Development of new workloads
Deliverables:
- Determine whether the team aligns better with the Migrate methodology or the Innovate methodology.
- Each methodology has a four-step onboarding experience to help the team understand the tools and processes required to get really good at that effort.
- Align responsibilities across teams by developing a cross-team matrix that identifies responsible, accountable, consulted, and informed (RACI) parties.
* Step 2: Align your team with other supporting teams
Deliverables:
- Review design guidance, operational baselines, policies, and processes from the various supporting teams to understand the guardrails that have been established for guiding cloud adoption.
- Review the guidance with other cloud adoption teams to understand any limitations you might encounter as a result of those guardrails.
* Step 3: Begin your adoption journey
Deliverables:
- Become increasingly better at delivering on the methodology associated with your adoption approach.
- Support other teams in the completion of their accountable steps, even if those steps are blockers to your adoption efforts.
* Step 4: Expand your skills with scenarios and best practices
Deliverables:
- Increase skills and experience to address more complex adoption scenarios.
* Step 5: Build a cloud adoption factory
Deliverables:
- Improve delivery processes to create a highly optimized adoption factory.
### cloud governance team
A cloud governance team ensures that cloud-adoption risks and risk tolerance are properly evaluated and managed. The team identifies risks that can't be tolerated by the business, and it converts risks into governing corporate policies.
* Step 1: Determine whether a cloud governance team is needed
Deliverables:
- Determine whether you need a cloud governance team.
- Align responsibilities across teams by developing a cross-team matrix that identifies responsible, accountable, consulted, and informed (RACI) parties.
* Step 2: Align with other teams
Deliverables:
- Discuss current-state implementation and ongoing adoption plans with each team.
* Step 3: Establish a cadence with other teams
Deliverables:
- Establish a cadence with the supporting teams. If possible, align that cadence with release and planning cycles.
- Establish a separate cadence directly with the cloud strategy team (or various team members) to review risks that are associated with the next wave of adoption and gauge the team's level of tolerance for those risks.
* Step 4: Review the methodology
Deliverables:
- Gain an understanding of the methodology, approach, and implementation that supports the Govern methodology.
* Step 5: Complete the governance benchmark
Deliverables:
- Complete the governance benchmark assessment, based on conversations with various stakeholders. Or ask other teams to complete the assessment on their own.
* Step 6: Implement the initial governance best practice and configuration
Deliverables:
- Deploy the basic governance tools and organization configurations that are required to govern the environment during the next few waves of adoption efforts.
* Step 7: Continuously improve governance maturity
Deliverables:
- Implement governance improvements to guard against changing risks and governance needs.
### cloud operations team
An operations team focuses on monitoring, repairing, and remediating issues related to traditional IT operations and assets. In the cloud, many of the capital costs and operations activities are transferred to the cloud provider, giving IT operations the opportunity to improve and provide significant additional value.
* Step 1: Determine whether a cloud operations team is needed
Deliverables:
- Determine whether you need a cloud operations team.
- Align responsibilities across teams by developing a cross-team matrix that identifies responsible, accountable, consulted, and informed (RACI) parties.
* Step 2: Align with other teams
Deliverables:
- Discuss current-state implementation and ongoing adoption plans with each team.
* Step 3: Establish a cadence with other teams
Deliverables:
- Establish a cadence with the supporting teams. If possible, align that cadence with release and planning cycles.
- Establish a separate cadence directly with the cloud strategy team or its various team members to review any operational requirements that are associated with the next wave of adoption.
* Step 4: Review the methodology
Deliverables:
- Gain an understanding of the methodology, approach, and implementation that supports the Manage methodology.
* Step 5: Implement the operations baseline
Deliverables:
- Deploy the basic Azure server-management configurations that are required for operating the environment during the next few waves of adoption efforts.
* Step 6: Align business commitments
Deliverables:
- Document the expectations of business stakeholders.
- Determine whether advanced operations are required for specific workloads or platforms.
* Step 7: Operations maturity
Deliverables:
- Improve operations maturity to support commitments to business stakeholders.
* Step 8: Scale operations consistency through governance
Deliverables:
- Help the cloud governance team implement new requirements for resource consistency.
* Step 9: Adoption handoffs
Deliverables:
- Regularly review and accept handoffs from cloud adoption teams.
### MVP - team structure
This proven approach is considered a minimum viable product (MVP), because it might not be sustainable. Each team wears many hats, as outlined in the RACI (responsible, accountable, consulted, and informed) charts.
#### Cloud adoption team
This team is accountable for technical solutions, business alignment, project management, and operations for the solutions that are adopted.
#### Cloud governance team
To balance the cloud adoption team, a cloud governance team is dedicated to ensuring excellence in the solutions that are adopted. The cloud governance team is accountable for platform maturity, platform operations, governance, and automation.
- | Cloud adoption team | Cloud governance team
--------------------|---------------------|-----------
Solution delivery | Accountable| Consulted
Business alignment | Accountable| Informed
Change management | Accountable| Informed
Solution operations | Accountable| Informed
Governance | Consulted | Accountable
Platform maturity | Consulted | Accountable
Platform operations | Consulted | Accountable
Platform automation | Informed | Accountable
- Accountable: The one person or team to be held accountable for results and outcomes
- Responsible: Any people or teams who are responsible for contributing to the results and outcomes
- Consulted: People or teams who should be consulted prior to changes being implemented
- Informed: People or teams who should be informed about changes
<file_sep>+++
title = "JS & ES Note - 3"
description="The var, let and const keywords"
+++
### The **var** statement
* var declarations, wherever they occur, are processed before any code is executed. This is called hoisting.
* The scope of a variable declared with var is its current execution context, which is either the enclosing function or, for variables declared outside any function, global. If you re-declare a JavaScript variable, it will not lose its value.
* Assigning a value to an undeclared variable implicitly creates it as a __global__ variable (it becomes a property of the global object) when the assignment is executed.
> Test yourself with following samples
#### Sample 1
```js
function testVar(){
console.log(a)
console.log(b)
var b = 2
c=3
}
var a = 1
testVar()
console.log(b)
```
#### Answer of sample 1
```js
function testVar(){
console.log(a) // 1
console.log(b) // undefined
var b = 2
c = 3
}
var a = 1
testVar()
console.log(c) // 3 - c leaked to the global scope
console.log(b) // ReferenceError: b is not defined (var b is local to testVar)
```
#### Sample 2 (Run in browser's console)
```js
d = 13
console.log(this.d)
delete this.d
console.log(this.d)
var e = 31
console.log(this.e)
delete this.e
console.log(this.e)
var i = i + 1
console.log( i )
console.log( this.i )
```
#### Answer of sample 2
```js
d = 13 // implicit global: a configurable property of the global object
console.log(this.d) // 13
delete this.d // succeeds, because implicit globals are configurable
console.log(this.d) // undefined
var e = 31
console.log(this.e) // 31
delete this.e // fails, because var-declared globals are non-configurable
console.log(this.e) // 31
var i = i + 1 // i is hoisted, so this is undefined + 1
console.log( i ) // NaN
console.log( this.i ) // NaN
```
* Conclusion: Because of the above unexpected results, it is recommended to always declare variables, regardless of whether they are in a function or global scope.
> Since the var statement is difficult to use safely, people had to come up with a solution to the problem. The result is two new keywords: let & const, introduced in ECMAScript 2015 (6th Edition, ECMA-262)
### Let and Const statement
* The let statement declares a block scope local variable, optionally initializing it to a value.
* __let__ allows you to declare variables that are limited to the scope of the block statement or expression in which they are used, unlike the var keyword, which defines a variable globally, or locally to an entire function regardless of block scope. The other difference between var and let is that a let variable is initialized only when the parser evaluates its declaration; accessing it before that throws a ReferenceError.
* Constants are block-scoped, much like variables defined using the let statement. The value of a constant can't be changed through reassignment, and it can't be re-declared.
* The const declaration creates a read-only reference to a value. It does not mean the value it holds is immutable, just that the variable identifier cannot be reassigned. For instance, in the case where the content is an object, this means the object's contents (e.g., its properties) can be altered.
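A quick illustration of that last point:
```js
const config = { retries: 3 }
config.retries = 5 // OK: the object's contents can still change
console.log(config.retries) // 5
config = {} // TypeError: Assignment to constant variable.
```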
> Test yourself with the following samples
#### Sample 3
```js
var var1;
let letVar;
const constVar;
function testVar() {
console.log( var1);
console.log( constVar);
console.log( letVar);
}
testVar()
```
#### Answer of sample 3
```js
var var1;
let letVar;
const constVar; // SyntaxError: Missing initializer in const declaration - the whole script fails to parse
function testVar() {
console.log( var1);
console.log( constVar);
console.log( letVar);
}
testVar()
```
#### Sample 4
```js
var v1 = "";
var v1 = 123;
let let1 = "";
let let1 = 123;
const c1 = "";
c1 = 123;
```
#### Answer of sample 4
```js
var v1 = "";
var v1 = 123;
let let1 = "";
let let1 = 123; //SyntaxError: Identifier 'let1' has already been declared.
const c1 = "";
c1 = 123; // TypeError: Assignment to constant variable.
```
#### Sample 5
```js
for ( var i = 0 ; i < 5 ; i++ ){
var x = 20;
console.log(i);
}
console.log( i );
console.log( x );
for ( ; i < 10 ; i++ ){
var i
console.log(i);
}
/////////////////////////////////////////
for ( let t = 0 ; t < 5 ; t++ ){
console.log( t);
let s = 100
}
console.log(s)
console.log(t)
```
#### Answer of sample 5
```js
for ( var i = 0 ; i < 5 ; i++ ){
var x = 20
console.log(i); // 0 1 2 3 4
}
console.log( i ); // 5
console.log( x ); // 20
for ( ; i < 10 ; i++ ){
var i // re-declare will not reset value
console.log(i); // 5 6 7 8 9
}
/////////////////////////////////////////
for ( let t = 0 ; t < 5 ; t++ ){
console.log( t);
let s = 100
}
console.log(s) // ReferenceError: s is not defined (s is scoped to the loop body)
console.log(t) // ReferenceError: t is not defined (t is scoped to the for statement)
```
<file_sep>+++
title="Pitfalls"
description="Common pitfalls"
weight = 9
+++
### Shadowing
* Hiding (shadowing) a variable by misusing short declaration.
* Such mistakes occur mostly inside the if-body or for-loop
```go
var remember bool = false
if something {
remember := true // Wrong.
}
// use remember
func shadow() (err error) {
x, err := check1() // x is created; err is assigned to
if err != nil {
return // err correctly returned
}
if y, err := check2(x); err != nil { // y and inner err are created
return // inner err shadows outer err so nil is wrongly returned!
} else {
fmt.Println(y)
}
return
}
```
### Misusing strings
* String concatenations of the kind a += b are inefficient, especially when performed inside a loop.
* Instead one should use a bytes.Buffer to accumulate string content
```go
var b bytes.Buffer
// ...
for condition {
b.WriteString(str) // appends string str to the buffer
}
return b.String()
```
### Using defer incorrectly
* Using defer for closing a file in the wrong scope
* It mostly occurs in the for-loop body. Suppose you are processing a range of files in a for-loop, and you want to make sure the files are closed after processing by using defer.
* BAD defer sample:
```go
for _, file := range files {
if f, err = os.Open(file); err != nil {
return
}
defer f.Close() // This is /wrong/.
// The file is not closed when this loop iteration ends.
// perform operations on f:
f.Process(data)
}
```
* Defer is only executed at the return of a function, not at the end of a loop or some other limited scope.
* DO NOT use defer to close the file in the for-loop body
```go
for _, file := range files {
if f, err = os.Open(file); err != nil {
return
}
// perform operations on f:
f.Process(data)
// close f:
f.Close()
}
```
### Confusing new() and make()
* for slices, maps and channels, use make
* for arrays, structs, and all value types, use new
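A short sketch of the difference:
```go
package main

import "fmt"

type point struct{ x, y int }

func main() {
    p := new(point)           // *point to a zeroed struct - new returns a pointer
    s := make([]int, 0, 10)   // initialized slice: len 0, cap 10
    m := make(map[string]int) // initialized, ready-to-use map

    p.x = 1
    s = append(s, p.x)
    m["x"] = p.x
    fmt.Println(p, s, m) // &{1 0} [1] map[x:1]
}
```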
### No need for slice
* No need to pass a pointer to a slice to a function
* A slice is a pointer to an underlying array. Passing a slice as a parameter to a function is probably what you always want: namely passing a pointer to a variable to be able to change it, and not passing a copy of the data.
* Do not dereference a slice when used as a parameter!
```go
// correct way
func findBiggest( listOfNumbers []int ) int {}
// wrong way
func findBiggest( listOfNumbers *[]int ) int {}
```
### Using pointers to interface
* Look at the following program: nexter is an interface with a method next() meaning read the next byte. nextFew1 has this interface type as parameter and reads the next num bytes, returning them as a slice: this is ok. nextFew2, however, takes a pointer to the interface type; a pointer to an interface has no method set, so calling n.next() on it fails to compile. Never pass a pointer to an interface value.
```go
package main
import (
"fmt"
)
type nexter interface {
next() byte
}
func nextFew1(n nexter, num int) []byte {
b := make([]byte, num)
for i:=0; i < num; i++ {
b[i] = n.next()
}
return b
}
func nextFew2(n *nexter, num int) []byte {
b := make([]byte, num)
for i:=0; i < num; i++ {
b[i] = n.next() // compile error:
// n.next undefined (type *nexter has no field or method next)
}
return b
}
func main() {
fmt.Println("Hello World!")
}
```
### Misusing pointers
* Passing a value as a parameter in a function or as receiver to a method may seem a misuse of memory, because a value is always copied. But on the other hand values are allocated on the stack, which is quick and relatively cheap.
* If you would pass a pointer to the value instead the Go compiler in most cases will see this as the making of an object, and will move this object to the heap, so also causing an additional memory allocation: therefore nothing was gained in using a pointer instead of the value!
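A small sketch of that trade-off (whether a value actually escapes can be checked with `go build -gcflags=-m`):
```go
package main

import "fmt"

type vec struct{ x, y, z float64 }

// Value parameter: the copy lives on the stack, which is quick and cheap.
func lenValue(v vec) float64 { return v.x*v.x + v.y*v.y + v.z*v.z }

// Pointer parameter: the compiler may decide the pointed-to value has to live
// on the heap, adding the very allocation the pointer was meant to avoid.
func lenPointer(v *vec) float64 { return v.x*v.x + v.y*v.y + v.z*v.z }

func main() {
    v := vec{1, 2, 3}
    fmt.Println(lenValue(v), lenPointer(&v))
}
```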
### Misusing goroutines and channel
* In practice often you don’t need the concurrency, or you don’t need the overhead of the goroutines with channels, passing parameters using the stack is in many cases far more efficient.
* Moreover it is likely to leak memory if you break or return or panic your way out of the loop, because the goroutine then blocks in the middle of doing something. In real code, it is often better to just write a simple procedural loop. Use goroutines and channels only where concurrency is important!
```go
var values = [5]int{10, 11, 12, 13, 14}
func main() {
// version A:
fmt.Println("\nVersion A:")
for ix := range values { // ix is the index
func() {
fmt.Print(ix, " ")
}() // call closure, prints each index
}
fmt.Println()
// version B: same as A, but call closure as a goroutine
fmt.Println("\nVersion B:")
for ix := range values {
go func() {
fmt.Print(ix, " ")
}()
}
fmt.Println()
time.Sleep(5e9)
// version C: the right way
fmt.Println("\n\nVersion C:")
for ix := range values {
go func(ix interface{}) {
fmt.Print(ix, " ")
}(ix)
}
fmt.Println()
time.Sleep(5e9)
// version D: print out the values:
fmt.Println("\n\nVersion D:")
for ix := range values {
val := values[ix]
go func() {
fmt.Print(val, " ")
}()
}
time.Sleep(1e9)
}
//----- output -------------
// Version A:
// 0 1 2 3 4
// Version B:
// 4 4 4 4 4
// Version C:
// 1 3 4 0 2
// Version D:
// 14 10 13 12 11
```
### Bad error handling
#### Don’t use booleans:
* Making a boolean variable whose value is a test on the error-condition like in the following is superfluous
```go
var good bool
// test for an error, good becomes true or false
if !good {
return errors.New("things aren't good")
}
////--------------
//...
err1 := api.Func1()
if err1 != nil { … }
```
#### Don’t clutter your code with error-checking
* BAD sample.
```go
// ...
err1 := api.Func1()
if err1 != nil {
fmt.Println("err: " + err1.Error())
return
}
err2 := api.Func2()
if err2 != nil {
//...
return
}
```
* With the above pattern, it is hard to tell what is normal program logic and what is error checking/reporting. Also notice that most of the code is dedicated to error conditions at any point in the code. A good solution is to wrap your error conditions in a closure wherever possible
```go
err := func () error {
if req.Method != "GET" {
return errors.New("expected GET")
}
if input := parseInput(req); input != "command" {
return errors.New("malformed command")
}
// other error conditions can be tested here
return nil
} ()
if err != nil {
w.WriteHeader(400)
io.WriteString(w, err.Error())
return
}
```
<file_sep>+++
title = "Java Note - 1: Enum"
description="Replace constant property of the interface or abstract class with Enum"
+++
## Prerequisites
>*Java 1.5+*
## New type: Enum
Enum was a great improvement in Java 1.5. Since then, more and more developers have abandoned the interface or abstract class as a constant-variable container.
### Before Java 5
Before Java 1.5 you would see code like the following in many Java programs.
```java
// Use interface or abstract class as constant variable container
public interface Country {
public static final String AU = "Australian";
public static final String UK = "United Kingdom";
public static final String US = "United State";
}
public class Util {
public static String getLanguageCode(String country) {
String languageCode = "en";
// switch on String only exists since Java 7, so the pre-Java-5 idiom is an if/else chain
if (Country.AU.equals(country)) {
languageCode = "en-au";
} else if (Country.UK.equals(country)) {
languageCode = "en-uk";
} else if (Country.US.equals(country)) {
languageCode = "en-us";
}
return languageCode;
}
}
```
The above program looks very good. But take a close look and check it carefully: you will find it can never return __*en-au*__, since there is a typo in the constant AU. It should be __*Australia*__ instead of __*Australian*__. Typos like this slip past many developers (including me) again and again. Using a string as a constant flag is not a good option, but there was no better solution before Java 1.5.
### After Java 5
After Java 1.5, you will see the change below. Enum is the best container for constants: it lets the compiler catch mistakes at compile time, and it simplifies your coding.
```java
// Use Enum as constant variable container
public enum Country {
Australia, UnitedKingdom, UnitedState
}
public class Util {
public static String getLanguageCode(Country country) {
String languageCode = "en";
switch (country) {
case Australia:
languageCode = "en-au";
break;
case UnitedKingdom:
languageCode = "en-uk";
break;
case UnitedState:
languageCode = "en-us";
break;
}
return languageCode;
}
}
```
Now your program will not be broken by any unintentional typo, since the compiler will report an error before you run the application. If you haven't refactored your static constants container, it is time to improve it now.
Since Java 1.7 there are some new features; one of them is that the switch statement now supports String. This is great for Java developers: with it, the old Enum can be enhanced and the Util class can provide more handy methods for development, such as an overloaded getLanguageCode.
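For instance, such an overload could look like this (a sketch; the `LanguageCodes` class name is just for illustration and is not part of the original Util class):
```java
public class LanguageCodes {
    // Java 7+ switch on String: map a country name straight to a language code.
    public static String getLanguageCode(String countryName) {
        switch (countryName) {
            case "Australia":      return "en-au";
            case "United Kingdom": return "en-uk";
            case "United State":   return "en-us";
            default:               return "en";
        }
    }

    public static void main(String[] args) {
        System.out.println(getLanguageCode("Australia")); // en-au
    }
}
```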
The new Enum can support flexible requirements: by default toString returns exactly the declared constant name, but it can be overridden to return a different, friendlier name.
```java
// It can return customized name and simplify coding
public enum Country {
AU("Australia", "au","en-au"),
UK("United Kingdom", "uk", "en-uk"),
US("United State", "us","en-us");
String countryName;
String countryCode;
String languageCode;
private Country(String name, String code, String language) {
countryName = name;
countryCode = code;
languageCode = language;
}
public String getCode() {
return countryCode;
}
public String getLanguageCode() {
return languageCode;
}
@Override
public String toString() {
return countryName;
}
}
```
The Util class can convert any country name or country code to the enum Country, and vice versa. Now developers can seamlessly convert a String from the UI to the enum, since the UI usually shows the country name (Australia or United Kingdom) instead of just AU or UK, while using AU or UK in code keeps things simple and developer-friendly.
```java
public class Util {
public static Country convertCountryNameOrCode(String nameOrCode ) {
Country country = null;
switch (nameOrCode) {
case "au":
case "AU":
case "Australia":
country = Country.AU;
break;
case "uk":
case "UK":
case "United Kingdom":
country = Country.UK;
break;
case "us":
case "US":
case "United State":
country = Country.US;
break;
}
return country;
}
public static String getCountryName( Country country ){
String countryName = null;
switch (country){
case AU:
countryName = Country.AU.toString();
break;
case US:
countryName = Country.US.toString();
break;
case UK:
countryName = Country.UK.toString();
break;
default:
System.err.println("Unknown Country");
assert false;
break;
}
return countryName;
}
public static String getCountryCode( Country country ){
String countryCode = null;
switch (country){
case AU:
countryCode = Country.AU.getCode();
break;
case US:
countryCode = Country.US.getCode();
break;
case UK:
countryCode = Country.UK.getCode();
break;
default:
System.err.println("Unknown Country");
assert false;
break;
}
return countryCode;
}
public static String getLanguageCode(Country country) {
String languageCode = "en";
switch (country) {
case AU:
languageCode = Country.AU.getLanguageCode() ;
break;
case UK:
languageCode = Country.UK.getLanguageCode();
break;
case US:
languageCode = Country.US.getLanguageCode();
break;
}
return languageCode;
}
}
```
<file_sep>+++
title = "TS: Basic Types"
description="Basic Types"
+++
## Basic Types
In TypeScript, the same types as you would expect in JavaScript are supported, with a convenient enumeration type thrown in to help things along.
### Types in JavaScript
* Boolean - The most basic datatype is the simple true/false value, aka boolean value.
let isDone: boolean = false;
* Number - All numbers in TypeScript are floating point values.
let decimal: number = 6;
let hex: number = 0xf00d;
let binary: number = 0b1010;
let octal: number = 0o744;
* String - Another fundamental part of creating programs in JavaScript for webpages and servers alike is working with textual data. TypeScript also uses double quotes (") or single quotes (') to surround string data.
let color: string = "blue";
color = 'red';
* Template strings - It can span multiple lines and have embedded expressions. These strings are surrounded by the backtick/backquote (`) character, and embedded expressions are of the form ${ expr }.
```ts
let sentence: string = `Hello, my name is ${ fullName }.
I'll be ${ age + 1 } years old next month.`;
```
* Array - There are two ways: the element type followed by [] to denote an array of that element type, or the generic array type Array<elemType>.
let list: number[] = [1, 2, 3];
let list: Array<number> = [1, 2, 3];
* Object - object is a type that represents the non-primitive type, i.e. anything that is not number, string, boolean, bigint, symbol, null, or undefined.
declare function create(o: object | null): void;
create({ prop: 0 }); // OK
create(null); // OK
create(42); // Error
create("string"); // Error
create(false); // Error
create(undefined); // Error
### Types in TypeScript
* Tuple - Tuple types allow you to express an array with a fixed number of elements whose types are known, but need not be the same.
let x: [string, number, boolean];
x=['text', 1, true]; // OK
x=['text', , true]; // Error, Type 'undefined' is not assignable to type 'number'.
x=['text', 0, 0]; // Error, Type 'number' is not assignable to type 'boolean'.
console.log(x[0]); // OK
console.log(x[3]); // Error, Tuple type '[string, number, boolean]' of length '3' has no element at index '3'.
* Enum - A helpful addition to the standard set of datatypes from JavaScript is the enum. By default, enums begin numbering their members starting at 0. You can change this by manually setting the value of one of its members.
enum Color {Red, Green, Blue}
let c: Color = Color.Green;
console.log(c); // output: 0
enum Color2 { Red = 1, Green = 2, Blue = 4 }
let d: Color2 = Color2.Green;
console.log(d); // output: 2
enum Color3 { Red = 10, Green , Blue }
let e: Color3 = Color3.Blue;
console.log(e); // output: 12
* Any - The any type is a powerful way to work with existing JavaScript, allowing you to gradually opt-in and opt-out of type checking during compilation. You might expect Object to play a similar role, as it does in other languages. However, variables of type Object only allow you to assign any value to them. You can’t call arbitrary methods on them, even ones that actually exist.
let a: any = 4.001;
console.log(a.toFixed()) // output: 4
let o: Object = 4.001;
console.log(o.valueOf()) // output: 4.001
console.log(o.toFixed()) // Error - Property 'toFixed' does not exist on type 'Object'.
* Void - void is a little like the opposite of any: the absence of having any type at all. Declaring variables of type void is not useful because you can only assign null (only if --strictNullChecks is not specified, see next section) or undefined to them.
function warnUser(): void {
console.log("This is my warning message");
}
let unusable: void = undefined;
unusable = null; // OK if `--strictNullChecks` is not given
* Null and Undefined - both undefined and null actually have their own types named undefined and null respectively.When using the --strictNullChecks flag, null and undefined are only assignable to any and their respective types (the one exception being that undefined is also assignable to void). This helps avoid many common errors. In cases where you want to pass in either a string or null or undefined, you can use the union type string | null | undefined.
let u: undefined = undefined;
let n: null = null;
* Never - The never type represents the type of values that never occur. For instance, never is the return type for a function expression or an arrow function expression that always throws an exception or one that never returns; Variables also acquire the type never when narrowed by any type guards that can never be true. The never type is a subtype of, and assignable to, every type; however, no type is a subtype of, or assignable to, never (except never itself). Even any isn’t assignable to never.
function error(message: string): never {
throw new Error(message);
}
// Function returning never must have unreachable end point
function infiniteLoop(): never {
while (true) {
}
}
<file_sep>+++
title = "Azure: RBAC - 3"
weight = 1
description="Best practices & Azure AD Privileged Identity Management"
+++
## Best practices
### Only grant the access users need
Scope | Reader | Resource-specific | Custom | Contributor | Owner
------|--------|-------------------|--------|-------------|------
Management Group | Observers | Users managing resources | Users managing resources | Users managing resources | Admins
Subscriptions | Observers | Users managing resources | Users managing resources | Users managing resources | Admins
Resource Group | Observers | Users managing resources | Users managing resources | Users managing resources | Admins
Resources | Automated processes | Automated processes | Automated processes | Automated processes | Automated processes
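As an illustration of the table above, granting a user read-only access scoped to a single resource group (rather than the whole subscription) could look like the Azure CLI sketch below; the subscription ID, resource group and account are placeholders, not values from this post.
```bash
# Assign the built-in Reader role at resource-group scope only
az role assignment create \
  --assignee "observer@contoso.com" \
  --role "Reader" \
  --scope "/subscriptions/<subscription-id>/resourceGroups/<resource-group>"
```
Keeping the scope as narrow as the table suggests also makes it easy to revoke the assignment later with `az role assignment delete` against the same scope.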
### Azure AD Privileged Identity Management
Privileged Identity Management (PIM) is a service in Azure Active Directory (Azure AD) that enables you to manage, control, and monitor access to important resources in your organization. These resources include resources in Azure AD, Azure, and other Microsoft Online Services such as Microsoft 365 or Microsoft Intune.
#### What does it do
Privileged Identity Management provides time-based and approval-based role activation to mitigate the risks of excessive, unnecessary, or misused access permissions on resources that you care about.
- Provide just-in-time privileged access to Azure AD and Azure resources
- Assign time-bound access to resources using start and end dates
- Require approval to activate privileged roles
- Enforce multi-factor authentication to activate any role
- Use justification to understand why users activate
- Get notifications when privileged roles are activated
- Conduct access reviews to ensure users still need roles
- Download audit history for internal or external audit
<file_sep>+++
title = "PostgresQL Note - 1"
description="Introduction of SQL for PostgresQL"
+++
> PostgreSQL is a powerful, open source object-relational database system with over 30 years of active development that has earned it a strong reputation for reliability, feature robustness, and performance.
### Getting Started
* Switch to user postgres
```bash
sudo su - postgres
psql
```
* Create new login id as super admin
It is frequently convenient to group users together to ease management of privileges: that way, privileges can be granted to, or revoked from, a group as a whole. In PostgreSQL this is done by creating a role that represents the group, and then granting membership in the group role to individual user roles.
> In the SQL standard, there is a clear distinction between users and roles, and users do not automatically inherit privileges while roles do. This behavior can be obtained in PostgreSQL by giving roles being used as SQL roles the INHERIT attribute, while giving roles being used as SQL users the NOINHERIT attribute. However, PostgreSQL defaults to giving all roles the INHERIT attribute, for backward compatibility with pre-8.1 releases in which users always had use of permissions granted to groups they were members of.
> The role attributes LOGIN, SUPERUSER, CREATEDB, and CREATEROLE can be thought of as special privileges, but they are never inherited as ordinary privileges on database objects are. You must actually SET ROLE to a specific role having one of these attributes in order to make use of the attribute.
```sql
CREATE ROLE user_id WITH
LOGIN
SUPERUSER
CREATEDB
CREATEROLE
INHERIT
REPLICATION
CONNECTION LIMIT -1
PASSWORD '<PASSWORD>';
GRANT postgres, pg_monitor, pg_read_all_settings,
    pg_read_all_stats, pg_signal_backend, pg_stat_scan_tables
TO user_id WITH ADMIN OPTION;
```
* Change password
```sql
ALTER USER user_id WITH PASSWORD '<PASSWORD>';
```
* Once the group role exists, you can add and remove members using the GRANT and REVOKE commands
```sql
GRANT postgres TO new_role;
REVOKE postgres FROM new_role;
```
* Create new database with new login id and load sql script to initialize the database
```bash
sudo -u user_id psql -c 'CREATE DATABASE db_01;'
sudo -u user_id psql -d postgres -f init_db.sql
```
* Grant the privilege of database __db_01__ to other user
```sql
GRANT ALL PRIVILEGES ON DATABASE db_01 to another_user;
```
<file_sep>+++
title = "F#"
description = "F# Tutorials"
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
title = "Docker Practices"
description="Useful & practical docker practices"
+++
> Docker is an open platform for developing, shipping, and running applications. Docker provides the ability to package and run an application in a loosely isolated environment called a container. The isolation and security allow you to run many containers simultaneously on a given host. Containers are lightweight because they don’t need the extra load of a hypervisor, but run directly within the host machine’s kernel. This means you can run more containers on a given hardware combination than if you were using virtual machines. You can even run Docker containers within host machines that are actually virtual machines!
> Docker enables you to separate your applications from your infrastructure so you can deliver software quickly. With Docker, you can manage your infrastructure in the same ways you manage your applications. By taking advantage of Docker’s methodologies for shipping, testing, and deploying code quickly, you can significantly reduce the delay between writing code and running it in production.
## Common Use Cases
### Test with busybox image
```
docker run -it --rm busybox echo Hello World
## output
Hello World
```
### Test with nginx image
Create a test web page with content below
```html
<!DOCTYPE html>
<html >
<head>
<title>Docker Nginx</title>
</head>
<body>
<h2>Hello from docker</h2>
</body>
</html>
```
Test the web page with the nginx image
```
docker run -it --rm -d -p 8080:80 --name web -v ~/app:/usr/share/nginx/html nginx
```
### build
Build an image from a Dockerfile. The presence of Dockerfile is mandatory. The file name convention is __Dockerfile__ or **your_customized_filename.Dockerfile**
$ docker build <path_of_workspace>
Build an image with tag
$ docker build <path_of_workspace> -t <image_tag>
Build an image with specific dockerfile
$ docker build <path_of_workspace> -f <path_of_dockerfile>
### tag or untag
$ docker image tag SOURCE_IMAGE[:TAG] TARGET_IMAGE[:TAG]
$ docker rmi [unwanted_tag]
### push
$ docker push <docker_repo>:<image_tag>
### scripting
The build & push can be simplified with some scripting. Here I recap a script from my other docker repository below.
```bash
DOCKER_REPO=$1
if [ -z "$1" ]; then
DOCKER_REPO='harryh00/docker-kits'
fi
echo docker repo: $DOCKER_REPO
############################################################################
## --------------------- Build images ---------------------------
############################################################################
build_image() {
FOLDER=$1
echo "----- Build ${FOLDER} based image -----"
for fname in $(ls ${FOLDER}); do
PREFIX=${fname/".Dockerfile"/""}
echo ":::: Build ${DOCKER_REPO}:${FOLDER}-${PREFIX}"
docker build ${FOLDER} -f "${FOLDER}/${PREFIX}.Dockerfile" \
-t "${DOCKER_REPO}:${FOLDER}-${PREFIX}"
done
}
##############################################################################
## --------------------- Push images to docker hub ---------------------------
##############################################################################
## alpine based image
push_image() {
FOLDER=$1
echo "----- Push ${FOLDER} based image -----"
for fname in $(ls ${FOLDER}); do
PREFIX=${fname/".Dockerfile"/""}
echo ":::: Push ${DOCKER_REPO}:${FOLDER}-${PREFIX}"
docker push ${DOCKER_REPO}:${FOLDER}-${PREFIX}
done
}
# Main
main() {
docker login
docker info
FOLDERS=(
alpine
ubuntu
centos
)
for FOLDER in ${FOLDERS[@]}; do
build_image ${FOLDER}
push_image ${FOLDER}
done
}
main "$@"
```
### 2-step build
For production deployment, usually we just deploy the delivery instead of the full copy of the source code. To achieve that, we can split the build into 2 steps: the first step builds the source code, and the second one builds the deliverable image.
I recap one dockerfile from my repository vue-crm here
```dockerfile
###### Build the App #####
FROM node:10.19 AS node
LABEL author="<NAME>"
WORKDIR /
COPY . .
RUN npm install
RUN npm run build -- --prod
###### Build the Delivery #####
FROM nginx:alpine
LABEL author="Harry Ho"
WORKDIR /var/cache/nginx
COPY --from=node /dist /usr/share/nginx/html
COPY ./config/nginx.conf /etc/nginx/conf.d/default.conf
```
<file_sep>FROM nginx:alpine
LABEL author="<NAME>"
COPY ./public /usr/share/nginx/html
<file_sep>+++
date = "2021-12-20T14:59:31+11:00"
title = "Ubuntu Desktop 20 LTS note"
description = "Post-installation for Ubuntu 20 desktop"
+++
## Ubuntu 20.04.3 LTS (Focal Fossa)
Ubuntu is the world’s most popular open-source desktop operating system. Ubuntu 20.04 LTS is an enterprise-grade, secure, cost-effective operating system for organisations and home users.
### Zsh Prezto
#### Install Prezto
```sh
clear
sudo apt-get install -y git
sudo apt-get update && sudo apt-get install -y zsh
# Get prezto
git clone --recursive https://github.com/sorin-ionescu/prezto.git ~/.zprezto
# Backup zsh config if it exists
if [ -f ~/.zshrc ];
then
mv ~/.zshrc ~/.zshrc.backup
fi
# Create links to zsh config files
ln -s ~/.zprezto/runcoms/zlogin ~/.zlogin
ln -s ~/.zprezto/runcoms/zlogout ~/.zlogout
ln -s ~/.zprezto/runcoms/zpreztorc ~/.zpreztorc
ln -s ~/.zprezto/runcoms/zprofile ~/.zprofile
ln -s ~/.zprezto/runcoms/zshenv ~/.zshenv
ln -s ~/.zprezto/runcoms/zshrc ~/.zshrc
```
#### Change theme & module
* Update the theme 'sorin' to 'steeef' in .zpreztorc
* Add following plugins
```
zstyle ':prezto:load' pmodule \
'environment' \
'terminal' \
'editor' \
'history' \
'directory' \
'spectrum' \
'utility' \
'completion' \
'git' \
'syntax-highlighting' \
'history-substring-search' \
'prompt'
```
#### Change shell to Zsh
```sh
chsh -s $(which zsh)
source ~/.zshrc
```
### Install docker
#### Set up the repository
```sh
sudo apt-get update
sudo apt-get install \
ca-certificates \
curl \
gnupg \
lsb-release
```
#### Add Docker’s official GPG key:
```sh
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor \
-o /usr/share/keyrings/docker-archive-keyring.gpg
# Use the following command to set up the stable repository.
# To add the nightly or test repository, add the word nightly or test (or both)
# after the word stable in the commands below. Learn about nightly and test channels.
echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
```
#### Install Docker Engine
```sh
# Update the apt package index, and install the latest version of
# Docker Engine and containerd, or go to the next step to install
# a specific version:
sudo apt-get update
sudo apt-get install docker-ce docker-ce-cli containerd.io
```
#### Post installation of Docker
```sh
# Create the docker group.
sudo groupadd docker
# Add your user to the docker group.
sudo usermod -aG docker $USER
# Log out and log back in so that your group membership is re-evaluated.
# On Linux, you can also run the following command to activate the changes to groups:
newgrp docker
# Test the docker
docker ps
```
#### Add Docker completion to Zsh
* Add the completion to prezto
```sh
curl -fLo ~/.zprezto/modules/completion/external/src/_docker \
https://raw.githubusercontent.com/docker/cli/master/contrib/completion/zsh/_docker
```
* Add following line to zshrc
```
autoload -Uz compinit; compinit
```
### Git
#### Set git credential store
```sh
git config credential.helper store
# OR
git config --global credential.helper store
```
### Dotnet Core 6 SDK
* Add the Microsoft package signing key to your list of trusted keys and add the package repository.
```sh
wget https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
sudo dpkg -i packages-microsoft-prod.deb
rm packages-microsoft-prod.deb
```
* Install the SDK
```sh
sudo apt-get update; \
sudo apt-get install -y apt-transport-https && \
sudo apt-get update && \
sudo apt-get install -y dotnet-sdk-6.0
```
### Golang
* Download & install the golang tar ball
```sh
# Download the linux tar ball from golang site
sudo rm -rf /usr/local/go && sudo tar -C /usr/local -xzf go1.17.5.linux-amd64.tar.gz
```
* Set the Go to PATH on file `.zshrc`
```
export PATH=$PATH:/usr/local/go/bin
```
* Verify the version
```
go version
```
### Rustlang
* Use universal script
```
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
```
### Java: OpenJDK
* Install LTS JDK
```sh
sudo apt install -y openjdk-8-jdk \
openjdk-11-jdk \
openjdk-17-jdk
```
* Setup default JDK
```sh
sudo update-alternatives --config java
```
<file_sep>+++
title = "Qemu & Virtual Machine"
description="Use Qemu to create virtual machines"
+++
___What is Qemu ?___
[QEMU](https://wiki.qemu.org/Main_Page) is free and open source. And is licensed under GPL 2. it has the ability to run under both KVM and XEN models (if you enabled virtualization technology from your BIOS first) and offers a lot of options and virtualization options. In this article, we’ll explain how to use QEMU and install it.
QEMU is a virtualization technology emulator that allows you to run operating systems and Linux distributions easily on your current system without the need to install them or burn their ISO files. It is like VMware or VirtualBox. You can use it at anytime to emulate running any operating system you want on a lot of devices and architecture.
## Prerequisites
* Install [Qemu](https://www.qemu.org/download/#linux) on Linux (Ubuntu / Cent OS) or MacBook
* Create a folder to store the images files. e.g. ~/ws/vms
## Assumptions
* Assume the new iso files are stored in folder "Download"
* Architect of your machine is x86_64
* Install Ubuntu as VM on Qemu
## Create new image
### Create a new image
Use `qemu-img` to create an image file with a maximum size of 10GB
qemu-img create -f qcow2 ~/ws/vms/ubuntu16-vm.img 10G
### Resize the maximum
Resize the maximum of the existing image by adding 30GB
qemu-img resize ~/ws/vms/ubuntu16-vm.img +30G
### Create new vm
Create a new VM (virtual machine) on the image file
qemu-system-x86_64 -m 2048 -boot d -enable-kvm \
-smp 2 -net nic -net user \
-hda ~/ws/vms/ubuntu16-vm.img \
-cdrom ~/Downloads/ubuntu-16.04.iso
`-m 2048`: Here we chose the RAM amount that we want to provide for QEMU when running the ISO file. We chose 2048MB here. You can change it if you like according to your needs.
`-boot d`: The boot option allows us to specify the boot order, which device should be booted first? `d` means that the CD-ROM will be the first, then QEMU will boot normally to the hard drive image. We have used the `-cdrom` option as you can see at the end of the command. You can use `c` if you want to boot the hard drive image first.
`-enable-kvm`: This is a very important option. It allows us to use the KVM technology to emulate the architecture we want. Without it, QEMU will use software rendering which is very slow. That’s why we must use this option, just make sure that the virtualization options are enabled from your computer BIOS.
`-smp 2`: If we want to use more than 1 core for the emulated operating system, we can use this option. We chose to use 2 cores to run the virtual image which will make it faster. You should change this number according to your computer’s CPU.
`-net nic -net user`: By using these options, we will enable an Ethernet Internet connection to be available in the running virtual machine by default.
`-hda testing-image.img`: Here we specified the path for the hard drive which will be used. In our case, it was the testing-image.img file which we created before.
`-cdrom ubuntu-16.04.iso`: Finally we told QEMU that we want to boot our ISO file “ubuntu-16.04.iso”.
### Start the new VM
qemu-system-x86_64 -m 2048 -boot d -enable-kvm \
-smp 2 -net nic -net user \
-hda ~/ws/vms/ubuntu16-vm.img
### Post-Create VM
* Install `spice-vdagent` to Copy content from Host to Guest
* Install `tmux`
<file_sep>+++
date = "2016-12-07T16:56:21+11:00"
title = "Tensorflow Note - 1"
description = "Tensorflow Note - 1"
+++
> TensorFlow is an end-to-end open source platform for machine learning. It has a comprehensive, flexible ecosystem of tools, libraries and community resources that lets researchers push the state-of-the-art in ML and developers easily build and deploy ML powered applications.
### Prerequisites
* You are using Ubuntu 16
* Your machine has Nvidia GPU card installed
* If you are using Ubuntu 14, the installation of CUDA and cuDNN will be a bit different. Please check Google's instructions.
### Install python3 and pip3
* [Please find instructions here](https://harryho.github.io/os/ubuntu-server-14)
### Install virtualenv via pip3
pip3 install virtualenv
### Create two tensorflow virtualenvs.
mkdir ~/.envs
virtualenv --system-site-packages ~/.envs/tf ### CPU only
virtualenv --system-site-packages ~/.envs/tfgpu ### GPU enabled
### Install tensorflow for different virtualenvs
source ~/.envs/tf/bin/activate
source ~/.envs/tfgpu/bin/activate
pip3 install tensorflow ### CPU only
pip3 install tensorflow-gpu ### GPU enabled
### Install CUDA and cuDNN for tensorflow-gpu
* Use the following commands to check your GPU information
```bash
lspci -nn | grep '\[03'
lshw -numeric -C display
### GPU info sample
### NVIDIA Corporation GM107M [GeForce GTX 850M]
```
* Download and install Nvidia driver based on above [GPU info](http://www.geforce.com/drivers)
```bash
chmod +x NVIDIA-Linux-xxxx.run
sudo ./NVIDIA-Linux-xxxx.run
```
* Download and install [CUDA from NVIDIA](https://developer.nvidia.com/cuda-downloads)
```bash
sudo dpkg -i cuda-repo-xxxxx.deb
sudo apt-get update
sudo apt-get install cuda
```
* Setup CUDA_HOME
```bash
### CUDA
export CUDA_HOME=/usr/local/cuda-8.0
export LD_LIBRARY_PATH=${CUDA_HOME}/lib64
```
* Download and install [cuDNN for CUDA](https://developer.nvidia.com/cudnn)
```bash
### extra the cuDNN tar ball
tar -xvf cudnn-8.0
cd cuda
sudo cp lib64/* /usr/local/cuda-8.0/lib64
sudo cp include/* /usr/local/cuda-8.0/include
```
### Use sample code to test Tensorflow
* Save code below to file test.py
```python
import numpy as np
import tensorflow as tf
### Model parameters
W = tf.Variable([.3], tf.float32)
b = tf.Variable([-.3], tf.float32)
### Model input and output
x = tf.placeholder(tf.float32)
linear_model = W * x + b
y = tf.placeholder(tf.float32)
### loss
loss = tf.reduce_sum(tf.square(linear_model - y)) ### sum of the squares
### optimizer
optimizer = tf.train.GradientDescentOptimizer(0.01)
train = optimizer.minimize(loss)
### training data
x_train = [1,2,3,4]
y_train = [0,-1,-2,-3]
### training loop
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init) ### reset values to wrong
for i in range(1000):
sess.run(train, {x:x_train, y:y_train})
### evaluate training accuracy
curr_W, curr_b, curr_loss = sess.run([W, b, loss], {x:x_train, y:y_train})
print("W: %s b: %s loss: %s"%(curr_W, curr_b, curr_loss))
```
* Test with tensorflow-gpu (GPU enabled)
```bash
source ~/.envs/tfgpu/bin/activate
python3 test.py
## You will probably see the result as follow
## ....
## name: GeForce GTX 850M
## major: 5 minor: 0 memoryClockRate (GHz) 0.9015
## pciBusID 0000:0a:00.0
## Total memory: 3.95GiB
## Free memory: 3.58GiB
## 2017-04-25 10:25:59.640621: I tensorflow/core/common_runtime/gpu/gpu_device.cc:908] DMA: 0
## 2017-04-25 10:25:59.640626: I tensorflow/core/common_runtime/gpu/gpu_device.cc:918] 0: Y
## 2017-04-25 10:25:59.640640: I tensorflow/core/common_runtime/gpu/gpu_device.cc:977]
### Creating TensorFlow device (/gpu:0) -> (device: 0, name: GeForce GTX 850M, pci ## bus id: 0000:0a:00.0)
## W: [-0.9999969] b: [ 0.99999082] loss: 5.69997e-11
```
<file_sep>+++
title = "Python Web Framework"
description = "Introduction of Django: Most popular python web framework"
+++
> Django is a high-level Python Web framework that encourages rapid development and clean, pragmatic design. Built by experienced developers, it takes care of much of the hassle of Web development, so you can focus on writing your app without needing to reinvent the wheel. It’s free and open source.
## Install Python, pip and virtualenv
* Windows: Please find it on the Home Page
* Ubuntu: Please find it from [Ubuntu setup](/os/ubuntu-server-14)
* Python 3.4 (released March 2014) and Python 2.7.9 (released December 2014) ship with Pip.
* You can simply use pip or pip3 install any package you need.
## **Windows**
### Install django
* create a folder virtualenvs within the location of python 3
* create a new virtualenv named django18
* Activate the new virtual env
* Install Django 1.x.x (LTS version)
```bash
cd /path/to/python3
cd virtualenvs
virtualenv django18
cd django18
Scripts\activate
which python
pip install django==1.x.x
```
### Create django project
* SET PATH in the current command prompt
* Navigate to workspace folder
* Create new django project
* Start the app
```bat
SET PATH=c:\apps\python3\virtualenvs\django18\Scripts;%PATH%
which django-admin
cd c:\ws\python\django\
django-admin startproject demo
cd demo
python manage.py runserver
```
### Create a new app module
```bash
python manage.py startapp main
```
### Create a new db
__following commands are just tested in django 1.8__
```python
python manage.py syncdb
python manage.py makemigrations new_app
## show the SQL for the migration
python manage.py sqlmigrate new_app 0001
## apply the migration
python manage.py migrate new_app 0001
```
### use python shell Model API
```python
python manage.py shell
>>> from XXX.models import ModelClass
>>> ModelClass.objects.all()
>>> ModelClass.objects.get(pk =1 )
>>> ModelClass.objects.filter( fieldName1="abc")
>>> mc = ModelClass( fieldName1 = "abc", fieldName2="def", fieldName3 = 3 )
>>> mc.save()
>>> mc = ModelClass.objects.get(pk=1)
>>> mc.delete()
```
## **Linux**
```bash
pip install virtualenv
pip3 install virtualenv
cd ~
mkdir .envs
## create python2 env
virtualenv -p /usr/bin/python2.7 py2env
virtualenv -p /usr/bin/python3.4 py3env
cd py2env
source bin/activate
## Check python path
which python
## Exit
deactivate
```
**Activating a virtual environment requires `source` instead of executing the script directly**
<file_sep>+++
title = "Kubernetes Cluster in 5min"
description="5 Mins to create a kubernetes cluster on Ubuntu or Centos Linux machine"
+++
## Kubernetes
> Kubernetes (K8s) is an open-source system for automating deployment, scaling, and management of containerized applications.
> It groups containers that make up an application into logical units for easy management and discovery. Kubernetes builds upon 15 years of experience of running production workloads at Google, combined with best-of-breed ideas and practices from the community.
## Purpose
This project focuses on the training and demonstration. Please DO NOT use it in production environment.
## Screenshot of kubernetes dashboard
* Below is a snapshot of what you'll get after completing all steps
> 
## Prerequisites
* You have a Linux machine in place. Physical or virtual machine doesn't matter.
* The Linux OS is supposed to be one of the following distros: Ubuntu 16+, 18+ or CentOS 7.
* Internet is available on your machine
## Caveat
* Use Virtual Machine to test it before running it on physical machine
* VirtualBox or VMWare is a good option.
* Kubernetes is supposed to run on Linux server, but it should be able to run on desktop version as well.
## Steps to use the script
* Setup SSH Server if you need
* Get the script from my github repo
* Switch to root user and run the script
```bash
# Ubuntu
wget https://github.com/harryho/kube-cluster-in-5mins/blob/master/ubuntu/kube-cluster.sh
# CentOS
wget https://github.com/harryho/kube-cluster-in-5mins/blob/master/centos/kube-cluster.sh
# Switch user
sudo su
./kube-cluster.sh -h
```
## Troubleshooting
* Hardware ( Enable VT-x, 2+ Core , 2G RAM )
* The scripts only support ubuntu 16+ or CentOS 7+
* Swap must be off
* disable swap immediately
```bash
sudo swapoff -a
```
- comment out the swap drive from fstab
```bash
# swap was on ...
# UUID=XXXXXXX-XXXXX-XXXX
```
## [Repository of scripts](https://github.com/harryho/kube-cluster-in-5mins.git)
<file_sep>+++
title = "Sql Server Note - 1"
description="Introduction of MS Sql Server"
+++
## SQL Server
> Microsoft SQL Server is a relational database management system developed by Microsoft. As a database server, it is a software product with the primary function of storing and retrieving data as requested by other software applications—which may run either on the same computer or on another computer across a network (including the Internet).
### Get db/table size or space
* Get db size
```sql
-- Get database size
SELECT sys.databases.name,
CONVERT(VARCHAR,SUM(size)*8/1024)+' MB' AS TotalDiskSpaceMB ,
CONVERT(VARCHAR,SUM(size)*8/1024/1024)+' GB' AS TotalDiskSpaceGB
FROM sys.databases
JOIN sys.master_files
ON sys.databases.database_id=sys.master_files.database_id
GROUP BY sys.databases.name
ORDER BY TotalDiskSpaceMB
-- Get database space & unallocated space
exec sp_spaceused
--
```
* Get table size
```sql
SELECT
t.NAME AS TableName,
s.Name AS SchemaName,
p.rows AS RowCounts,
SUM(a.total_pages) * 8 AS TotalSpaceKB,
CAST(ROUND(((SUM(a.total_pages) * 8) / 1024.00), 2) AS NUMERIC(36, 2))
AS TotalSpaceMB,
SUM(a.used_pages) * 8 AS UsedSpaceKB,
CAST(ROUND(((SUM(a.used_pages) * 8) / 1024.00), 2) AS NUMERIC(36, 2))
AS UsedSpaceMB,
(SUM(a.total_pages) - SUM(a.used_pages)) * 8 AS UnusedSpaceKB,
CAST(ROUND(((SUM(a.total_pages) - SUM(a.used_pages)) * 8) / 1024.00, 2) AS NUMERIC(36, 2))
AS UnusedSpaceMB
FROM sys.tables t
INNER JOIN sys.indexes i ON t.OBJECT_ID = i.object_id
INNER JOIN sys.partitions p ON i.object_id = p.OBJECT_ID AND i.index_id = p.index_id
INNER JOIN sys.allocation_units a ON p.partition_id = a.container_id
LEFT OUTER JOIN sys.schemas s ON t.schema_id = s.schema_id
WHERE t.NAME NOT LIKE 'dt%'
AND t.is_ms_shipped = 0
AND i.OBJECT_ID > 255
GROUP BY t.Name, s.Name, p.Rows
ORDER BY t.Name;
```
### Get full text search objects
```sql
SELECT
SCHEMA_NAME(tbl.schema_id) as SchemaName,
tbl.name AS TableName,
FT_ctlg.name AS FullTextCatalogName,
i.name AS UniqueIndexName,
scols.name AS IndexedColumnName
FROM
sys.tables tbl
INNER JOIN
sys.fulltext_indexes FT_idx
ON
tbl.[object_id] = FT_idx.[object_id]
INNER JOIN
sys.fulltext_index_columns FT_idx_cols
ON
FT_idx_cols.[object_id] = tbl.[object_id]
INNER JOIN
sys.columns scols
ON
FT_idx_cols.column_id = scols.column_id
AND FT_idx_cols.[object_id] = scols.[object_id]
INNER JOIN
sys.fulltext_catalogs FT_ctlg
ON
FT_idx.fulltext_catalog_id = FT_ctlg.fulltext_catalog_id
INNER JOIN
sys.indexes i
ON
FT_idx.unique_index_id = i.index_id
AND FT_idx.[object_id] = i.[object_id];
```
### Find the table
* Find table by naming pattern
```sql
SELECT distinct t.name AS 'TableName'
FROM sys.columns c
JOIN sys.tables t ON c.object_id = t.object_id
WHERE t.name LIKE '%bk%'
ORDER BY TableName;
```
* Find table by column name
```sql
SELECT c.name AS 'ColumnName'
,t.name AS 'TableName'
FROM sys.columns c
JOIN sys.tables t ON c.object_id = t.object_id
WHERE c.name LIKE '%MyName%'
ORDER BY TableName
,ColumnName;
```
### Restore user login after db restore
```sql
EXEC sp_change_users_login Report
EXEC sp_change_users_login 'Auto_Fix', 'your_username', NULL, 'your_password';
```
### Create a new login
```sql
USE [master]
GO
CREATE LOGIN [sql_user_id] WITH PASSWORD=N'<PASSWORD>',
DEFAULT_DATABASE=[Your_Database], CHECK_EXPIRATION=OFF, CHECK_POLICY=OFF
GO
USE [Your_Database]
GO
CREATE USER [sql_user_id] FOR LOGIN [sql_user_id]
GO
ALTER USER [sql_user_id] WITH DEFAULT_SCHEMA=[sql_user_id]
GO
CREATE SCHEMA [sql_user_id] AUTHORIZATION [sql_user_id]
GO
ALTER ROLE [db_datareader] ADD MEMBER [sql_user_id]
GO
ALTER ROLE [db_datawriter] ADD MEMBER [sql_user_id]
GO
```
### Get Connection Info
```sql
EXEC sp_who
GO
EXEC sp_who @loginname='user_id'
GO
-- Enhanced version
EXEC sp_who2
GO
```
<file_sep>+++
date = "2016-12-07T16:56:21+11:00"
title = "Laravel MVC Starter"
description = "This starter is the starting point of laravel 5 MVC project. "
+++
## Summary
This starter is the starting point of laravel 5 MVC project. This application is meant to be used as a starting place for those looking to get their feet wet with laravel.
## Overview of project
### BDD ( Business domain design)
``` ini
+-------+ 0...* 0...* +--------+ 1 0...* +---------+
| tag | --------------- | post | ----- ----- | comment |
+-------+ +--------+ +---------+
| 1...*
|
| 0...*
+--------+
| like |
+--------+
```
### Structure of starter
```
\path\to\lara-mvc-starter
+---app <-// Customized PHP application code
| +---Console
| +---Exceptions
| +---Http
| | +---Controllers
| | | \---Auth
| | \---Middleware
| \---Providers
+---bootstrap <-// bootstrap the framework and configure autoloading
| \---cache
+---config <-// application configuration
+---database <-// database migration files
| +---factories
| +---migrations
| \---seeds
+---public <-// Distributed website folder including Style sheet
+---resources <-// View files, Javascripts, localization setting
| +---assets
| +---lang
| \---views
| +---admin
| +---blog
| +---errors
| +---layouts
| +---other
| +---partials
| \---vendor
| \---pagination
+---routes <-// Route definitions and setting
+---storage <-//Blade templates, file based sessions, file caches
| +---app
| | \---public
| +---framework
| | +---cache
| | +---sessions
| | \---views
| \---logs
+---tests
\---vendor <-// Laravel framework
```
## Screenshots
> 
### Browse [Repository](https://github.com/harryho/lara-mvc-starter.git)
<file_sep>+++
date = "2018-01-04T13:59:31+11:00"
title = "Lubuntu 16 desktop"
description = "Post-installation for Lubuntu 16 desktop"
draft = false
+++
Prelude
> There is no big difference in setup between Lubuntu and Ubuntu. I just want to keep the latest version of the setup for myself as a reference
## Prerequisites
* You have Lubuntu 16 in place
* Internet is available
## UFW setup
```bash
sudo ufw enable
sudo ufw allow 80/tcp
sudo ufw allow ssh
sudo ufw allow 443/tcp
sudo ufw allow 8000/tcp
```
## SSH server setup
`!!! For production environment, SSH should be secured by the CA`
```bash
sudo apt-get install openssh-server
## backup default config
sudo cp /etc/ssh/sshd_config /etc/ssh/sshd_config.factory-defaults
sudo chmod a-w /etc/ssh/sshd_config.factory-defaults
## use any editor to update sshd_config
sudo nano /etc/ssh/sshd_config
## uncomment PasswordAuthentication yes to allow remote password login
## Password authentication is only for test environment
## setup ssh auto-start onboot
sudo update-rc.d ssh defaults
```
## !!! Install the software-properties-common Package
```bash
sudo apt-get install software-properties-common python-software-properties
```
## Time Zone setup
```bash
sudo dpkg-reconfigure tzdata
```
## Install tmux
```bash
sudo apt-get install tmux
```
* Most useful tmux commands
> Ctrl+b " — split pane horizontally.
>
> Ctrl+b % — split pane vertically.
>
> Ctrl+b arrow key — switch pane.
>
> Hold Ctrl+b, don’t release it and hold one of the arrow keys — resize pane.
>
> Ctrl+b c — (c)reate a new window.
>
> Ctrl+b , — rename the current window.
>
> Ctrl+b n — move to the (n)ext window.
>
> Ctrl+b p — move to the (p)revious window.
## Install git
```bash
sudo add-apt-repository ppa:git-core/ppa
sudo apt-get update
sudo apt-get install git
```
## install docker CE (Ubuntu 16 LTS)
```bash
## Update the apt package index
sudo apt-get update
## Install packages to allow apt to use a repository over HTTPS
sudo apt-get install \
apt-transport-https \
ca-certificates \
curl \
software-properties-common
## Add Docker’s official GPG key
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
## Verify the last 8 characters of the fingerprint.
sudo apt-key fingerprint xxxxxxxx
## set up the stable repository
sudo add-apt-repository \
"deb [arch=amd64] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) \
stable"
## apt update
sudo apt-get update
## install docker CE
sudo apt-get install docker-ce
```
## Install JDK 9
* Download the JDK from the Oracle website.
```bash
sudo add-apt-repository ppa:webupd8team/java
sudo apt-get update
sudo apt-get install oracle-java9-installer
java -version
```
* Setup environment
```bash
sudo apt-get install oracle-java9-set-default
sudo apt autoremove
## Following setup is no longer required
## sudo su
## cat >> /etc/environment <<EOL
## JAVA_HOME=/usr/lib/jvm/java-9-oracle
## JRE_HOME=/usr/lib/jvm/java-9-oracle/jre
## EOL
```
* Test JDK with a simple HelloWorld program
```java
import java.util.Calendar;
class HelloWorld {
public static void main(String[] args) {
Calendar cal = Calendar.getInstance();
int year = cal.get(Calendar.YEAR);
int month = cal.get(Calendar.MONTH) + 1;
int day = cal.get(Calendar.DATE);
int hour = cal.get(Calendar.HOUR_OF_DAY);
int minute = cal.get(Calendar.MINUTE);
String username = System.getProperty("user.name");
System.out.println(username+ ": Hello World! ");
System.out.println(year + "/" + month + "/" + day + " " + hour + ":" + minute);
}
}
```
* Compile and run the program
```bash
javac HelloWorld.java
java HelloWorld
```
## Install nodejs
* Install Nodejs 8.x
```bash
curl -sL https://deb.nodesource.com/setup_8.x | sudo -E bash -
sudo apt-get install -y nodejs
```
* Install latest npm, yarn and ts
```
sudo npm install -g npm
sudo npm install -g typescript
sudo npm install -g yarn
```
## Install PHP
* Add new repo
```bash
sudo apt-get install -y python-software-properties
sudo add-apt-repository -y ppa:ondrej/php
sudo apt-get update -y
apt-cache pkgnames | grep php7.2
```
* Option 1: Install LAMP stack
```bash
sudo apt-get install -y apache2
sudo apt-get install -y php7.2 libapache2-mod-php7.2 \
php7.2-cli php7.2-common php7.2-mbstring php7.2-gd \
php7.2-intl php7.2-xml php7.2-mysql php7.1-mcrypt php7.2-zip
```
* Option 2: Install LEMP stack
```bash
sudo apt-get install -y nginx
sudo apt-get install -y php7.2 php7.2-fpm php7.2-cli \
php7.2-common php7.2-mbstring php7.2-gd php7.2-intl \
php7.2-xml php7.2-mysql php7.1-mcrypt php7.2-zip
```
* Disable Apache and Nginx if you install both
```bash
sudo systemctl disable apache2.service
sudo systemctl disable nginx.service
```
## Install Python2, Python3
* Ubuntu has python2 installed by default
```bash
sudo apt-get install python-pip
sudo apt-get install python3-pip
sudo apt-get install python3-dev python-dev
## Install virtualenv
sudo pip install virtualenv
sudo pip3 install virtualenv
```
* Install new python 3.6
```
sudo add-apt-repository ppa:deadsnakes/ppa
sudo apt-get update
sudo apt-get install python3.6
```
## Install Go
* Install Go
```bash
wget https://dl.google.com/go/go1.10.1.linux-amd64.tar.gz
## check hash
shasum -a 256 go*linux-amd64.tar.gz
## install tar ball
sudo tar -xvzf go*linux-amd64.tar.gz
sudo mv go /usr/local
```
* Setup GOROOT & GOPATH
```bash
export GOROOT="/usr/local/go"
export GOPATH="$HOME/ws/go"
export PATH="$GOROOT/bin:$GOPATH/bin:$PATH"
```
* Create a simple `hello.go` file to test
`touch ~/ws/go/src/hello/hello.go`
```go
package main
import (
"fmt"
"log"
"os/user"
)
func main(){
user, err := user.Current()
if err != nil {
log.Fatal(err)
}
fmt.Printf(user.Name + " said : Hello World! \n" )
}
```
* Run the program
```bash
go run $GOPATH/src/hello/hello.go
go install hello
$GOPATH/bin/hello
```
* Install hugo
```
# use apt
sudo apt install hugo
# use snap
sudo snap install hugo
```
## Install clang & cmake
```bash
sudo apt-get install clang
sudo apt-get install cmake
```
## Install Rust
```bash
$ curl -f -L https://static.rust-lang.org/rustup.sh -O
$ sh rustup.sh
```
## Install vim 8
* Add ppa repo
```
sudo add-apt-repository ppa:jonathonf/vim
sudo apt update
sudo apt install vim
```
* Install awesome vimrc
```
git clone --depth=1 https://github.com/amix/vimrc.git ~/.vim_runtime
sh ~/.vim_runtime/install_awesome_vimrc.sh
```
## Install MySql
* Install mysql
```
wget https://dev.mysql.com/get/mysql-apt-config_0.8.9-1_all.deb
sudo dpkg -i mysql-apt-config_0.8.9-1_all.deb
sudo apt-get install mysql-server
systemctl status mysql
mysqladmin -u root -p version
mysql -u root -p mysql
```
* create a sample table products
```
CREATE TABLE products (
id INT AUTO_INCREMENT NOT NULL,
title VARCHAR(255),
price DECIMAL(10, 2) NOT NULL,
created_at datetime,
deleted_at datetime,
tags VARCHAR(255)
,PRIMARY KEY (id)
);
load data local infile '/home/<your_name>/db/products.csv' into table products \
fields terminated by ',' enclosed by '"' lines terminated by '\n' \
(id, title, price, created_at, deleted_at, tags);
```
## Install PostgresQL
* psql is case sensitive
```
echo 'deb http://apt.postgresql.org/pub/repos/apt/ xenial-pgdg main' \
>> /etc/apt/sources.list.d/pgdg.list
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | \
sudo apt-key add -
sudo apt-get update
sudo apt-get install postgresql-10
sudo su - postgres
psql -U postgres
## Create a database from a dump
curl -L -O http://cl.ly/173L141n3402/download/example.dump
createdb pgguide
pg_restore --no-owner --dbname pgguide example.dump
psql --dbname pgguide
psql
## Rename database -- use double quote
ALTER database "pgguide" rename to "sample"
```
* export the database to sql file
```
sudo su postgres
pg_dump sample >> sample.sql
```
* export table to csv file
```
COPY products to '/home/<your_name>/db/products.csv' delimiter ',' csv;
```
* export data to json file
```
\t on
\pset format unaligned
select json_agg(t) from (select * from products) t \g products.json
```
## Install mongodb
```
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 \
--recv 2930ADAE8CAF5059EE73BB4B58712A2291FA4AD5
echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu \
xenial/mongodb-org/3.6 multiverse" \
| sudo tee /etc/apt/sources.list.d/mongodb-org-3.6.list
sudo apt-get update
sudo apt-get install -y mongodb-org
sudo service mongod start
sudo service mongod stop
sudo service mongod status
```
* Create a database `sample` and insert one record into document `products`
```json
use sample
db.products.insertOne({
id: 1,
title: "Dictionary",
price: 9.99,
created_at: "2011-01-02 07:00:00+11",
tags: "{Book}"
});
db.products.find();
```
* Import json into database
```
mongoimport --db sample --collection products --drop \
--jsonArray --file ~/db/products.json
```<file_sep>+++
title = "Every IT project involves culture change"
description="Culture Change"
draft = true
+++
## Every IT project involves culture change
<file_sep>+++
title="Error handling"
description="Error handling"
weight=5
+++
### Error handling
* Go does not have an exception mechanism, like the try/catch in Java or .NET for instance: you cannot throw exceptions. Instead it has a defer-panic-and-recover mechanism.
* The Go way to handle errors is for functions and methods to return an error object as their only or last return value—or nil if no error occurred—and for calling functions to always check the error they receive.
* Handle the errors and return from the function in which the error occurred with an error message to the user: that way if something does go wrong, your program will continue to function and the user will be notified. The purpose of panic-and-recover is to deal with genuinely exceptional (so unexpected) problems and not with normal errors.
* The idiomatic way in Go to detect and report error-conditions
- A function which can result in an error returns two variables, a value and an error-code; the latter is nil in case of success, and != nil in case of an error-condition.
- After the function call the error is checked: in case of an error (if err != nil) the execution of the actual function (or, if necessary, the entire program) is stopped, as the sketch below shows.
__!!Never ignore errors, because ignoring them can lead to program crashes!!__
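A minimal sketch of this value-plus-error idiom, using only the standard library's strconv.Atoi:
```go
package main

import (
    "fmt"
    "strconv"
)

func main() {
    // Successful call: err is nil, so the value can be used safely.
    n, err := strconv.Atoi("42")
    if err != nil {
        fmt.Println("conversion failed:", err)
        return
    }
    fmt.Println("converted value:", n)

    // Failing call: err is non-nil and must be handled, never ignored.
    if _, err := strconv.Atoi("not a number"); err != nil {
        fmt.Println("conversion failed:", err)
    }
}
```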
#### Error interface
* Go has a predefined error interface type:
```go
type error interface {
Error() string
}
```
* Defining errors
```go
err := errors.New("math - square root of negative number")
```
* Making an error-object with fmt
```go
if f < 0 {
return 0, fmt.Errorf("math: square root of negative number %g", f)
}
```
### Run-time exceptions & panicking
* When execution errors occur, such as attempting to index an array out of bounds or a type assertion failing, the Go runtime triggers a run-time panic with a value of the interface type runtime.Error, and the program crashes with a message of the error; this value has a RuntimeError()-method, to distinguish it from a normal error.
* A panic can also be initiated from code with the panic function is used, which effectively creates a run-time error that will stop the program. It takes 1 argument of any type, usually a string, to be printed out when the program dies. The Go runtime takes care to stop the program and issuing some debug information.
* If panic is called from a nested function, it immediately stops execution of the current function, all defer statements are guaranteed to execute and then control is given to the function caller, which receives this call to panic. This bubbles up to the top level, executing defers, and at the top of the stack the program crashes and the error condition is reported on the command-line using the value given to panic: this termination sequence is called __panicking__.
### Error-handling & panicking in custom package
* Best practice for custom package
a. Always recover from panic in your package: no explicit panic() should be allowed to cross a package boundary
b. Return errors as error values to the callers of your package.
* Sample
* Parse package
```go
package parse
import (
"fmt"
"strings"
"strconv"
)
// A ParseError indicates an error in converting a word into an integer.
type ParseError struct {
Index int // The index into the space-separated list of words.
Word string // The word that generated the parse error.
Err error // The raw error that precipitated this error, if any.
}
// Error returns a human-readable error message and satisfies the error interface.
func (e *ParseError) Error() string {
return fmt.Sprintf("pkg parse: error parsing %q as int", e.Word)
}
// Parse parses the space-separated words in in put as integers.
func Parse(input string) (numbers []int, err error) {
defer func() {
if r := recover(); r != nil {
var ok bool
err, ok = r.(error)
if !ok {
err = fmt.Errorf("pkg: %v", r)
}
}
}()
fields := strings.Fields(input)
numbers = fields2numbers(fields) // here panic can occur
return
}
func fields2numbers(fields []string) (numbers []int) {
if len(fields) == 0 {
panic("no words to parse")
}
for idx, field := range fields {
num, err := strconv.Atoi(field)
if err != nil {
panic(&ParseError{idx, field, err})
}
numbers = append(numbers, num)
}
return
}
```
* main package
```go
func main() {
var examples = []string{
"1 2 3 4 5",
"100 50 25 12.5 6.25",
"2 + 2 = 4",
"1st class",
""
}
for _, ex := range examples {
fmt.Printf("Parsing %q:\n ", ex)
nums, err := parse.Parse(ex)
if err != nil {
// here String() method from ParseError is used
fmt.Println(err)
continue
}
fmt.Println(nums)
}
/* Output:
Parsing "w1 2 3 4 5":
360
<NAME>
[1 2 3 4 5]
Parsing "100 50 25 12.5 6.25":
pkg parse: error parsing "12.5" as int
Parsing "2 + 2 = 4":
pkg parse: error parsing "+" as int
Parsing "1st class":
pkg parse: error parsing "1st" as int
Parsing "":
pkg: no words to parse
*/
```
### Recover
* recover is only useful when called inside a deferred function (see § 6.4) : it then retrieves the error value passed through the call of panic; when used in normal execution a call to recover will return nil and have no other effect.
* Summarized: panic causes the stack to unwind until a deferred recover() is found or the program terminates
#### Similar try-catch block in Go
```go
func protect(g func()) {
defer func() {
log.Println("done")
// Println executes normally even if there is a panic
if err := recover(); err != nil {
log.Printf("run time panic: %v", err)
}
}()
log.Println("start")
g() // possible runtime-error
}
```
#### Sample of panic, defer & recover
```go
func badCall() {
panic("bad end")
}
func test() {
defer func() {
if e := recover(); e != nil {
fmt.Printf("Panicking %s\r\n", e)
}
}()
badCall()
fmt.Printf("After bad call\r\n")
}
func main() {
fmt.Printf("Calling test\r\n")
test()
fmt.Printf("Test completed\r\n")
}
```
### An error-handling scheme with closures
* Combining the defer/panic/recover mechanism with closures can result in a far more elegant scheme that we will now discuss. However it is only applicable when all functions have the same signature, which is rather restrictive.
* The scheme uses 2 helper functions:
i) check: a function which tests whether an error occurred, and panics if so:
```go
func check(err error) { if err != nil { panic(err) } }
```
ii) errorhandler: this is a wrapper function. It takes a function fn of our type fType1 and returns such a function by calling fn. However it contains the defer/recover mechanism
```go
func errorHandler(fn fType1) fType1 {
return func(a type1, b type2) {
defer func() {
if e, ok := recover().(error); ok {
log.Printf("run time panic: %v", err)
}
}()
fn(a, b)
}
}
```
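Putting the two helpers together, a hypothetical end-to-end usage could look like the sketch below; the concrete signature (two int parameters) and the divide function are assumptions for illustration, since fType1 is left abstract above.
```go
package main

import (
    "errors"
    "log"
)

// Assumed concrete signature standing in for the abstract fType1 above.
type fType1 func(a int, b int)

func check(err error) {
    if err != nil {
        panic(err)
    }
}

func errorHandler(fn fType1) fType1 {
    return func(a int, b int) {
        defer func() {
            if e, ok := recover().(error); ok {
                log.Printf("run time panic: %v", e)
            }
        }()
        fn(a, b)
    }
}

// divide is a hypothetical function following the fType1 signature.
func divide(a int, b int) {
    if b == 0 {
        check(errors.New("division by zero"))
    } else {
        log.Printf("%d / %d = %d", a, b, a/b)
    }
}

func main() {
    safeDivide := errorHandler(divide)
    safeDivide(10, 2) // logs the result
    safeDivide(1, 0)  // panic inside is recovered and logged by the wrapper
}
```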
### Start external program
```go
func main() {
// 1) os.StartProcess //
/*********************/
/* Linux: */
env := os.Environ()
procAttr := &os.ProcAttr{
Env: env,
Files: []*os.File{
os.Stdin,
os.Stdout,
os.Stderr,
},
}
pid, err := os.StartProcess("/bin/ls", []string{"ls", "-l"}, procAttr)
if err != nil {
fmt.Printf("Error %v starting process!", err) //
os.Exit(1)
}
fmt.Printf("The process id is %v", pid)
/* Output:
The process id is &{21275 0 0 {{0 0} 0 0 0 0}}The process id is &{21276 0 0 {{0 0} 0 0 0 0}}total 54
-rwxrwxrwx 1 root root 250 Sep 21 19:33 csv_data.txt
-rwxrwxrwx 1 root root 25227 Oct 4 23:34 hello.go
-rwxrwxrwx 1 root root 6708 Sep 21 10:25 hello.go.txt
-rwxrwxrwx 1 root root 130 Sep 21 11:08 output.txt
-rwxrwxrwx 1 root root 8898 Sep 21 12:10 target_hello.txt
-rwxrwxrwx 1 root root 1619 Sep 22 14:40 urlshorten.go.txt
-rwxrwxrwx 1 root root 182 Sep 21 13:50 vcard.json
*/
// 2nd example: show all processes
pid, err = os.StartProcess("/bin/ps", []string{"-e", "opid,ppid,comm"}, procAttr)
if err != nil {
fmt.Printf("Error %v starting process!", err) //
os.Exit(1)
}
fmt.Printf("The process id is %v", pid)
// 2) cmd.Run //
/***************/
cmd := exec.Command("gedit") // this opens a gedit-window
err = cmd.Run()
if err != nil {
fmt.Printf("Error %v executing command!", err)
os.Exit(1)
}
fmt.Printf("The command is %v", cmd)
}
```
### Testing
* Table-driven test
```go
// Test table
var tests = []struct {
    in  string
    out string
}{
{"in1", "exp1"},
{"in2", "exp2"},
{"in3", "exp3"},
// ....
}
func verify(t *testing.T, testnum int, testcase, input, output, expected string) {
if input != output {
t.Errorf("%d. %s with input = %s: output %s != %s", testnum, testcase, input, output, expected)
}
}
func TestFunction(t *testing.T) {
for i, tt := range tests {
s := FuncToBeTested(tt.in)
verify(t, i, "FuncToBeTested: ", tt.in, s, tt.out)
}
}
```<file_sep>+++
title = "Package & Module"
description="Good practices for package & module"
weight=1
+++
## Package & Module
---
* Packages are modules that contain other modules.
* Packages are generally implemented as directories containing a special `__init__.py` file.
* The `__init__.py` file is executed when the package is imported.
* Packages can contain sub packages which themselves are implemented with `__init__.py` files in directories.
* The module objects for packages have a `__path__` attribute.
### sys.path
* List of directories which Python searches for modules.
```python
# list directories
>>>import sys
>>>sys.path
```
* Use `append` to attach the package directory to sys.path
* Append the package to sys.path
* If you append the relative path of the package to sys.path, you need to make sure it is correct.
* Example
* path: path_root\package0\module0.py
* The code of module0.py
```python
def test():
print('module0 -- test !')
```
* Test module importing
```python
cd path_root
python
>>>import sys
>>>sys.path.append('package0')
>>>import module0
>>>module0.test
module0 -- test !
>>>exit()
# It will fail if you launch python at the parent directory of root
cd ..
python
>>>import sys
>>>sys.path.append('package0')
>>>import module0
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ImportError: No module named 'module0'
### It will succeed if you adjust the relative path as below
>>>sys.path.append('path_root/package0')
>>>import module0
>>>module0.test
module0 -- test !
```
## PYTHONPATH
* Environment variable adds paths to sys.path
* Use previous `module0.py` to test
* Linux
```bash
export PYTHONPATH=package0
python
>>>import module0
>>>module0.test()
module0 -- test !
```
* Windows
```
set PYTHONPATH=package0
python
>>>import module0
>>>module0.test()
module0 -- test !
```
### Package structure
---
* Convert a package into a module
* Basic structure of package0
```bash
path_root <--// it must be attached sys.path
+---package0 <--// package root
+---__init__.py <--// package init file
\---module0.py
```
* Sample code - `__init__.py` ( The sample code is for demo purpose)
```python
print('package0 --init...')
```
* Test
```
python
>>>import package0
package0 --init...
>>>import package0.module0
>>>package0.module0.test()
module0 -- test !
```
* Add a `FileReader` class into `module0.py`
* Sample code of `module0.py`
```python
class FileReader:
def __init__(self, filename):
self.filename = filename
self.f = open(self.filename, 'rt')
def close(self):
self.f.close()
def read(self):
return self.f.read()
```
* Test
```python
>>>fr=package0.module0.FileReader('package0/module0.py')
>>>fr.read()
>>>fr.close()
```
* Update `__init__.py`
```
from package0.module0 import FileReader
```
* Test again
```python
>>>import package0
>>>r=package0.FileReader('package0/module0.py')
>>>r.read()
>>>r.close()
```
### Subpackage
---
* Demo below shows how to add subpackages
* Add sub-package under the `package0`
* Structure
```
path_root <--// it must be attached sys.path
+---package0 <--// package root
+---compress
| +---__init__.py
| +---bz.py
| +---gz.py
+---__init__.py <--// package init file
\---module0.py
```
* Sample code - `gz.py`
```python
import gzip
import sys
opener=gzip.open
if __name__ == '__main__':
f = gzip.open(sys.argv[1], mode='wt')
f.write(' '.join(sys.argv[2:]))
f.close()
```
* Sample code - `bz.py`
```python
import bz2
import sys
opener = bz2.open
if __name__ == '__main__':
f = bz2.open(sys.argv[1], mode='wt')
f.write(' '.join(sys.argv[2:]))
f.close()
```
* Test by creating two compressed files
```
python3 -m package0.compress.gz test.gz data compressed with gz
python3 -m package0.compress.bz test.bz2 data compressed with bz2
```
* Change `FileReader.py` to read above files
```python
from package0.compress import gz, bz
import os
extension_map = {
'.gz':gz.opener,
'.bz2':bz.opener
}
class FileReader:
def __init__(self, filename):
self.filename = filename
extension = os.path.splitext(filename)[1]
opener = extension_map.get(extension, open)
self.f = opener(self.filename, 'rt')
def close(self):
self.f.close()
def read(self):
return self.f.read()
```
* Test
```python
>>> import package0
>>> r=package0.FileReader('test.gz')
>>> r.read()
'data compressed with gz'
>>> r=package0.FileReader('test.bz2')
>>> r.read()
'data compressed with bz2'
```
### Import with relative path
---
* Example below show how to use relative path to import packages
```
path_root
+---package0
+---compress
| +---__init__.py
| +---bz.py <--// from ..module0 import FileReader
| +---gz.py <--// from .bz import bz.opener
+---__init__.py
\---module0.py
```
### Namespace package
---
* Namespace packages have no `__init__.py`
* Python scans all entries in `sys.path`.
* If a matching directory with `__init__.py` is found, a normal package is loaded
* Otherwise, all matching directories in `sys.path` are considered part of the namespace package
* Example 1
* Structure of package
```
path_root0
+---package0
+---module0.py
path_root1
+---package0
+---module0.py
```
* Test
```
>>> import sys
>>> sys.path.append('gh')
>>> sys.path.append('path_root0')
>>> sys.path.append('path_root1')
>>> import package0
>>> package0.__path__
_NamespacePath(['gh\\package0', 'path_root0\\package0', 'path_root1\\package0'])
```
* Example 2
* Structure of package
```
path_root0
+---package0
+---module0.py
path_root1
+---package0
+---module0.py
path_root2
+---package0
+---__init__.py <--// Namespace should not include __init__.py
+---module0.py
```
* Test
```
>>> import sys
>>> sys.path.append('path_root0')
>>> sys.path.append('path_root1')
>>> sys.path.append('path_root2')
>>> import package0
>>> package0.__path__
_NamespacePath(['path_root2\\package0'])
```
### Recommended Executable directories
---
```
project_root <--// Project root directory contains everything
+---__main__.py
+---project_name
| +---__init__.py
| +---resource.py
| +---package0
| | +---__init__.py
| | +---module0.py
| +---test
| +---__init__.py
| +---test.py
\---setup.py
```
### Visual Studio Code setup
---
* Install python plugin `Python - donjayamanne`
* Setup `launch.json`
```json
{
"version": "0.2.0",
"configurations": [
{
"name": "Python",
"type": "python",
"request": "launch",
"stopOnEntry": true,
"pythonPath": "${config:python.pythonPath}",
"program": "${workspaceRoot}/__main__.py",
"cwd": "${workspaceRoot}",
"env":{},
"envFile": "${workspaceRoot}/.env",
"debugOptions": [
"WaitOnAbnormalExit",
"WaitOnNormalExit",
"RedirectOutput"
]
}
]
...
}
```
* Sample code - `__main__.py`
```python
from package1 import FileReader
if __name__ == "__main__":
app = FileReader('C:/ws/python/plural/pbb/gh/test.gz')
print(app.read())
app.close()
app = FileReader('C:/ws/python/plural/pbb/gh/test.bz2')
print(app.read())
app.close()
```
### Function & Lambda
## Function
* statement which defines a function and binds it to a name
* Must have a name
* Arguments delimited by parentheses, separated by commas
* Zero or more arguments supported - zero arguments ⇒ empty parentheses
* Body is an indented block of statements
* A return statement is required to return anything other than None
* Regular functions can have docstrings, which are easy to access for testing (see the sketch below)
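A small sketch of these points, a named function with a docstring used as a sorting key (the sample names are purely illustrative):
```python
def last_name(full_name):
    """Return the last word of a full name.

    Being a regular function, this docstring is easy to inspect
    with help(last_name) and the function is easy to exercise in tests.
    """
    return full_name.split()[-1]


names = ['Alice Zimmer', 'Bob Young', 'Carol Xu']
print(sorted(names, key=last_name))   # ['Carol Xu', 'Bob Young', 'Alice Zimmer']
```
The same key could be written as the lambda shown in the next section; the def form simply gives it a name and a docstring.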
## Lambda
* Expression which evaluates to a function
* Anonymous
* Argument list terminated by colon, separated by commas
* Zero or more arguments supported; zero arguments ⇒ `lambda:`
* Body is a single expression
* The return value is given by the body expression. No return statement is permitted.
* Lambdas cannot have docstrings
* Awkward or impossible to test
* Example
```python
>>> names = list(['<NAME>', '<NAME>', '<NAME>'])
>>> sorted(names, key=lambda name: name.split()[-1])
['<NAME>', '<NAME>', '<NAME>']
```
## Callable
---
* Callable instance
* Example
* Sample code - `Resolver.py`
```python
import socket

class Resolver:
def __init__(self):
self.cache={}
def __call__(self, host):
if host not in self.cache:
self.cache[host]= socket.gethostbyname(host)
return self.cache[host]
```
* Sample code - `__main__.py`
```python
from package1 import Resolver
if __name__ == "__main__":
app = Resolver()
print(app('harryho.github.io'))
print(app.__call__('harryho.github.io'))
```
* Callable class
```python
>>>seq_class_1 = list
>>>sequence= seq_class_1('abc')
>>>type(sequence)
<class 'list'>
>>>seq_class_1 = tuple
>>>sequence= seq_class_1('abc')
>>>type(sequence)
<class 'tuple'>
```
## Extended arguments and call
---
* Extended arguments - syntax: `def extend(*args, **kwargs)`
* Example
```python
>>> def tag(name, **attrs):
... t='<'
... for k,v in attrs.items():
... t+= '{key}="{val}"'.format(key=k, val=str(v))
... t+='>'
... return t
...
>>> tag('a', href="harryho.github.io", target="_blank", id="link")
'<href="harryho.github.io"id="link"target="_blank">'
```
* Extended call - sample
```python
>>> def f1 ( a1, a2, *a3):
... print(a1)
... print(a2)
... print(a3)
...
>>> aa=(2,3)
>>> f1(aa)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: f1() missing 1 required positional argument: 'a2'
>>> f1(*aa)
2
3
()
>>> def f2(a1, a2):
... print(a1)
... print(a2)
...
>>> f2(*aa)
2
3
>>> aa=(1,2,3,4)
>>> f2(*aa)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: f2() takes 2 positional arguments but 4 were given
```
* Transpose example
```python
>>> mon=[12,13,15,12,14,18,13]
>>> tue=[11,14,16,12,11,17,14]
>>> for d in zip(mon, tue):
... print(d)
...
(12, 11)
(13, 14)
(15, 16)
(12, 12)
(14, 11)
(18, 17)
(13, 14)
>>> daily = [mon, tue]
>>> from pprint import pprint as pp
>>> pp(daily)
[[12, 13, 15, 12, 14, 18, 13], [11, 14, 16, 12, 11, 17, 14]]
>>> transposed = list(zip(*daily))
>>> pp(transposed)
[(12, 11), (13, 14), (15, 16), (12, 12), (14, 11), (18, 17), (13, 14)]
```
<file_sep>+++
date = "2017-09-20T14:59:31+11:00"
title = "Grub Trouble Shooting"
description = "Common steps to trouble shoot the Grub booting problem "
draft = false
+++
## Update Grub Menu for dual OS boot
**Change BIOS**
* Start your PC by pressing a special function key (usually F12, F10 or F2 depending on the vendor specifications).
* Some PC's BIOS has `BOOT` tab option, open the `BOOT` tab, you will find the `OS Boot Manager`. It is the simplest way to fix the issue. If your PC's BIOS has no such setting feature, you need to check the next section.
**Change the Windows Boot Manager**
* Log in to Windows with the Command Prompt
* Restart windows, meanwhile press shift key
* In the options page, choose change to other options
* Troubleshooting
* Command Prompt
* Log in to Windows with the Command Prompt
* Use BCDEdit to change the Windows Boot Manager so that Ubuntu boots first
```bash
REM backup
bcdedit /enum > X:\Users\public\documents\bcdedit.txt
REM change the bootmgr
bcdedit /set {bootmgr} path \EFI\ubuntu\grubx64.efi
```
* After you reboot system, you will see the Grub 2 menu as follow.
```bash
GNU GRUB version 2.0
----------------------------------------------------------------------------------
| Ubuntu
| Advanced options for Ubuntu
| Windows Boot Manager ( on /dev/sda2 )
| Fedora 20
| Advanced options for Fedora 20
| OpenSuse
| Advanced options for OpenSuse
| ....
```
## Boot from the Grub command prompt
* Use `Esc` to navigate to the Grub command prompt from the Grub menu
* List all available drives by typing `ls`. You will see several drives if you have multiple hard drives, USB sticks or SD cards plugged in.
```
(hd0) (hd0,gpt4) (hd0, gpt3) (hd0,gpt2) (hd0, gpt1) (hd1) (hd1,msdos2)(hd1, msdos2)(hd2)
## Get more detail of drives
ls -l
```
* From the detailed information above, you should be able to identify your PC's hard drive. Continue to use `ls` to locate the actual boot files and confirm which drive contains them.
```
## Assume that (hd0,gpt2) contains the linux kernel boot file.
ls -a (hd0,gpt2)/
```
* Set root drive
```
set root=(hd0,gpt2)
linux (hd0,gpt2)/boot/vmlinuz-linux-4.4.x-xxx-generic
initrd (hd0,gpt2)/boot/initrd.img-linux-4.4.x-xxx-generic
normal
```
<file_sep>+++
title = "AWS: RDS - 1"
description = "RDS - Native Backup & Restore"
weight=20
+++
## RDS
Amazon Relational Database Service (Amazon RDS) makes it easy to set up, operate, and scale a relational database in the cloud. It provides cost-efficient and resizable capacity while automating time-consuming administration tasks such as hardware provisioning, database setup, patching and backups.
### Backup & Restore SQL Server
#### Backup database to S3
* Assumption
* DB name: sample_db
* S3 bucket name: sql-server-backup
* Backup with built-in stored proc
```sql
exec msdb.dbo.rds_backup_database
    @source_db_name='sample_db',
    @s3_arn_to_backup_to='arn:aws:s3:::sql-server-backup/sample_db_20191221.bak',
    @overwrite_S3_backup_file=1;
```
* Track status
```sql
exec msdb.dbo.rds_task_status @db_name='sample_db'
```
#### Restore DB into EC2
* Restore with powershell
```powershell
#Copy-S3Object -BucketName 'nsw-prod-s3-sql-backups' \
# -Key sample_db-20190531011003.bak \
# -LocalFile L:\Backups\Automated\sample_db-20190531011003.bak
Restore-SqlDatabase -ServerInstance 'localhost' -Database "sample_db" `
 -BackupFile "L:\Backups\Automated\sample_db-20190531011003.bak" `
 -ReplaceDatabase -KeepReplication -Verbose
```
#### Restore DB into RDS
* Create IAM role which can access the backup file in the S3 bucket. e.g. **RDS_RESTORE_S3_READONLY**
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"s3:ListBucket",
"s3:GetBucketLocation"
],
"Resource": [
"arn:aws:s3:::sql-server-backup"
]
},
{
"Effect": "Allow",
"Action": [
"s3:GetObjectMetaData",
"s3:GetObject",
"s3:PutObject",
"s3:ListMultipartUploadParts",
"s3:AbortMultipartUpload"
],
"Resource": [
"arn:aws:s3:::sql-server-backup/*"
]
}
]
}
```
* Create a new option group, or copy or modify an existing option group.
* Add the **SQLSERVER_BACKUP_RESTORE** option to the option group.
* Associate the IAM role with the option. The IAM role must have access to an S3 bucket to store the database backups. e.g. **RDS_RESTORE_S3_READONLY**
* Associate the option group with the DB instance.
* Make sure the RDS has enough space to restore the db backup.
* Restore the database
```sql
EXEC msdb.dbo.rds_restore_database
@restore_db_name='sample_db',
@s3_arn_to_restore_from='arn:aws:s3:::sql-server-backup/sample_db_20191221.bak'
GO
```
* Check the progress
```sql
EXEC msdb.dbo.rds_task_status
@db_name='sample_db'
GO
```
#### Caveat
* After you modify the storage size for a DB instance, the status of the DB instance is storage-optimization. The DB instance is fully operational after a storage modification.
* You can't make further storage modifications until **six (6) hours** after storage optimization has completed on the instance.
* You can't reduce the amount of storage for a DB instance after storage has been allocated.
### Rename database
* Rename db with built-in stored proc
```sql
EXEC rdsadmin.dbo.rds_modify_db_name N'<OldName>', N'<NewName>'
```
* Troubleshoot - "The database could not be exclusively locked to perform the operation."
```sql
-- Kill the blocking connections first
EXEC sp_who
GO
KILL <spid>
GO
EXEC sp_who
GO
```
<file_sep>+++
title="Projects"
weight = 1
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
title = "Adv Bash - 4"
description = "File & Standard IO "
draft=true
+++
##
<file_sep>+++
title = "Iteration"
description="Iterables & Iteration"
weight=4
+++
## Iterables & Iteration
### Comprehensions
* Comprehensions can process more than one input sequence
* Multiple input sequences in comprehensions work like nested for-loops
* Comprehensions can also have multiple if-clauses interspersed with the for-clauses
* Later clauses in a comprehension can reference variables bound in earlier clauses
* Comprehension can also appear in the result expression of a comprehension, resulting in nested sequences
#### Example
```python
## Comprehensions
>>> points
[(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3)]
>>> points_c=[(x,y) for x in range(3) for y in range(4) ]
>>> points_c
[(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3)]
>>> points_d=[(x,y)
... for x in range(3)
... if x > 0
... for y in range(4)
... if y > x ]
>>> points_d
[(1, 2), (1, 3), (2, 3)]
>>> points_e = [ [ y-1 for y in range(x) ] for x in range(3)]
>>> points_e
[[], [-1], [-1, 0]]
```
### Functional-style tools
* Python provides a number of functional-style tools for working with iterators
#### Map
* `map()` calls a function for each element in its input sequences
* `map()` returns an iterable object, not a fully-evaluated collection
* `map()` results are lazily evaluated, meaning that you must access them to
force their calculation
* `map()` results are typically evaluated through the use of iteration constructs such as for-loops
* You must provide as many input sequences to `map()` as the callable argument has parameters
* `map()` takes one element from each input sequence for each output element
it produces
* `map()` stops producing output when its shortest input sequence is exhausted
* `map()` can be used to implement the same behavior as comprehensions in some cases
##### Example
```python
>>> sizes =['small','medium', 'large']
>>> colors = ['lavender', 'teal', 'burnt orange']
>>> animals = ['koala', 'platypus', 'salamander']
>>> def combine( size, color, animal):
... return '{} {} {}'.format(size, color, animal)
...
>>> list(map(combine, sizes, colors, animals ))
['small lavender koala', 'medium teal platypus', 'large burnt orange salamander']
>>> import itertools
>>> def combine2(quantity, size, color, animal):
... return '{} - {} {} {}'.format(quantity, size, color, animal)
...
>>> list(map(combine2, itertools.count(), sizes, colors, animals ))
['0 - small lavender koala', '1 - medium teal platypus', '2 - large burnt orange salamander']
```
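To make the lazy-evaluation point concrete, a small REPL sketch (the object address is elided):

```python
>>> m = map(str, range(3))
>>> m                          # map() returns a lazy iterator, not a list
<map object at 0x...>
>>> list(m)                    # iterating forces the calculation
['0', '1', '2']
>>> list(m)                    # the iterator is now exhausted
[]
```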
#### Filter
* `filter()` selects values from an input sequence which match a specified criteria
* `filter()` passes each element in its input sequence to the function argument
* `filter()` returns an iterable over the input elements for which the function argument is truthy
* Like `map()`, `filter()` produces its output lazily
* If you pass None as the first argument to `filter()`, it yields the input values which evaluate to True in a boolean context
* `reduce()` (covered next) cumulatively applies a function to the elements of an input sequence
##### Example
```python
## filter
>>> negs = filter(lambda x: x<0 , [3, -1, 2,-4,0,9,-33])
>>> list(negs)
[-1, -4, -33]
>>> notnones = filter(None, [0,1, False, True, [], () , {}, (1,2,), [1,2], '', 'yes'])
>>> list(map(type, list(notnones)))
[<class 'int'>, <class 'bool'>, <class 'tuple'>, <class 'list'>, <class 'str'>]
```
#### Reduce
* `reduce()` calls the input function with two arguments: the accumulated result so far, and the next element in the sequence
* `reduce()` is a generalization of summation
* `reduce()` returns the accumulated result after all of the input has been processed
* If you pass an empty sequence to `reduce()` it will raise a TypeError
* `reduce()` accepts an optional initial value argument
* This initial value is conceptually added to the front of the input sequence
* The initial value is returned if the input sequence is empty
* The `map()` and `reduce()` functions in Python are related to the ideas in the map-reduce algorithm
##### Example
```python
## reduce
>>> from functools import reduce
>>> import operator
>>> reduce(operator.add, [1,2,3,4,5])
15
### x is interim result , y is the next sequence value
>>> def mul(x, y):
... print(' mul {} {} '.format(x,y))
... return x * y
...
>>> reduce( mul, range(1, 5))
mul 1 2
mul 2 3
mul 6 4
24
```
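A short sketch of the optional initial value and the empty-input behaviour (the exact TypeError message may vary between Python versions):

```python
>>> from functools import reduce
>>> import operator
>>> reduce(operator.add, [], 0)           # empty input: the initial value is returned
0
>>> reduce(operator.add, [1, 2, 3], 10)   # the initial value is combined first: 10+1+2+3
16
>>> reduce(operator.add, [])              # no initial value and empty input
Traceback (most recent call last):
  ...
TypeError: reduce() of empty iterable with no initial value
```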
### The next() function
* Python's next() function calls `__next__()` on its argument
* Iterators in Python must support the `__next__()` method
* `__next__()` should return the next item in the sequence, or raise `StopIteration` if it is exhausted
* Python's `iter()` function calls `__iter__()` on its argument
* Iterable objects in Python must support the `__iter__()` method
* `__iter__()` should return an iterator for the iterable object
* Objects with a `__getitem__()` method that accepts consecutive integer indices starting at zero are also iterables
* Iterables implemented via `__getitem__()` must raise `IndexError` when they are exhausted
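A quick illustration of the protocol using a plain list; any built-in iterable behaves the same way:

```python
>>> it = iter([1, 2, 3])   # iter() calls __iter__() and returns an iterator
>>> next(it)               # next() calls __next__() on that iterator
1
>>> next(it)
2
>>> next(it)
3
>>> next(it)               # exhausted iterators raise StopIteration
Traceback (most recent call last):
  ...
StopIteration
```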
##### Example of Iterator
```python
class ExampleIterator:
def __init__(self, data):
self.index = 0
self.data = data
def __iter__(self):
return self
def __next__(self):
if self.index >= len(self.data):
raise StopIteration()
rslt = self.data[self.index]
self.index += 1
return rslt
```
##### Example of Iterable
```python
class ExampleIterable:
def __init__(self):
self.data = [1, 2, 3]
def __iter__(self):
return ExampleIterator(self.data)
```
##### Example of AnotherIterable
```python
class AnotherIterable:
def __init__(self):
self.data = [1, 2, 3]
def __getitem__(self, idx):
return self.data[idx]
```
### The iter() function
* The extended form of `iter()` accepts a zero-argument callable and a sentinel value
* Extended `iter()` repeatedly calls the callable argument until it returns the sentinel value
* The values produced by extended `iter()` are those returned from the callable
##### Example
```python
>>> ts = iter(datetime.datetime.now, None)
>>> next(ts)
datetime.datetime(2017, 7, 14, 14, 38, 10, 752761)
>>> next(ts)
datetime.datetime(2017, 7, 14, 14, 38, 13, 373613)
>>> next(ts)
datetime.datetime(2017, 7, 14, 14, 38, 14, 754588)
## Read file
## Content of the file file.txt
## You are reading
## the file
## you won't read
## the
## END
## but not see the END above
>>> with open('file.txt', 'rt') as f:
... for line in iter(lambda: f.readline().strip(), 'END'):
... print(line)
...
You are reading
the file
you won't read
the
```
* One use case for extended `iter()` is to iterate using simple functions
* Protocol conforming iterators must also be iterable
##### Example of Sensor
```python
import datetime
import itertools
import random
import time
class Sensor:
def __iter__(self):
return self
def __next__(self):
return random.random()
sensor = Sensor()
timestamps = iter(datetime.datetime.now, None)
for stamp, value in itertools.islice(zip(timestamps, sensor), 10):
print(stamp, value)
time.sleep(1)
```
<file_sep>#!/bin/bash
if [ ! -d themes/docdock/layouts ]; then
echo 'docdock does not exist.'
git submodule add --force https://github.com/harryho/hugo-theme-docdock.git themes/docdock
git submodule init .
git submodule update .
fi
DRAFT=$1
if [ "${DRAFT}" == 'd' ]; then
echo 'start with draft '
hugo serve -t docdock -D --watch --disableFastRender
else
hugo serve -t docdock --watch --disableFastRender --ignoreCache
fi
<file_sep>+++
title = "TypeScript Note - 2"
description="Generics, Advanced Types"
draft=true
+++
### Generics
A major part of software engineering is building components that not only have well-defined and consistent APIs, but are also reusable. Components that are capable of working on the data of today as well as the data of tomorrow will give you the most flexible capabilities for building up large software systems.
* Generics with function
```ts
function identity<T>(arg: T): T {
    return arg;
}

function identity2<T>(arg: Array<T>): Array<T> {
    console.log(arg.length); // Array has a .length, so no more error
    return arg;
}

let myIdentity: <T>(arg: T) => T = identity;
let myIdentity2: <T>(arg: T) => T = identity2; // Error
```
* Generics with types
```ts
interface GenericIdentityFn<T> {
    (arg: T): T;
}

function identity<T>(arg: T): T {
    return arg;
}

let myIdentity: GenericIdentityFn<number> = identity;
```
### Advanced Types
#### Intersection Types
An intersection type combines multiple types into one. This allows you to add together existing types to get a single type that has all the features you need.
<file_sep>+++
date = "2018-11-30T14:59:31+11:00"
title = "Dual Boot Windows 10 & Ubuntu 18"
description = "Steps to create a machine with dual-boot OS: Windows 10 and Ubuntu 18"
+++
### Dual boot Ubuntu 18 with Windows 10
> I have a couple Linux workstations, but all of them are old PC or laptop. Today I get a chance to test Ubuntu on a brand new laptop. Here I are going to write down all I did to create this dual boot laptop
#### Caution
The laptop I worked on is Lenovo IdeaPad S model with UEFI firmware, but it doesn't mean all Lenovo laptops will work in the same way, not to mention other brand's laptop. So before you try anything, please backup all your data first.
#### Prepare bootable USB with Ubuntu ISO
* Prepare a clean USB pen with minimum 4G volume
* Download the 18.04 LTS Desktop ISO from Ubuntu website.
* Create a bootable USB with Rufus or any other USB build tool
#### Prepare Windows Machine for Dual-Boot
* Start the command prompt as Admin
* Start Menu -> Command Prompt (Admin) in order to enter Windows Command Line.
* Change Window boot manager to restart the system in Safe Mode
```bat
bcdedit /enum
bcdedit /set {default} safeboot minimal
```
* The first thing is to create a free space on the computer hard disk in case the system is installed on a single partition.
* Once in CLI, type diskmgmt.msc on prompt and the Disk Management utility should open. From here, right click on C: partition and select Shrink Volume in order to resize the partition.
```cmd
C:\Windows\system32\>diskmgmt.msc
```
* On Shrink C: enter a value on space to shrink in MB (use at least 20000 MB depending on the C: partition size) and hit Shrink to start partition resize as illustrated below (the value of space shrink from below image is lower and only used for demonstration purposes).
Once the space has been resized you will see a new unallocated space on the hard drive. Leave it as default and reboot the computer in order to proceed with Ubuntu installation.
* Reboot the Windows to UEFI configuration
* Change the UEFI setting
* Disable the **OS Optimized Defaults**
* Disable the **Intel Platform Trust Technology** (Optional)
* Change the boot drive order
* Plugin the USB pen and continue to boot the system
#### Install Ubuntu
* Install Ubuntu Desktop as usual
* Recommand to install minimal version. You always can install other applications later.
#### Rollback the Windows Safe Mode
* Launch command promt as Admin and run following command
```
bcdedit /deletevalue {default} safeboot
```
#### Make your life easier
- Ubuntu
* Update fstab to add the Windows mount point at system start up
* Update the Grub file if you want to boot Windows as default OS
- Windows
* Install the Linx Reader to access the Ubuntu files from Windows
<file_sep>+++
date = "2011-03-09T10:59:31+11:00"
title = "COBIT 5"
description="COBIT 5 - Introduction & Principles"
weight=1
+++
## COBIT 5
COBIT (Control Objectives for Information and Related Technologies) is a business framework for the governance and management of Enterprise IT, created by ISACA.
The framework contains the COBIT 5 framework for governing and managing enterprise IT. It defines a set of generic processes for the management of IT, with each process defined together with process inputs and outputs, key process-activities, process objectives, performance measures and an elementary maturity model.
### Product Family
The COBIT 5 framework is built on five basic principles, which are covered in detail, and includes extensive guidance on enablers for governance and management of enterprise IT.
The COBIT 5 product family includes the following products:
* COBIT 5 (the framework)
* COBIT 5 enabler guides, in which governance and management enablers are discussed in detail. These include:
* COBIT 5: Enabling Processes
* COBIT 5: Enabling Information (in development)
* Other enabler guides (check www.isaca.org/cobit)
* COBIT 5 professional guides, which include:
* COBIT 5 Implementation
* COBIT 5 for Information Security (in development)
* COBIT 5 for Assurance (in development)
* COBIT 5 for Risk (in development)
* Other professional guides (check www.isaca.org/cobit)
* A collaborative online environment, which will be available to support the use of COBIT 5
COBIT 5 provides a comprehensive framework that assists enterprises in achieving their objectives for the governance and management of enterprise IT. Simply stated, it helps enterprises create optimal value from IT by maintaining a balance between realising benefits and optimising risk levels and resource use. COBIT 5 enables IT to be governed and managed in a holistic manner for the entire enterprise, taking in the full end-to-end business and IT functional areas of responsibility, considering the IT-related interests of internal and external stakeholders. COBIT 5 is generic and useful for enterprises of all sizes, whether commercial, not-for-profit or in the public sector.
### Principles
#### Principle 1: Meeting Stakeholder Needs
Enterprises exist to create value for their stakeholders by maintaining a balance between the realisation of benefits and the optimisation of risk and use of resources. COBIT 5 provides all of the required processes and other enablers to support business value creation through the use of IT. Because every enterprise has different objectives, an enterprise can customise COBIT 5 to suit its own context through the goals cascade, translating high-level enterprise goals into manageable, specific, IT-related goals and mapping these to specific processes
and practices.
#### Principle 2: Covering the Enterprise End-to-end
COBIT 5 integrates governance of enterprise IT into enterprise governance:
* It covers all functions and processes within the enterprise; COBIT 5 does not focus only on the ‘IT function’, but treats information and related technologies as assets that need to be dealt with just like any other asset by everyone in the enterprise.
* It considers all IT-related governance and management enablers to be enterprisewide and end-to-end, i.e., inclusive of everything and everyone—internal and external—that is relevant to governance and management of enterprise information and related IT.
#### Principle 3: Applying a Single, Integrated Framework
There are many IT-related standards and good practices, each providing guidance on a subset of IT activities. COBIT 5 aligns with other relevant standards and frameworks at a high level, and thus can serve as the overarching framework for governance and management of enterprise IT.
#### Principle 4: Enabling a Holistic Approach
Efficient and effective governance and management of enterprise IT require a holistic approach, taking into account several interacting components. COBIT 5 defines a set of enablers to support the implementation of a comprehensive governance and management system for enterprise IT. Enablers are broadly defined as anything that can help to achieve the objectives of the enterprise. The COBIT 5 framework defines seven categories of enablers:
* Principles, Policies and Frameworks
* Processes
* Organisational Structures
* Culture, Ethics and Behaviour
* Information
* Services, Infrastructure and Applications
* People, Skills and Competencies
#### Principle 5: Separating Governance From Management
The COBIT 5 framework makes a clear distinction between governance and management. These two disciplines encompass different types of activities, require different organisational structures and serve different purposes. COBIT 5’s view on this key distinction between governance and management is:
* Governance
Governance ensures that stakeholder needs, conditions and options are evaluated to determine balanced, agreed-on enterprise objectives to be achieved; setting direction through prioritisation and decision making; and monitoring performance and compliance against agreed-on direction and objectives.
> In most enterprises, overall governance is the responsibility of the board of directors under the leadership of the chairperson. Specific governance responsibilities may be delegated to special organisational structures at an appropriate level, particularly in larger, complex enterprises.
* Management
Management plans, builds, runs and monitors activities in alignment with the direction set by the governance body to achieve the enterprise objectives.
> In most enterprises, management is the responsibility of the executive management under the leadership of the chief executive officer (CEO).
<file_sep>+++
title = "Context"
description = "Context Manager, Introspection"
weight=8
+++
## Context managers
### context manager
* context manager: an object designed to be used in a with-statement
```python
with context-manager:
body
with context-manager:
context-manager.begin()
body
context-manager.end()
with context-manager:
setup()
body
teardown()
with context-manager:
context-manager.begin()
body
context-manager.end()
with context-manager:
allocation()
body
deallocation()
with context-manager:
enter()
body
exit()
```
* A context-manager ensures that resources are properly and automatically managed
* enter() prepares the manager for use
* exit() cleans it up
* Context-manager Protocol
```python
__enter__(self)
__exit__(self,
exc_type, ## exception type
exc_val, ## exception object
exc_tb) ## exception traceback
```
* `__enter__()`
* called before entering with-statement body
* return value bound to as variable
* can return value of any type
* commonly returns context-manager itself
* `__exit__()` called when with-statement body exits
* By default `__exit__()` propagates exceptions thrown from the with-statement’s code block
* `__exit__()` should never explicitly re-raise exceptions
* `__exit__()` should only raise exceptions if it fails itself
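The propagation rule hinges on the return value of `__exit__()`: a falsey return lets the exception propagate, while a truthy return suppresses it. A minimal sketch that swallows one exception type:

```python
class Suppress:
    """Context manager that swallows a given exception type."""
    def __init__(self, exc_type):
        self.exc_type = exc_type

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Returning True tells Python the exception was handled;
        # returning False or None would let it propagate.
        return exc_type is not None and issubclass(exc_type, self.exc_type)

with Suppress(ZeroDivisionError):
    1 / 0
print("still running")   # reached because the ZeroDivisionError was suppressed
```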
## contextlib
* `contextlib` : standard library module for working with context managers
* `contextmanager` is a decorator you can use to create new context managers
```python
@contextlib.contextmanager
def my_context_manager():
## <ENTER>
try:
yield [value]
## <NORMAL EXIT>
except:
## <EXCEPTIONAL EXIT>
raise
with my_context_manager() as x:
## . . .
```
* `contextmanager` lets you define context-managers with simple control flow. It allows you to leverage the statefulness of generators
* `contextmanager` uses standard exception handling to propagate exceptions
* `contextmanager` explicitly re-raises – or doesn’t catch – to propagate exceptions
* `contextmanager` swallows exceptions by not re-raising them
* Exceptions propagated from __inner__ context managers will be seen by __outer__ context managers
* Multiple context managers: with-statements can use as many context-managers as you need
* Never pass them as a single list; separate each context manager with a comma
- Example -- The code below is the same
```python
with cm1() as a, cm2() as b:
BODY
with cm1() as a:
with cm2() as b:
BODY
```
* Examples
-- simple sample
```python
import contextlib
class LoggingContextManager:
def __enter__(self):
print('LoggingContextManager.__enter__()')
return "You're in a with-block!"
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
print('LoggingContextManager.__exit__: '
'normal exit detected')
else:
print('LoggingContextManager.__exit__: '
'Exception detected! '
'type={}, value={}, traceback={}'.format(
exc_type, exc_val, exc_tb))
@contextlib.contextmanager
def logging_context_manager():
print('logging_context_manager: enter')
try:
yield "You're in a with-block!"
print('logging_context_manager: normal exit')
except Exception:
print('logging_context_manager: exceptional exit',
sys.exc_info())
raise
if __name__ == "__main__":
with LoggingContextManager() as log:
pass
with logging_context_manager() as clog:
pass
## test result
## LoggingContextManager.__enter__()
## LoggingContextManager.__exit__: normal exit detected
## logging_context_manager: enter
## logging_context_manager: normal exit
```
* nested
```python
@contextlib.contextmanager
def nest_test(name):
print('Entering', name)
yield name
print('Exiting', name)
if __name__ == "__main__":
with nest_test('outer') as n1, nest_test('inner nested in ' + n1):
pass
## test result
## Entering outer
## Entering inner nested in outer
## Exiting inner nested in outer
## Exiting outer
```
* db & transaction
```python
import contextlib
class Connection:
def __init__(self):
self.xid = 0
def _start_transaction(self, read_only=True):
print('starting transaction', self.xid)
rslt = self.xid
self.xid = self.xid + 1
return rslt
def _commit_transaction(self, xid):
print('committing transaction', xid)
def _rollback_transaction(self, xid):
print('rolling back transaction', xid)
class Transaction:
def __init__(self, conn, read_only=True):
self.conn = conn
self.xid = conn._start_transaction(read_only=read_only)
def commit(self):
self.conn._commit_transaction(self.xid)
def rollback(self):
self.conn._rollback_transaction(self.xid)
@contextlib.contextmanager
def start_transaction(connection):
tx = Transaction(connection)
try:
yield tx
except Exception:
tx.rollback()
raise
tx.commit()
```
### Introspection
* The type of an object is itself an object, whose type is ultimately `type`
* Example
* introspector
```python
import inspect
import reprlib
import itertools
## from sorted_set import SortedSet
from bisect import bisect_left
from collections.abc import Sequence, Set
from itertools import chain
class SortedSet(Sequence, Set):
def __init__(self, items=None):
self._items = sorted(set(items)) if items is not None else []
def __contains__(self, item):
try:
self.index(item)
return True
except ValueError:
return False
def __len__(self):
return len(self._items)
def __iter__(self):
return iter(self._items)
def __getitem__(self, index):
result = self._items[index]
return SortedSet(result) if isinstance(index, slice) else result
def __repr__(self):
return "SortedSet({})".format(
repr(self._items) if self._items else ''
)
def __eq__(self, rhs):
if not isinstance(rhs, SortedSet):
return NotImplemented
return self._items == rhs._items
def __ne__(self, rhs):
if not isinstance(rhs, SortedSet):
return NotImplemented
return self._items != rhs._items
def _is_unique_and_sorted(self):
return all(self[i] < self[i + 1] for i in range(len(self) - 1))
def index(self, item):
assert self._is_unique_and_sorted()
index = bisect_left(self._items, item)
if (index != len(self._items)) and (self._items[index] == item):
return index
raise ValueError("{} not found".format(repr(item)))
def count(self, item):
assert self._is_unique_and_sorted()
return int(item in self)
def __add__(self, rhs):
return SortedSet(chain(self._items, rhs._items))
def __mul__(self, rhs):
return self if rhs > 0 else SortedSet()
def __rmul__(self, lhs):
return self * lhs
def issubset(self, iterable):
return self <= SortedSet(iterable)
def issuperset(self, iterable):
return self >= SortedSet(iterable)
def intersection(self, iterable):
return self & SortedSet(iterable)
def union(self, iterable):
return self | SortedSet(iterable)
def symmetric_difference(self, iterable):
return self ^ SortedSet(iterable)
def difference(self, iterable):
return self - SortedSet(iterable)
def full_sig(method):
try:
return method.__name__ + str(inspect.signature(method))
except ValueError:
return method.__name__ + '(...)'
def brief_doc(obj):
doc = obj.__doc__
if doc is not None:
lines = doc.splitlines()
if len(lines) > 0:
return lines[0]
return ''
def print_table(rows_of_columns, *headers):
num_columns = len(rows_of_columns[0])
num_headers = len(headers)
if len(headers) != num_columns:
raise TypeError("Expected {} header arguments, "
"got {}".format(num_columns, num_headers))
rows_of_columns_with_header = itertools.chain([headers], rows_of_columns)
columns_of_rows = list(zip(*rows_of_columns_with_header))
column_widths = [max(map(len, column)) for column in columns_of_rows]
column_specs = ('{{:{w}}}'.format(w=width) for width in column_widths)
format_spec = ' '.join(column_specs)
print(format_spec.format(*headers))
rules = ('-' * width for width in column_widths)
print(format_spec.format(*rules))
for row in rows_of_columns:
print(format_spec.format(*row))
def dump(obj):
print("Type")
print("====")
print(type(obj))
print()
print("Documentation")
print("=============")
print(inspect.getdoc(obj))
print()
print("Attributes")
print("==========")
all_attr_names = SortedSet(dir(obj))
method_names = SortedSet(
filter(lambda attr_name: callable(getattr(obj, attr_name)),
all_attr_names))
assert method_names <= all_attr_names
attr_names = all_attr_names - method_names
attr_names_and_values = [(name, reprlib.repr(getattr(obj, name)))
for name in attr_names]
print_table(attr_names_and_values, "Name", "Value")
print()
print("Methods")
print("=======")
methods = (getattr(obj, method_name) for method_name in method_names)
method_names_and_doc = [(full_sig(method), brief_doc(method))
for method in methods]
print_table(method_names_and_doc, "Name", "Description")
print()
if __name__ == "__main__":
dump('a')
## test result
## Type
## ====
## <class 'str'>
#
## Documentation
## =============
## str(object='') -> str
## str(bytes_or_buffer[, encoding[, errors]]) -> str
## Create a new string object from the given object. If encoding or
## errors is specified, then the object must expose a data buffer
## that will be decoded using the given encoding and error handler.
## Otherwise, returns the result of object.__str__() (if defined)
## or repr(object).
## encoding defaults to sys.getdefaultencoding().
## errors defaults to 'strict'.
#
## Attributes
## ==========
## Name Value
## ------- ------------------------------
## __doc__ "str(object='... to 'strict'."
#
## Methods
## =======
## Name Description
## ----------------- -----------------------------------------------------------------------
## __add__(value, /) Return self+value.
## str(...) str(object='') -> str
## __contains__(key, /) Return key in self.
## __delattr__(name, /) Implement delattr(self, name).
## __dir__(...) __dir__() -> list
## __eq__(value, /) Return self==value.
## __format__(...) S.__format__(format_spec) -> str
## __ge__(value, /) Return self>=value.
## __getattribute__(name, /) Return getattr(self, name).
## __getitem__(key, /) Return self[key].
## __getnewargs__(...)
## __gt__(value, /) Return self>value.
## __hash__() Return hash(self).
## __init__(*args, **kwargs) Initialize self. See help(type(self)) for accurate signature.
## __init_subclass__(...) This method is called when a class is subclassed.
## __iter__() Implement iter(self).
## __le__(value, /) Return self<=value.
## __len__() Return len(self).
## __lt__(value, /) Return self<value.
## __mod__(value, /) Return self%value.
## __mul__(value, /) Return self*value.n
## __ne__(value, /) Return self!=value.
## __new__(*args, **kwargs) Create and return a new object. See help(type) for accurate signature.
## __reduce__(...) helper for pickle
## __reduce_ex__(...) helper for pickle
## __repr__() Return repr(self).
## __rmod__(value, /) Return value%self.
## __rmul__(value, /) Return self*value.
## __setattr__(name, value, /) Implement setattr(self, name, value).
## __sizeof__(...) S.__sizeof__() -> size of S in memory, in bytes
## __str__() Return str(self).
## __subclasshook__(...) Abstract classes can override this to customize issubclass().
## capitalize(...) S.capitalize() -> str
## casefold(...) S.casefold() -> str
## center(...) S.center(width[, fillchar]) -> str
## count(...) S.count(sub[, start[, end]]) -> int
## encode(...) S.encode(encoding='utf-8', errors='strict') -> bytes
## endswith(...) S.endswith(suffix[, start[, end]]) -> bool
## expandtabs(...) S.expandtabs(tabsize=8) -> str
## find(...) S.find(sub[, start[, end]]) -> int
## format(...) S.format(*args, **kwargs) -> str
## format_map(...) S.format_map(mapping) -> str
## index(...) S.index(sub[, start[, end]]) -> int
## isalnum(...) S.isalnum() -> bool
## isalpha(...) S.isalpha() -> bool
## isdecimal(...) S.isdecimal() -> bool
## isdigit(...) S.isdigit() -> bool
## isidentifier(...) S.isidentifier() -> bool
## islower(...) S.islower() -> bool
## isnumeric(...) S.isnumeric() -> bool
## isprintable(...) S.isprintable() -> bool
## isspace(...) S.isspace() -> bool
## istitle(...) S.istitle() -> bool
## isupper(...) S.isupper() -> bool
## join(...) S.join(iterable) -> str
## ljust(...) S.ljust(width[, fillchar]) -> str
## lower(...) S.lower() -> str
## lstrip(...) S.lstrip([chars]) -> str
## maketrans(x, y=None, z=None, /) Return a translation table usable for str.translate().
## partition(...) S.partition(sep) -> (head, sep, tail)
## replace(...) S.replace(old, new[, count]) -> str
## rfind(...) S.rfind(sub[, start[, end]]) -> int
## rindex(...) S.rindex(sub[, start[, end]]) -> int
## rjust(...) S.rjust(width[, fillchar]) -> str
## rpartition(...) S.rpartition(sep) -> (head, sep, tail)
## rsplit(...) S.rsplit(sep=None, maxsplit=-1) -> list of strings
## rstrip(...) S.rstrip([chars]) -> str
## split(...) S.split(sep=None, maxsplit=-1) -> list of strings
## splitlines(...) S.splitlines([keepends]) -> list of strings
## startswith(...) S.startswith(prefix[, start[, end]]) -> bool
## strip(...) S.strip([chars]) -> str
## swapcase(...) S.swapcase() -> str
## title(...) S.title() -> str
## translate(...) S.translate(table) -> str
## upper(...) S.upper() -> str
## zfill(...) S.zfill(width) -> str
```
<file_sep>+++
date = "2016-01-10T14:59:31+11:00"
title = "Ubuntu 14 -- desktop setup & dual boot "
description = "Post-installation for Ubuntu 14 desktop"
draft = false
+++
> *This article is mainly to help beginner install Ubuntu desktop at the first time. If you are looking for setup of Ubuntu server, please check out the blog -- [Ubuntu server setup](/blog/ubuntu-server-14/)*
## Where to install Linux?
* The answer to this question really depends on the user's computer knowledge and skills. Basically, Linux can be installed on almost any PC, laptop, embedded device or tablet, so here are some suggestions for people with different skill levels.
* Beginner -- If you have never installed an operating system or used a `Unix/Linux` system, but still want to try something new, you should consider installing a virtual machine on your computer and then installing Ubuntu on the virtual machine. [VMware](http://www.vmware.com) and [VirtualBox](https://www.virtualbox.org) are both very good products.
* Intermediate -- If you have installed an operating system before, or you have used a `Unix/Linux` system, you can install it on an old machine, or, to be safe, try it on a virtual machine first.
* Expert -- You can try dual booting or multi-booting operating systems on your PC. Installing 10-20 operating systems on a PC with a 400GB hard disk should be fine. The only problem I encountered before is that some operating systems cannot find the proper drivers to support all the devices on your PC/laptop, such as the drivers for the camera, touchpad, wifi, etc. It can take a lot of time to research and experiment.
### Install virtual machine
__*Let's get our hands dirty*__
* Install VirtualBox/VMware on your computer. IMO, using VirtualBox is quite handy and saves you a lot of effort, and it is free. In a real environment you will use remote tools to do your admin tasks instead of handling a physical machine directly, and you can try other Linux OSes on VirtualBox as well.
* If you lean toward a commercial product, you can choose VMware. There is a free trial option for you.
### Which version to choose
* Ubuntu has several versions for you to download and play with. I suggest you always pick the LTS (Long Term Support) version. As a beginner, the desktop version is the best option to start with.
* After downloading the [Ubuntu Desktop](https://www.ubuntu.com/download/desktop) ISO from the internet, you will get an `ISO` file like this: ubuntu-xx.xx.x-desktop-amd64.iso if your OS is 64-bit, or something like ubuntu-xx.xx.x-desktop-x86.iso for 32-bit.
### Install Ubuntu
* Before you install, you had better back up everything on the device you are going to install on, and check that your internet connection is working properly.
* Create a new virtual machine within VMware or VirtualBox. The process is almost the same in both products.
* Assume your new virtual machine will sit in `C:\vbox` for VirtualBox or `C:\vm` for VMware
* Create a new machine from the menu. Type in the name of the VM, e.g. Ubuntu
* Select the type of operating system: Linux
* You can choose Ubuntu (32/64 bit) or something else. It doesn't matter; we don't use any built-in xxx.iso files from VirtualBox or VMware. Then click `Next`
* Select the memory size for Ubuntu. 2G RAM is the minimum requirement; I prefer up to 30% of the total memory size. Then click `Next`.
* Select "Create a virtual hard drive now", and then click `Next`.
* Select the default VDI, then click `Next`.
* Select "Dynamically allocated", then click `Next`.
* Choose the location of Ubuntu, e.g. c:\vbox\ubuntu\ubuntu.vdi or c:\vm\ubuntu\ubuntu.vmdk. Select the size of the VDI/VMDK file: at least 8G, though I'd select 16G or more. Now an Ubuntu virtual machine has been created.
* Configure the Ubuntu hardware settings.
**VirtualBox**
* On VirtualBox toolbar, there is a `Start` button. Click `Start`, then go the Storage item.
* Under the Storage Tree section, there is an Empty CD icon. Click the Empty icon.
* Under the Attributes section, click the CD icon at the end of the dropdown list of the CD/DVD Drive. Choose the Ubuntu ISO file which you downloaded from the Ubuntu website. Click `OK`.
* Leave all the other settings as default. Click the `Start` button on the toolbar.
**VMWare**
* On VMware, you can find the CD/DVD button on the tab page of the new virtual machine.
* Click the CD button at the end of the dropdown list of the CD/DVD Drive. Choose the Ubuntu ISO file which you downloaded from the Ubuntu website. Click `OK`.
* Leave all the other settings as default. Click the `Power on this virtual machine` option on the tab page.
* Ubuntu provides a friendly and beautiful UI to complete the installation, instead of the ugly, terrifying terminal of a geek's computer in a sci-fi movie. If you chose VMware or VirtualBox as the machine, you can open the page of [installation steps](https://www.ubuntu.com/download/desktop/install-ubuntu-desktop) in your browser. Just follow the instructions step by step; it will take around 1-2 hours to complete.
## Things to do after installing Ubuntu desktop
* The Ubuntu desktop is very nice and friendly, even though it is different from Windows. Basically, you don't need any geek skills to get around the Ubuntu desktop and use it like Windows. There are tons of free software packages you can download from the Ubuntu Software Center, so you don't need to worry about where to find the software you need. Considering you are a beginner, some suggestions and caveats are highlighted below, and none of them needs the command line or a terminal.
* Disable automatic system upgrades to a new LTS version.
* Disable the system power manager to suspend your PC.
* Disable the system problem report service.
* Enable the third-party packages.
* Install the `Unity Tweak Tool` to help you customize your UI.
## Things not to do
* The items below are things not to do, because I assume you are an Ubuntu or Linux beginner and I don't want you to feel frustrated at the beginning of your Ubuntu desktop journey. In the same way, 99.9999% of Windows users should not delete cache files in `C:\Windows` or change the system registry, unless they really understand what they are doing.
* Don't optimize your memory settings. It is really not a big deal.
* Don't try to change your `Unity` desktop to another Ubuntu desktop, e.g. Ubuntu MATE, Xfce (Xubuntu), etc.
* Don't follow the tips online to use `root` in terminal before you fully understand what the commands do.
* Don't try to mount other drives on your computer, if it is mounted automatically.
## Dual boot or multiple boot with Windows
**This section is for people who want to install multiple operating systems on an actual PC, instead of a virtual machine as above. Obviously, it is not for beginners, but everyone has to go through the first time to reach a higher level. To be on the safe side, I strongly suggest you use an old or redundant PC to test it.**
* For dual or multiple boot, you need to make sure your disk is formatted as GPT. It will save you so much effort later to install other operating systems.
* I suggest Windows first approach for multiple boot systems, because that is easier than the other way around. After install Windows on your PC, you need to shrink Windows disk space for other operating systems with `Disk Management`.
* Now you need to prepare Ubuntu USB installer or DVD. Place the USB stick or DVD in the appropriate drive, reboot the machine and instruct the BIOS/UEFI to boot-up from the DVD/USB by pressing a special function key (usually F12, F10 or F2 depending on the vendor specifications).
* When you install Ubuntu, you need to select a separate `boot` drive for dual boot systems. Usually you just need to pick the `efi` drive as the `boot` drive. Your drives are supposed to be formatted with GPT in a structure like the following.
```ini
sda
+----sda1 nfts 500M Windows recovery
+----sda2 efi /boot 100M grub2 , Windows boot manager
+----sda3 / 10M
+----sda4 ntfs / 40000M Window 7/8/10
+----sda5 swap <Double size of your RAM size>
+----sda6 ext4 / 20000M Ubuntu 14 desktop
+----sda7 ext4 / 20000M Fedora 20 desktop
+----sda8 ext4 / 20000M CentOS 6 desktop
+----sda9 ext4 / 20000M OpenSuse desktop
+ ...
```
## Troubleshooting: Windows always boots first
**Change BIOS**
* Start your PC by pressing a special function key (usually F12, F10 or F2 depending on the vendor specifications).
* Some PC's BIOS has `BOOT` tab option, open the `BOOT` tab, you will find the `OS Boot Manager`. It is the simplest way to fix the issue. If your PC's BIOS has no such setting feature, you need to check the next section.
**Change the Windows Boot Manager**
* Login windows with common prompt
* Restart windows, meanwhile press shift key
* In the options page, choose change to other options
* Troubleshooting
* Command Prompt
* Log in to Windows with the Command Prompt
* Use BCDEdit to change the Windows Boot Manager so that Ubuntu boots first
```bash
REM backup
bcdedit /enum > X:\Users\public\documents\bcdedit.txt
REM change the bootmgr
bcdedit /set {bootmgr} path \EFI\ubuntu\grubx64.efi
```
* After you reboot system, you will see the Grub 2 menu as follow.
```bash
GNU GRUB version 2.0
----------------------------------------------------------------------------------
| Ubuntu
| Advanced options for Ubuntu
| Windows Boot Manager ( on /dev/sda2 )
| Fedora 20
| Advanced options for Fedora 20
| OpenSuse
| Advanced options for OpenSuse
| ....
```
<file_sep>+++
title = "Getting started"
description="Golang Introduction: Basic Command, Assignment operator,Array, Slice ..."
weight=1
+++
## Go Introduction
> Golang's popularity is skyrocketing. The thriving of Docker and Kubernetes pushes Golang to a higher level.
> Go is easy to become functional with and appropriate for junior developers to work on. Also, having a language that encourages readability and comprehension is extremely useful. The mixture of duck typing (via interfaces) and convenience features such as ":=" for short variable declarations give Go the feel of a dynamically typed language while retaining the positives of a strongly typed one.
> Go's native concurrency is a boon for network applications that live and die on concurrency. Go is an explicitly engineered programming language, specifically designed with these new requirements in mind. Written expressly for the cloud, Go has been growing in popularity because of its mastery of concurrent operations and the beauty of its construction.
### Purpose
* These Golang notes will be different from my notes on other languages: they include more of the basics of Golang compared with the other language notes.
* Including more basic material doesn't mean I will go through the basic types, conditions, etc. The Golang website has done an excellent job of explaining these clearly, including code samples.
* As a polyglot developer, I have shifted between strongly typed and dynamically typed languages depending on the project over the last decade. My programming paradigm has become mixed, which is why I keep notes to remind myself of the best practices for different languages.
* As I mentioned above, I will only highlight the features and best practices of Golang that differ from other languages I am familiar with.
### Basic Command
* Install Golang
* Setup the environment variables properly
```bash
GOOS=linux
GOROOT=/usr/local/go
GOARCH=amd64
GOPATH=/home/<user_account>/ws/go
GOBIN=/home/<user_account>/ws/go/bin
```
* You can find the Golang helloworld program from the home page
* Save the program to the location `$GOPATH/src`
* Build, Run & Install the helloworld program `hello.go`
```bash
# Navigate to hello.go location
cd $GOPATH/src
# Build the hello.go
go build hello.go
# Run the executable program
./hello
# Build and Run in one command
go run hello.go
# Install the program
go install hello.go
hello
```
### Assignment Operator
* The `:=` operator effectively makes a new variable; it is also called an initializing declaration.This is the preferred form, but it can only be used inside functions, not in package scope.
```go
// Multiple declarations on a single line
a, b, c := 5, 7, "abc"
```
### Parallel or Simultaneous assignment
* There is no need to make a swap function in Go
```go
// Following code perform a swap function
a,b = b,a
```
### Powerful blank variable
* The blank identifier _ can also be used to throw away values. _ is in effect a write-only variable, you cannot ask for its value. It exists because a declared variable in Go must also be used, and sometimes you don’t need to use all return values from a function.
```go
_, b = "abc", 7
// A function return val and error, but error can be ignored.
val, _ = FuncReturnValAndErr(' Ignore the return error ')
```
### Array & Slice
#### Array
> An array is a numbered and fixed-length sequence of data items (elements) of the same single type; The length must be a constant expression, that must evaluate to a non-negative integer value.
* Create an array variable
```go
var arr1 [5]int
for i := range arr1 {
    fmt.Printf(" index = %d , value = %d \n", i, arr1[i])
}
// ----------- Output ---------------
// index = 0 , value = 0
// index = 1 , value = 0
// index = 2 , value = 0
// index = 3 , value = 0
// index = 4 , value = 0
```
* Create an arrary with Array literal
```go
// Print out the array below will get the same output as arr1 above
var arr2 = [...]int{0,0,0,0,0};
var arr3 = []int{0,0,0,0,0}; // This is a slice instead of array
```
#### Slice
* A slice is a reference to a contiguous segment(section) of an array (which we will call the underlying array, and which is usually anonymous), so a slice is a reference type (thus more akin to the array type in C/C++, or the list type in Python).
```go
arr := [5]int {1,2,3,4,5}
slice := arr[0:2]
fmt.Printf( " slice = %v \n", slice)
slice = arr[0:]
fmt.Printf( " slice = %v \n", slice)
slice = arr[:2]
fmt.Printf( " slice = %v \n", slice)
slice = arr[1:5]
fmt.Printf( " slice = %v \n", slice)
slice = arr[:5]
fmt.Printf( " slice = %v \n", slice)
// ---- output -----
// slice = [1 2]
// slice = [1 2 3 4 5]
// slice = [1 2]
// slice = [2 3 4 5]
// slice = [1 2 3 4 5]
```
#### Re-slicing
* Changing the length of the slice is called re-slicing, it is done e.g. like: `slice1 = slice1[0:end]` where end is another end-index (length) than before.
* Resizing a slice by 1 can be done as follows: `sl = sl[0:len(sl)+1] // extend length by 1`
* A slice can be resized until it occupies the whole underlying array.
#### Copy and append slices
```go
package main
import "fmt"
func main() {
sl_from := []int{1,2,3}
sl_to := make([]int,10)
n := copy(sl_to, sl_from)
fmt.Println(sl_to)
// output: [1 2 3 0 0 0 0 0 0 0]
fmt.Printf("Copied %d elements\n", n) // n == 3
sl3 := []int{1,2,3}
sl3 = append(sl3, 4, 5, 6)
fmt.Println(sl3)
}
```
### make() and new()
* new(T) allocates zeroed storage for a new item of type T and returns its address, a value of type *T: it returns a pointer to a newly allocated zero value of type T, ready for use; it applies to value types like arrays and structs; it is equivalent to &T{ }
* make(T) returns an initialized value of type T; it applies only to the 3 built-in reference types: slices, maps and channels.
* In other words, new allocates; make initializes.
```go
var p *[]int = new([]int) // *p == nil; with len and cap 0
// or use assignment operator
p := new([]int)
var v []int = make([]int, 10, 50)
// or use assignment operator
v := make([]int, 10, 50)
```
### Good practices
#### Use Buffer to concat string
* Go's `bytes` package provides a `Buffer` type with manipulation functions for this kind of task. The sample below is much more memory and CPU-efficient than `+=`, especially if the number of strings to concatenate is large.
```go
var buffer bytes.Buffer
for {
if s, ok := getNextString(); ok { //method getNextString() not shown here
buffer.WriteString(s)
} else {
break
}
}
fmt.Print(buffer.String(), "\n")
```
#### Substr
Slices make substringing easier than in other languages: `subStr := str[start:end]`
#### Change a character in string
* To do this you first have to convert the string to an array of bytes, then an array-item of a certain index can be changed, and then the array must be converted back to a new string.
```go
str := "golang"
chars:= []byte(str)
chars[1] = '0'
newStr := string(chars) // g0lang
```
<file_sep>+++
title = "DigitialOcean: Droplet"
description="Droplet Introduction"
weight=1
+++
## Droplet
DigitalOcean Droplets are Linux-based virtual machines (VMs) that run on top of virtualized hardware. Each Droplet you create is a new server you can use, either standalone or as part of a larger, cloud-based infrastructure.
### Prerequisite
* Prepare a bank account or credit card
* Signup DigitalOcean account and activate it
* Create a new project
### OS Options
* Ubuntu
* FreeBSD
* Fedora
* Debian
* CentOS
### Plans and Pricing
We offer four different kinds of Droplet plans: one shared CPU plan and three dedicated CPU plans.
Droplet Plan | CPU | Range of Resources | RAM-to-CPU Ratio | Processor
----|----|----|----|----
Standard | Shared | 1 - 32 vCPUs, 1 - 192 GB RAM | Variable |
General Purpose | Dedicated | 2 - 40 vCPUs, 8 - 160 GB RAM | 4 GB per vCPU | Intel Xeon Skylake (2.7 GHz, 3.7 GHz turbo)
CPU-Optimized | Dedicated | 2 - 32 vCPUs, 4 - 64 GB RAM | 2 GB per vCPU | Intel Xeon Broadwell (2.6 GHz), Intel Xeon Skylake (2.7 GHz, 3.7 GHz turbo)
Memory-Optimized | Dedicated | 2 - 32 vCPUs, 16 - 256 GB RAM | 8 GB per vCPU |
### Create a droplet
* Create a droplet
* Choose the operating system you prefer
* Pick up the CPU and RAM you need.
* The lowest one is 1 vCPU and 1G RAM
* The pricing for above setting is $5/month
* Choose the region your droplet will install
* Always use SSH
* Create a new SSH key via `ssh-keygen`
* Add the public key to your droplet
* Click the button "Create Droplet"
### Access Droplet
#### Get Public IP
> After you create your droplet, you will see a list of droplets as follows
Name | IP Address | Created | Tags
----|----|---|---
droplet-name | xxx.xxx.xxx.xxx | 10 mins ago |
#### Login via SSH
```
ssh -i ~/.ssh/<your_droplet_rsa> root@<xxx.xxx.xxx.xxx>
```
#### Congrats
* Your droplet is up and running
* Next, we will continue to set up the droplet as a web server.
title = "Project, Vector, String & Hashmap"
description="Rustlang Introduction: Project management, Vector, String and Hashmap"
weight = 2
+++
### Project management
* Rust has a number of features that allow you to manage your code’s organization, including which details are exposed, which details are private, and what names are in each scope in your programs.
* Packages: A Cargo feature that lets you build, test, and share crates
* Crates: A tree of modules that produces a library or executable
* Modules and use: Let you control the organization, scope, and privacy of paths
* Paths: A way of naming an item, such as a struct, function, or module
#### Package & Crate
* A package is one or more crates that provide a set of functionality. A package contains a Cargo.toml file that describes how to build those crates.
* A crate will group related functionality together in a scope so the functionality is easy to share between multiple projects.
#### Module
* Modules let us organize code within a crate into groups for readability and easy reuse. Modules also control the privacy of items, which is whether an item can be used by outside code (public) or is an internal implementation detail and not available for outside use (private).
#### Path
* A path can take two forms (see the sketch after this list):
* An absolute path starts from a crate root by using a crate name or a literal crate.
* A relative path starts from the current module and uses self, super, or an identifier in the current module.
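For example, a minimal sketch of a module tree using both path forms (the module and function names here are illustrative, not from these notes):
```rs
mod front_of_house {
    pub mod hosting {
        pub fn add_to_waitlist() {}
    }
}

pub fn eat_at_restaurant() {
    // Absolute path, starting from the crate root
    crate::front_of_house::hosting::add_to_waitlist();
    // Relative path, starting from the current module
    front_of_house::hosting::add_to_waitlist();
}
```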
### Vector
* Vectors allow you to store more than one value in a single data structure that puts all the values next to each other in memory.
```rs
let v = vec![1, 2, 3, 4, 5];
let third: &i32 = &v[2];
println!("The third element is {}", third);
match v.get(2) {
Some(third) => println!("The third element is {}", third),
None => println!("There is no third element."),
}
```
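The example above reads elements from a vector built with the vec! macro; a minimal sketch of creating an empty vector and growing it with push (the vector must be mutable):
```rs
let mut v: Vec<i32> = Vec::new();
v.push(5);
v.push(6);
v.push(7);
```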
#### Iterating over the Values
* Immutable
```rs
let v = vec![100, 32, 57];
for i in &v {
println!("{}", i);
}
```
* Mutable
```rs
let mut v = vec![100, 32, 57];
for i in &mut v {
*i += 50;
}
```
### String
* Rust has only one string type in the core language, which is the string slice str that is usually seen in its borrowed form &str.
* The String type, which is provided by Rust’s standard library rather than coded into the core language, is a growable, mutable, owned, UTF-8 encoded string type. When Rustaceans refer to “strings” in Rust, they usually mean the String and the string slice &str types, not just one of those types.
* Rust’s standard library also includes a number of other string types, such as OsString, OsStr, CString, and CStr. Library crates can provide even more options for storing string data.
#### New string
* use new function
```rs
let mut s = String::new();
```
#### Updating a String
* A String can grow in size and its contents by using the push_str method to append a string slice.
* The push method takes a single character as a parameter and adds it to the String.
```rs
let mut s1 = String::from("foo");
let s2 = "bar";
s1.push_str(s2);
println!("s1 is {}", s1);
println!("s2 is {}", s2);
let mut s3 = String::from("lo");
s3.push('l');
println!("s3 is {}", s3);
```
* Concatenation with the + Operator or the format! Macro
```rs
let s1 = String::from("Hello, ");
let s2 = String::from("world!");
let s3 = s1 + &s2; // note s1 has been moved here and can no longer be used
```
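The + operator moves s1, as the comment above notes; a sketch of similar concatenation with the format! macro, which does not take ownership of any of its arguments (the variable names are illustrative):
```rs
let s1 = String::from("tic");
let s2 = String::from("tac");
let s3 = String::from("toe");
let s = format!("{}-{}-{}", s1, s2, s3); // s1, s2 and s3 remain usable
```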
* Iterates char of string
```rs
for c in "नमस्ते".chars() {
println!("{}", c);
}
for b in "नमस्ते".bytes() {
println!("{}", b);
}
```
#### Strings Are Not So Simple
* To summarize, strings are complicated. Different programming languages make different choices about how to present this complexity to the programmer. Rust has chosen to make the correct handling of String data the default behavior for all Rust programs, which means programmers have to put more thought into handling UTF-8 data upfront.
### Hashmap
* The type HashMap<K, V> stores a mapping of keys of type K to values of type V. It does this via a hashing function, which determines how it places these keys and values into memory.
* Hash maps are useful when you want to look up data not by using an index, as you can with vectors, but by using a key that can be of any type.
#### New hashmap
* create an empty hash map with new and add elements with insert.
```rs
use std::collections::HashMap;
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
```
* Another way of constructing a hash map is by using the collect method on a vector of tuples, where each tuple consists of a key and its value. The collect method gathers data into a number of collection types, including HashMap.
```rs
use std::collections::HashMap;
let teams = vec![String::from("Blue"), String::from("Yellow")];
let initial_scores = vec![10, 50];
let scores: HashMap<_, _> = teams.iter().zip(initial_scores.iter()).collect();
```
#### Hash Maps and Ownership
* For types that implement the Copy trait, like i32, the values are copied into the hash map. For owned values like String, the values will be moved and the hash map will be the owner of those values.
```rs
use std::collections::HashMap;
let field_name = String::from("Favorite color");
let field_value = String::from("Blue");
let mut map = HashMap::new();
map.insert(field_name, field_value);
println!("field_name: {} field_value: {}", field_name, field_value);
// error[E0382]: borrow of moved value: `field_name`
// error[E0382]: borrow of moved value: `field_value`
```
#### Accessing Values in a Hash Map
* get a value out of the hash map by providing its key to the get method
```rs
use std::collections::HashMap;
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
for (key, value) in &scores {
println!("{}: {}", key, value);
}
```
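The loop above walks every pair; a minimal sketch of looking up a single key with get, which returns an Option:
```rs
let team_name = String::from("Blue");
match scores.get(&team_name) {
    Some(score) => println!("{}: {}", team_name, score),
    None => println!("no score for {}", team_name),
}
```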
#### Updating a Hash Map
__Overwrites value__
If we insert a key and a value into a hash map and then insert that same key with a different value, the value associated with that key will be replaced.
```rs
use std::collections::HashMap;
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Blue"), 25);
println!("{:?}", scores);
```
__Only Inserting a Value If the Key Has No Value__
* It’s common to check whether a particular key has a value and, if it doesn’t, insert a value for it. Hash maps have a special API for this called entry that takes the key you want to check as a parameter. The return value of the entry method is an enum called Entry that represents a value that might or might not exist.
```rs
#![allow(unused_variables)]
fn main() {
use std::collections::HashMap;
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.entry(String::from("Yellow")).or_insert(50);
scores.entry(String::from("Blue")).or_insert(50);
println!("{:?}", scores);
// {"Blue": 10, "Yellow": 50}
}
```
__Updating a Value Based on the Old Value__
Another common use case for hash maps is to look up a key’s value and then update it based on the old value.
```rs
use std::collections::HashMap;
let text = "hello world wonderful world";
let mut map = HashMap::new();
for word in text.split_whitespace() {
let count = map.entry(word).or_insert(0);
*count += 1;
}
println!("{:?}", map);
```
### Hashing Functions
* By default, HashMap uses a “cryptographically strong” hashing function that can provide resistance to Denial of Service (DoS) attacks. This is not the fastest hashing algorithm available, but the trade-off for better security that comes with the drop in performance is worth it.
* A hasher is a type that implements the BuildHasher trait.<file_sep>+++
title = "F# Computations 2"
description = "F# Computation - Bind, Return, Map & Apply"
weight = 11
+++
## Map
Common Names: map, fmap, lift, Select
Common Operators: `<$> <!>`
What it does: Lifts a function into the elevated world
Signature: `(a->b) -> E<a> -> E<b>`.
Alternatively with the parameters reversed: `E<a> -> (a->b) -> E<b>`
Description
“map” is the generic name for something that takes a function in the normal world and transforms it into a corresponding function in the elevated world.
An alternative interpretation of map is that it is a two parameter function that takes an elevated value `E<a>` and a normal function `a->b`, and returns a new elevated value `E<b>` generated by applying the function a->b to the internal elements of `E<a>`.
```fsharp
/// map for Options
let mapOption f opt =
match opt with
| None ->
None
| Some x ->
Some (f x)
// has type : ('a -> 'b) -> 'a option -> 'b option
/// map for Lists
let rec mapList f list =
match list with
| [] ->
[]
| head::tail ->
// new head + new tail
(f head) :: (mapList f tail)
// has type : ('a -> 'b) -> 'a list -> 'b list
```
## Return
Common Names: return, pure, unit, yield, point
Common Operators: None
What it does: Lifts a single value into the elevated world
Signature: `a -> E<a>`
Description
“return” (also known as “unit” or “pure”) simply creates an elevated value from a normal value.
This function goes by many names, but I’m going to be consistent and call it return as that is the common term for it in F#, and is the term used in computation expressions.
```fsharp
// A value lifted to the world of Options
let returnOption x = Some x
// has type : 'a -> 'a option
// A value lifted to the world of Lists
let returnList x = [x]
// has type : 'a -> 'a list
```
## Apply
Common Names: apply, ap
Common Operators: <*>
What it does: Unpacks a function wrapped inside an elevated value into a lifted function `E<a> -> E<b>`
Signature: `E<(a->b)> -> E<a> -> E<b>`
Description
“apply” unpacks a function wrapped inside an elevated value (E<(a->b)>) into a lifted function `E<a> -> E<b>`
An alternative interpretation of apply is that it is a two parameter function that takes an elevated value `(E<a>)` and an elevated function `(E<(a->b)>)`, and returns a new elevated value `(E<b>)` generated by applying the function `a->b` to the internal elements of `E<a>`.
For example, if you have a one-parameter function `(E<(a->b)>)`, you can apply it to a single elevated parameter to get the output as another elevated value.
If you have a two-parameter function `(E<(a->b->c)>)`, you can use apply twice in succession with two elevated parameters to get the elevated output.
```fsharp
module Option =
// The apply function for Options
let apply fOpt xOpt =
match fOpt,xOpt with
| Some f, Some x -> Some (f x)
| _ -> None
module List =
// The apply function for lists
// [f;g] apply [x;y] becomes [f x; f y; g x; g y]
let apply (fList: ('a->'b) list) (xList: 'a list) =
[ for f in fList do
for x in xList do
yield f x ]
let add x y = x + y
let resultOption =
let (<*>) = Option.apply
(Some add) <*> (Some 2) <*> (Some 3)
// resultOption = Some 5
let resultList =
let (<*>) = List.apply
[add] <*> [1;2] <*> [10;20]
// resultList = [11; 21; 12; 22]
```
## Apply vs. Map
The combination of apply and return is considered “more powerful” than map, because if you have apply and return, you can construct map from them, but not vice versa.
Here’s how it works: to construct a lifted function from a normal function, just use return on the normal function and then apply. This gives you the same result as if you had simply done map in the first place.
```fsharp
let resultOption2 =
let (<!>) = Option.map
let (<*>) = Option.apply
add <!> (Some 2) <*> (Some 3)
// resultOption2 = Some 5
let resultList2 =
let (<!>) = List.map
let (<*>) = List.apply
add <!> [1;2] <*> [10;20]
// resultList2 = [11; 21; 12; 22]
let batman =
let (<!>) = List.map
let (<*>) = List.apply
// string concatenation using +
(+) <!> ["bam"; "kapow"; "zap"] <*> ["!"; "!!"]
// result =
// ["bam!"; "bam!!"; "kapow!"; "kapow!!"; "zap!"; "zap!!"]
```
## Zip, ZipList
Common Names: zip, zipWith, map2
Common Operators: `<*>` (in the context of ZipList world)
What it does: Combines two lists (or other enumerables) using a specified function
Signature: `E<(a->b->c)> -> E<a> -> E<b> -> E<c>` where E is a list or other enumerable type, or `E<a> -> E<b> -> E<a,b>` for the tuple-combined version.
Description
Some data types might have more than one valid implementation of apply. For example, there is another possible implementation of apply for lists, commonly called ZipList or some variant of that.
In this implementation, the corresponding elements in each list are processed at the same time, and then both lists are shifted to get the next element. That is, the list of functions [f; g] applied to the list of values [x; y] becomes the two-element list [f x; g y]
```fsharp
// alternate "zip" implementation
// [f;g] apply [x;y] becomes [f x; g y]
let rec zipList fList xList =
match fList,xList with
| [],_
| _,[] ->
// either side empty, then done
[]
| (f::fTail),(x::xTail) ->
// new head + new tail
(f x) :: (zipList fTail xTail)
// has type : ('a -> 'b) -> 'a list -> 'b list
```
## Bind
Called for let! and do! in computation expressions.
```
M<'T> * ('T -> M<'U>) -> M<'U>
```
The `let!` keyword binds the result of a call to another computation expression to a name. let! is defined by the Bind(x, f) member on the builder type.
The `do!` keyword is for calling a computation expression that returns a unit-like type (defined by the Zero member on the builder)
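As a hedged sketch (the builder and function names below are illustrative, not part of the original notes), a small option builder shows how `let!` maps onto Bind and `return` onto Return:
```fsharp
// A minimal option builder: let! desugars to Bind, return to Return
type MaybeBuilder() =
    member this.Bind(x, f) = Option.bind f x
    member this.Return(x) = Some x

let maybe = MaybeBuilder()

let addOptions xOpt yOpt =
    maybe {
        let! x = xOpt     // maybe.Bind(xOpt, fun x -> ...)
        let! y = yOpt     // maybe.Bind(yOpt, fun y -> ...)
        return x + y      // maybe.Return(x + y)
    }

// addOptions (Some 1) (Some 2) = Some 3
// addOptions (Some 1) None     = None
```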
- Bind, like map and apply, is a functional concept rather than a single concrete function
- Bind is about composing functions whose outputs are lifted into the "elevated" world
- Bind is similar to LINQ's SelectMany method
Common Names: bind, flatMap, andThen, collect, SelectMany
Common Operators: `>>=` (left to right), `=<<` (right to left )
What it does: Allows you to compose world-crossing (“monadic”) functions
Signature: `(a->E<b>) -> E<a> -> E<b>`
Alternatively with the parameters reversed: `E<a> -> (a->E<b>) -> E<b>`
Description
We frequently have to deal with functions that cross between the normal world and the elevated world.
An alternative interpretation of bind is that it is a two parameter function that takes a elevated value `E<a>` and a “monadic function” `a -> E<b>`, and returns a new elevated value `E<b>` generated by “unwrapping” the value inside the input, and running the function `a -> E<b>` against it.
For example: a function that parses a string to an int might return an `Option<int>` rather than a normal int, a function that reads lines from a file might return `IEnumerable<string>`, a function that fetches a web page might return Async<string>, and so on.
These kinds of “world-crossing” functions are recognizable by their signature `a -> E<b>`; their input is in the normal world but their output is in the elevated world. Unfortunately, this means that these kinds of functions cannot be linked together using standard composition.
```fsharp
type Int32 with
static member ParseAsOption str =
match Int32.TryParse (str:string) with
| false, _ -> None
| true, x -> Some x
let bindOption f opt =
match opt with
| Some x -> f x
| None -> None
let joinOption opt =
match opt with
| Some innerOpt -> innerOpt
| None -> None
let bindOption2 f opt = joinOption (Option.map f opt)
let input1 = Some "abcd" |> bindOption Int32.ParseAsOption
printfn "%A" input1
// None
let input2 = Some "100" |> bindOption2 Int32.ParseAsOption
printfn "%A" input2
// Some 100
let input3 = Some "200" |> Option.map Int32.ParseAsOption
printfn "%A" input3
// Some (Some 200)
let (>>=) m f = Option.bind f m
let add x y = x + y
let liftedAdd = Some add
let apply f m =
match f, m with
| Some f, Some m -> Some(f m)
| _ -> None
let (<*>) = apply
let input4 =
liftedAdd <*>
(Some "100" >>= Int32.ParseAsOption) <*>
(Some "200" >>= Int32.ParseAsOption)
printfn "%A" input4
```
<file_sep>+++
title="AWS: Labs"
description="Labs - "
weight=90
draft=true
+++
## Labs 1
AWS has tons of services available on its resources panel. Reading the documentation or watching online tutorials is a simple way to learn the key concepts, but only a hands-on approach can guarantee that you really master them and become a knowledgeable AWS expert.
### Prerequisite
* Have an AWS account
* Install AWS Cli tool
* Understand how to use the AWS CLI
* Most of the labs below are executed with the AWS CLI (version 1)
### Upgrade the AWS Cli to latest version
```bash
# Mac OS
pip3 install awscli --upgrade --user
```
### TODO - Lab 1
* Create a VPC - 10.100.0.0/16
* Create a private subnet and a public subnet
* Create a wp site with mysql db
* Test the wp
```
VPC=<vpc_name>
aws
## Attach IGW
aws ec2 attach-internet-gateway \
--vpc-id "vpc-0d6d50e43fbfb23ff" \
--internet-gateway-id "igw-00da6dd9fc8648a25" \
--region ap-southeast-2
```
### TODO - Lab 2
* Move EC2 from VPC 1 to VPC 2
```bash
aws ec2 create-image --instance-id i-04f9dcfadd1767427 --name "NSW_PROD_REDIRECT_Last" --description "NSW_PROD_REDIRECT_Last_Large"
```
<file_sep>+++
title = "Rustlang"
description = "Rustlang Notes"
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
title = "AWS: S3 - 3"
description = "How to allow specific VPC endpoints or IP addresses to access S3 bucket"
weight=7
+++
## Use Case
### Problem
Block all traffic to my Amazon Simple Storage Service (Amazon S3) bucket unless the traffic is from specific Amazon Virtual Private Cloud (VPC) endpoints or certain external IP addresses.
### Resolution
Use a bucket policy to specify which VPC endpoints or external IP addresses can access the S3 bucket.
> Note: An external IP address is a public IP address that can be from within a VPC or outside of a VPC. For example, an external IP address can be an Amazon Elastic Compute Cloud (Amazon EC2) instance's Elastic IP address, or the IP address of a VPC's NAT gateway or proxy server.
For example, the following bucket policy blocks traffic to the bucket unless the request is from specified VPC endpoints (aws:sourceVpce) or external IP addresses (aws:SourceIp). Note the following:
* To use this policy with the aws:sourceVpce condition, you must have a VPC endpoint for Amazon S3 attached to the route table of the EC2 instance's subnet. The VPC endpoint must be in the same AWS Region as the bucket.
* To allow users to perform S3 actions on the bucket from the VPC endpoints or IP addresses, you must explicitly allow the user-level permissions. You can explicitly allow user-level permissions on either an AWS Identity and Access Management (IAM) policy or another statement in the bucket policy.
__Warning__: This example bucket policy explicitly denies access to any requests outside the allowed VPC endpoints or IP addresses.
```json
{
"Version": "2012-10-17",
"Id": "VPCe and SourceIP",
"Statement": [{
"Sid": "VPCe and SourceIP",
"Effect": "Deny",
"Principal": "*",
"Action": "s3:*",
"Resource": [
"arn:aws:s3:::awsexamplebucket",
"arn:aws:s3:::awsexamplebucket/*"
],
"Condition": {
"StringNotEquals": {
"aws:sourceVpce": [
"vpce-1111111",
"vpce-2222222"
]
},
"NotIpAddress": {
"aws:SourceIp": [
"11.11.11.11/32",
"22.22.22.22/32"
]
}
}
}]
}
```
Allow specific users (within the same AWS account) access to the bucket even if the users aren't sending requests from the allowed VPC endpoints or IP addresses
```json
"StringNotLike": {
"aws:userId": [
"AROAEXAMPLEID:*",
"AIDAEXAMPLEID",
"111111111111"
]
}
```
* AROAEXAMPLEID is the role ID of an IAM role that you want to allow
* AIDAEXAMPLEID is the user ID of an IAM user that you want to allow
* 111111111111 is the AWS account ID of the bucket, which represents the account's root credentials<file_sep>+++
title = "C# Console App"
description = "How to create C# console application without Visual Studio"
weight=10
+++
> __C# is an elegant and type-safe object-oriented language that enables developers to build a variety of secure and robust applications that run on the .NET Framework. C# syntax is highly expressive, yet it is also simple and easy to learn. The curly-brace syntax of C# will be instantly recognizable to anyone familiar with C, C++ or Java.__
Here I am going to demonstrate how to create a simple .NET project without Visual Studio.
### Prerequisites
* .NET Framework has been installed on your PC or laptop, and the version of .NET Framework on your PC is 2.0 or later
* Assume the path of the .NET Framework is __c:\Windows\Microsoft.NET\Framework\v2.0.50727__
### Create a project
* Create a project named `csharp-project`
```bat
md csharp-project
cd csharp-project
md bin src
echo.>csharp-project.proj
echo.>src\helloworld.cs
```
### Update config file
* Update config file `csharp-project.proj` as below
```xml
<Project DefaultTargets = "Compile"
xmlns="http://schemas.microsoft.com/developer/msbuild/2003" >
<!-- Set the application name as a property -->
<PropertyGroup>
<appname>csharp-app</appname>
</PropertyGroup>
<!-- Specify the inputs by type and file name -->
<ItemGroup>
<CSFile Include = "src\helloworld.cs"/>
</ItemGroup>
<Target Name = "Compile">
<!-- Run the Visual C# compilation using input files of type CSFile -->
<CSC
Sources = "@(CSFile)"
OutputAssembly = "bin\$(appname).exe">
<!-- Set the OutputAssembly attribute of the CSC task
to the name of the executable file that is created -->
<Output
TaskParameter = "OutputAssembly"
ItemName = "EXEFile" />
</CSC>
<!-- Log the file name of the output file -->
<Message Text="The output file is @(EXEFile)"/>
</Target>
</Project>
```
### Create the main program
* Open the file `src\helloworld.cs` created earlier and copy the following code into it
```csharp
public class Hello
{
public static void Main()
{
System.Console.WriteLine("Hello, World!");
}
}
```
### Run the console app
* Compile with MSBuild & Run
```bat
c:\Windows\Microsoft.NET\Framework\v2.0.50727\MSBuild
bin\csharp-app.exe
```
<file_sep>+++
title = "AWS: EKS - 4"
description = "VPC, Networking"
weight=14
+++
## EKS - Part 4
### VPC Tagging
* Key: The <cluster-name> value matches your Amazon EKS cluster's name.
* Value: The shared value allows more than one cluster to use this VPC.
Key | Value
---|----
kubernetes.io/cluster/\<cluster-name\>| shared
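* A hedged sketch of applying this tag with the AWS CLI (the VPC ID below is a placeholder; the cluster name reuses pg-prd from the other EKS notes):

        aws ec2 create-tags \
            --resources vpc-0123456789abcdef0 \
            --tags Key=kubernetes.io/cluster/pg-prd,Value=shared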
### Load Balancing
Amazon EKS supports the Network Load Balancer and the Classic Load Balancer for pods running on Amazon EC2 instance worker nodes through the Kubernetes service of type LoadBalancer. Classic Load Balancers and Network Load Balancers are not supported for pods running on AWS Fargate (Fargate).
* All subnets (public and private) should have this tag.
Key | Value
---|----
kubernetes.io/cluster/\<cluster-name\>| shared
* Public subnet tagging
Key | Value
---|----
kubernetes.io/role/elb| 1
* Private subnet tagging
Key | Value
---|----
kubernetes.io/role/internal-elb| 1
### ALB Ingress Controller
The AWS ALB Ingress Controller for Kubernetes is a controller that triggers the creation of an Application Load Balancer (ALB) and the necessary supporting AWS resources whenever an Ingress resource is created on the cluster with the kubernetes.io/ingress.class: alb annotation. The Ingress resource configures the ALB to route HTTP or HTTPS traffic to different pods within the cluster. The ALB Ingress Controller is supported for production workloads running on Amazon EKS clusters.
* To ensure that your Ingress objects use the ALB Ingress Controller, add the following annotation to your Ingress specification.
annotations:
kubernetes.io/ingress.class: alb
* Create an IAM OIDC provider and associate it with your cluster.
CLUSTER_NAME="pg-prd"
REGION_CODE="ap-southeast-2"
eksctl utils associate-iam-oidc-provider \
--region ${REGION_CODE} \
--cluster ${CLUSTER_NAME} \
--approve
* Create an IAM policy called __ALBIngressControllerIAMPolicy__ for the ALB Ingress Controller pod that allows it to make calls to AWS APIs on your behalf.
aws iam create-policy \
--policy-name ALBIngressControllerIAMPolicy \
--policy-document https://raw.githubusercontent.com/kubernetes-sigs/aws-alb-ingress-controller/v1.1.4/docs/examples/iam-policy.json
* Create a Kubernetes service account named __alb-ingress-controller__ in the kube-system namespace, a cluster role, and a cluster role binding for the ALB Ingress Controller to use with the following command.
kubectl apply -f https://raw.githubusercontent.com/kubernetes-sigs/aws-alb-ingress-controller/v1.1.4/docs/examples/rbac-role.yaml
* Create an IAM role for the ALB ingress controller and attach the role to the service account created in the previous step.
CLUSTER_NAME="pg-prd"
REGION_CODE="ap-southeast-2"
eksctl create iamserviceaccount \
--region ${REGION_CODE} \
--name alb-ingress-controller \
--namespace kube-system \
--cluster ${CLUSTER_NAME} \
--attach-policy-arn arn:aws:iam::202756970286:policy/ALBIngressControllerIAMPolicy \
--override-existing-serviceaccounts \
--approve
* Deploy the ALB Ingress Controller
kubectl apply -f https://raw.githubusercontent.com/kubernetes-sigs/aws-alb-ingress-controller/v1.1.4/docs/examples/alb-ingress-controller.yaml
kubectl get clusterroles | grep aws-alb-ingress-controller
* Add a line for the cluster name after the __--ingress-class=alb__ line.
spec:
containers:
- args:
- --ingress-class=alb
- --cluster-name=prod
- --aws-vpc-id=vpc-03468a8157edca5bd
- --aws-region=region-code
* Log the ingress controller
kubectl logs -n kube-system deployment.apps/alb-ingress-controller
* Deploy a sample application
kubectl apply -f https://raw.githubusercontent.com/kubernetes-sigs/aws-alb-ingress-controller/v1.1.4/docs/examples/2048/2048-namespace.yaml
kubectl apply -f https://raw.githubusercontent.com/kubernetes-sigs/aws-alb-ingress-controller/v1.1.4/docs/examples/2048/2048-deployment.yaml
kubectl apply -f https://raw.githubusercontent.com/kubernetes-sigs/aws-alb-ingress-controller/v1.1.4/docs/examples/2048/2048-service.yaml
kubectl apply -f https://raw.githubusercontent.com/kubernetes-sigs/aws-alb-ingress-controller/v1.1.4/docs/examples/2048/2048-ingress.yaml
* Play the game on browser
http://07f34453-2048game-2048ingr-6fa0-1986376393.ap-southeast-2.elb.amazonaws.com/
<file_sep>+++
title = "NeoVim"
description = "Empower the vi with NeoVim"
+++
__This article will show you how to empower your vi with NeoVim.__
## NeoVim
> [Neovim](https://github.com/neovim/neovim) is a refactor, and sometimes redactor, in the tradition of Vim (which itself derives from Stevie). It is not a rewrite but a continuation and extension of Vim. Many clones and derivatives exist, some very clever—but none are Vim. Neovim is built for users who want the good parts of Vim, and more.
### Install Neovim
Please follow the installation instructions [here](https://github.com/neovim/neovim/wiki/Installing-Neovim)
### Vim Plugin
Please follow the installation instructions from [vim-plug's README](https://github.com/junegunn/vim-plug)
### Init
- Create a file `init.vim`
```vim
:set number
:set relativenumber
:set autoindent
:set tabstop=4
:set shiftwidth=4
:set smarttab
:set softtabstop=4
:set mouse=a
call plug#begin()
Plug 'http://github.com/tpope/vim-surround' " Surrounding ysw)
Plug 'https://github.com/preservim/nerdtree' " NerdTree
Plug 'https://github.com/tpope/vim-commentary' " For Commenting gcc & gc
Plug 'https://github.com/vim-airline/vim-airline' " Status bar
Plug 'https://github.com/lifepillar/pgsql.vim' " PSQL Pluging needs :SQLSetType pgsql.vim
Plug 'https://github.com/ap/vim-css-color' " CSS Color Preview
Plug 'https://github.com/rafi/awesome-vim-colorschemes' " Retro Scheme
Plug 'https://github.com/neoclide/coc.nvim' " Auto Completion
Plug 'https://github.com/ryanoasis/vim-devicons' " Developer Icons
Plug 'https://github.com/tc50cal/vim-terminal' " Vim Terminal
Plug 'https://github.com/preservim/tagbar' " Tagbar for code navigation
Plug 'https://github.com/terryma/vim-multiple-cursors' " CTRL + N for multiple cursors
Plug 'https://github.com/EdenEast/nightfox.nvim' " A highly customizable theme
Plug 'ctrlpvim/ctrlp.vim'
Plug 'kassio/neoterm'
set encoding=UTF-8
call plug#end()
nnoremap <C-f> :NERDTreeFocus<CR>
nnoremap <C-n> :NERDTree<CR>
nnoremap <C-t> :NERDTreeToggle<CR>
nnoremap <C-l> :call CocActionAsync('jumpDefinition')<CR>
nmap <F8> :TagbarToggle<CR>
:set completeopt-=preview " For No Previews
:colorscheme nightfox
let g:NERDTreeDirArrowExpandable="+"
let g:NERDTreeDirArrowCollapsible="~"
" --- Just Some Notes ---
" :PlugClean :PlugInstall :UpdateRemotePlugins
"
" :CocInstall coc-python
" :CocInstall coc-clangd
" :CocInstall coc-snippets
" :CocCommand snippets.edit... FOR EACH FILE TYPE
" air-line
let g:airline_powerline_fonts = 1
if !exists('g:airline_symbols')
let g:airline_symbols = {}
endif
" airline symbols
let g:airline_left_sep = ''
let g:airline_left_alt_sep = ''
let g:airline_right_sep = ''
let g:airline_right_alt_sep = ''
let g:airline_symbols.branch = ''
let g:airline_symbols.readonly = ''
let g:airline_symbols.linenr = ''
inoremap <expr> <Tab> pumvisible() ? coc#_select_confirm() : "<Tab>"
```
- Install plugins
- Open nvim
- Enter command mode and enter `PlugInstall`
### Trouble shooting
#### Fonts
- Mac
- Install Nerd fonts
- [Follow the instruction to install Powerline Nerd Font](https://webinstall.dev/nerdfont/)
- Download other fonts, e.g. Caskaydia Cove Nerd Font
```
cd ~/Library/Fonts
open .
# Copy font files to fonts folder
# Caskaydia Cove Nerd Font Complete Regular.otf
# Caskaydia Cove Nerd Font Complete Mono Regular.otf
```
#### coc.nvim
- Windows pwsh
```powershell
cd <coc.nvim_folder>
sudo nvm use 14.19.3
npm install -g yarn
yarn install
```
- mac
```
cd ~/.local/share/nvim/plugged/coc.nvim
yarn install
```<file_sep>#![allow(unused_variables)]
fn main() {
let string1 = String::from("abcd");
let string2 = "xyz";
let result = longest(string1.as_str(), string2);
println!("The longest string is {}", result);
}
fn longest<'a>(x: &'a str, y: &'a str) -> &'a str {
if x.len() > y.len() {
x
} else {
y
}
}
// #![allow(unused_variables)]
// fn main() {
// {
// let r;
// {
// let x = 5;
// r = &x;
// }
// println!("r: {}", r);
// }
// }
// #![allow(unused_variables)]
// fn main() {
// enum Coin {
// Penny,
// Nickel,
// Dime,
// Quarter,
// }
// fn value_in_cents(coin: Coin) -> u8 {
// match coin {
// Coin::Penny => 1,
// Coin::Nickel => 5,
// Coin::Dime => 10,
// Coin::Quarter => 25,
// }
// }
// }
// #[derive(Debug)]
// struct Rectangle {
// width: u32,
// height: u32,
// }
// impl Rectangle {
// fn area(&self) -> u32 {
// self.width * self.height
// }
// fn can_hold(&self, other: &Rectangle) -> bool {
// self.width > other.width && self.height > other.height
// }
// }
// fn main() {
// let rect1 = Rectangle {
// width: 30,
// height: 50,
// };
// println!(
// "The area of the rectangle is {} square pixels.",
// rect1.area()
// );
// let rect2 = Rectangle {
// width: 10,
// height: 40,
// };
// let rect3 = Rectangle {
// width: 60,
// height: 45,
// };
// println!("Can rect1 hold rect2? {}", rect1.can_hold(&rect2));
// println!("Can rect1 hold rect3? {}", rect1.can_hold(&rect3));
// }
// fn main() {
// // let x = 5;
// // let x = x + 1;
// // let x = x * 2;
// // println!("The value of x is: {}", x);
// // let a = 8;
// // let b = a;
// // println!(" a = {} ", a )
// let s = String::from("hello");
// let slice1 = &s[0..2];
// let slice2 = &s[..2];
// let slice3 = &s[..];
// println!("{}", slice1 );
// println!( "{}",slice2);
// println!( "{}", slice3 );
// }
<file_sep>+++
date = "2018-12-04T14:59:31+11:00"
title = "Amazon Linux 2 "
description = "Amazon Linux 2 - Setup & Configure"
+++
## Amazon Linux 2
Amazon Linux 2 is the next generation of Amazon Linux, a Linux server operating system from Amazon Web Services (AWS). It provides a secure, stable, and high performance execution environment to develop and run cloud and enterprise applications. With Amazon Linux 2, you get an application environment that offers long term support with access to the latest innovations in the Linux ecosystem. Amazon Linux 2 is provided at no additional charge.
### Package update
sudo yum update
### Get system info
cat /etc/image-id
cat /etc/system-release
### Mount a volume (EBS)
```bash
# Get drives info
sudo lsblk
# Get volume info
sudo file -s /dev/xvdf
# Format volume
sudo mkfs -t xfs /dev/xvdf
# Mount volume to folder data
sudo mkdir /data
sudo mount /dev/xvdf /data
```
### Extend the EBS
```bash
sudo xfs_growfs -d /data
```
### Auto attached volume
```bash
sudo cp /etc/fstab /etc/fstab.orig
sudo lsblk -o +UUID
# You will see similar output below
# NAME MAJ:MIN RM SIZE RO TYPE MOUNTPOINT UUID
# xvda 202:0 0 20G 0 disk
# └─xvda1 202:1 0 20G 0 part / e75a1891-3463-448b-8f59-5e3353af90ba
# xvdb 202:16 0 60G 0 disk /data 897b5130-c5c1-4ac9-aae3-699d1eaa9fd5
# Use vim to edit /etc/fstab
sudo vim /etc/fstab
# Add following line
# UUID=897b5130-c5c1-4ac9-aae3-699d1eaa9fd5 /data xfs defaults,nofail 0 2
# Verify the mounting
sudo umount /data
sudo mount -a
lsblk
```
### Add new user
* Add new user without password
```bash
sudo adduser new_user
## Add user without password
sudo adduser new_user --disabled-password
```
* Switch to new user
```bash
sudo su - new_user
```
* Set password for new user
```bash
sudo passwd new_user
```
* Allow the new user to use sudo
```bash
sudo usermod -aG wheel new_user
```
### Install MySql
* Install MySql 5.7
```bash
# Add repo
sudo wget https://dev.mysql.com/get/mysql57-community-release-el7-11.noarch.rpm
sudo yum localinstall mysql57-community-release-el7-11.noarch.rpm
# Install mysql
sudo yum install mysql-community-server
```
* MySql configuration file sits in /etc/my.cnf
* Update data directory
```bash
# backup original one
sudo cp /etc/my.cnf /etc/my.cnf.orig
```
* Use vim to update the data directory
```bash
datadir=/data/mysql
```
#### Start MySql as service
* Install polkit before starting the service, otherwise you will get an error
```bash
sudo yum install polkit
```
* Enable & Start mysql
```bash
sudo systemctl enable mysqld.service
sudo systemctl start mysqld.service
```
* Find the temporary password created for root in /var/log/mysql.log
```bash
sudo cat /var/log/mysql.log | grep "temporary password"
# output
# [Note] A temporary password is generated for root@localhost: l<C-eX&GW8?m
```
#### Reset root password
sudo mysql_secure_installation
#### Create remote login credentials
```sql
CREATE USER 'user_id'@'localhost' IDENTIFIED BY 'your_secret';
CREATE USER 'user_id'@'%' IDENTIFIED BY 'your_secret';
GRANT ALL ON *.* TO 'user_id'@'localhost';
GRANT ALL ON *.* TO 'user_id'@'%';
```
### Install AWS CLI
```bash
# Install aws cli without sudo
curl "https://s3.amazonaws.com/aws-cli/awscli-bundle.zip" -o "awscli-bundle.zip"
unzip awscli-bundle.zip
./awscli-bundle/install -b ~/bin/aws
# Configure cli
aws configure
AWS Access Key ID [None]: <KEY>
AWS Secret Access Key [None]: <KEY>
Default region name [None]: region-code
Default output format [None]: json
```
### Install EKS
```bash
curl --silent --location "https://github.com/weaveworks/eksctl/releases/latest/download/eksctl_$(uname -s)_amd64.tar.gz" | tar xz -C /tmp
sudo mv /tmp/eksctl /usr/local/bin
eksctl version
```
### Install kubectl
```bash
curl -o kubectl https://amazon-eks.s3.us-west-2.amazonaws.com/1.15.10/2020-02-22/bin/linux/amd64/kubectl
chmod +x ./kubectl
mkdir -p $HOME/bin && cp ./kubectl $HOME/bin/kubectl && export PATH=$PATH:$HOME/bin
```
### Install EPEL repository
```bash
## Amazon Linux 1 / 2
sudo yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
sudo yum update
## Amazon Linux 2
sudo amazon-linux-extras install epel
```
<file_sep>+++
title = "DB & SQL"
description = "RMDB & SQL Notes"
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
title="Template"
description="Template"
weight=10
+++
### Template
* Package template implements data-driven templates for generating textual output.
* Templates are executed by applying them to a data structure. Annotations in the template refer to elements of the data structure (typically a field of a struct or a key in a map) to control execution and derive values to be displayed. Execution of the template walks the structure and sets the cursor, represented by a period '.' and called "dot", to the value at the current location in the structure as execution proceeds.
* The input text for a template is UTF-8-encoded text in any format. "Actions"--data evaluations or control structures--are delimited by "{{" and "}}"; all text outside actions is copied to the output unchanged. Except for raw strings, actions may not span newlines, although comments can.
#### Pipelines
* A pipeline is a possibly chained sequence of "commands". A command is a simple value (argument) or a function or method call, possibly with multiple arguments.
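As a hedged sketch (the template text and field name are illustrative), a pipeline that pipes a field into the built-in printf function:
```go
package main

import (
	"log"
	"os"
	"text/template"
)

func main() {
	// {{.Name | printf "%q"}} is a pipeline: the value of .Name is piped into printf.
	t := template.Must(template.New("pipe").Parse(`Hello, {{.Name | printf "%q"}}!`))
	if err := t.Execute(os.Stdout, struct{ Name string }{Name: "Gopher"}); err != nil {
		log.Fatal(err)
	}
	// Output: Hello, "Gopher"!
}
```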
#### Example of letter code
```go
func main() {
// Define a template.
const letter = `
Dear {{.Name}},
{{if .Attended}}
It was a pleasure to see you at the wedding.
{{- else}}
It is a shame you couldn't make it to the wedding.
{{- end}}
{{with .Gift -}}
Thank you for the lovely {{.}}.
{{end}}
Best wishes,
Josie
`
// Prepare some data to insert into the template.
type Recipient struct {
Name, Gift string
Attended bool
}
var recipients = []Recipient{
{"<NAME>", "bone china tea set", true},
{"<NAME>", "moleskin pants", false},
{"<NAME>", "", false},
}
// Create a new template and parse the letter into it.
t := template.Must(template.New("letter").Parse(letter))
// Execute the template for each recipient.
for _, r := range recipients {
err := t.Execute(os.Stdout, r)
if err != nil {
log.Println("executing template:", err)
}
}
}
```
#### Glob example
* demonstrate loading a set of templates from a directory
```go
// templateFile defines the contents of a template to be stored in a file, for testing.
type templateFile struct {
name string
contents string
}
func createTestDir(files []templateFile) string {
dir, err := ioutil.TempDir("", "template")
if err != nil {
log.Fatal(err)
}
for _, file := range files {
f, err := os.Create(filepath.Join(dir, file.name))
if err != nil {
log.Fatal(err)
}
defer f.Close()
_, err = io.WriteString(f, file.contents)
if err != nil {
log.Fatal(err)
}
}
return dir
}
func main() {
// Here we create a temporary directory and populate it with our sample
// template definition files; usually the template files would already
// exist in some location known to the program.
dir := createTestDir([]templateFile{
// T0.tmpl is a plain template file that just invokes T1.
{"T0.tmpl", `T0 invokes T1: ({{template "T1"}})`},
// T1.tmpl defines a template, T1 that invokes T2.
{"T1.tmpl", `{{define "T1"}}T1 invokes T2: ({{template "T2"}}){{end}}`},
// T2.tmpl defines a template T2.
{"T2.tmpl", `{{define "T2"}}This is T2{{end}}`},
})
// Clean up after the test; another quirk of running as an example.
defer os.RemoveAll(dir)
// pattern is the glob pattern used to find all the template files.
pattern := filepath.Join(dir, "*.tmpl")
// Here starts the example proper.
// T0.tmpl is the first name matched, so it becomes the starting template,
// the value returned by ParseGlob.
tmpl := template.Must(template.ParseGlob(pattern))
err := tmpl.Execute(os.Stdout, nil)
if err != nil {
log.Fatalf("template execution: %s", err)
}
}
```
#### Helper example
* demonstrates one way to share some templates and use them in different contexts. In this variant we add multiple driver templates by hand to an existing bundle of templates.
```go
// templateFile defines the contents of a template to be stored in a file, for testing.
type templateFile struct {
name string
contents string
}
func createTestDir(files []templateFile) string {
dir, err := ioutil.TempDir("", "template")
if err != nil {
log.Fatal(err)
}
for _, file := range files {
f, err := os.Create(filepath.Join(dir, file.name))
if err != nil {
log.Fatal(err)
}
defer f.Close()
_, err = io.WriteString(f, file.contents)
if err != nil {
log.Fatal(err)
}
}
return dir
}
func main() {
// Here we create a temporary directory and populate it with our sample
// template definition files; usually the template files would already
// exist in some location known to the program.
dir := createTestDir([]templateFile{
// T1.tmpl defines a template, T1 that invokes T2.
{"T1.tmpl", `{{define "T1"}}T1 invokes T2: ({{template "T2"}}){{end}}`},
// T2.tmpl defines a template T2.
{"T2.tmpl", `{{define "T2"}}This is T2{{end}}`},
})
// Clean up after the test; another quirk of running as an example.
defer os.RemoveAll(dir)
// pattern is the glob pattern used to find all the template files.
pattern := filepath.Join(dir, "*.tmpl")
// Here starts the example proper.
// Load the helpers.
templates := template.Must(template.ParseGlob(pattern))
// Add one driver template to the bunch; we do this with an explicit template definition.
_, err := templates.Parse("{{define `driver1`}}Driver 1 calls T1: ({{template `T1`}})\n{{end}}")
if err != nil {
log.Fatal("parsing driver1: ", err)
}
// Add another driver template.
_, err = templates.Parse("{{define `driver2`}}Driver 2 calls T2: ({{template `T2`}})\n{{end}}")
if err != nil {
log.Fatal("parsing driver2: ", err)
}
// We load all the templates before execution. This package does not require
// that behavior but html/template's escaping does, so it's a good habit.
err = templates.ExecuteTemplate(os.Stdout, "driver1", nil)
if err != nil {
log.Fatalf("driver1 execution: %s", err)
}
err = templates.ExecuteTemplate(os.Stdout, "driver2", nil)
if err != nil {
log.Fatalf("driver2 execution: %s", err)
}
}
```<file_sep>+++
title = "Shell"
description = "Shell, Batch & Powershell Notes"
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
date = "2016-12-07T16:56:21+11:00"
title = "Vue 2 Admin Project"
description="Vue2Admin is a fully responsive admin template that is inspired by AdminLTE"
+++
## Summary
**Vue2Admin** is a fully responsive admin template that is inspired by [AdminLTE](https://almsaeedstudio.com).
## Features
* This template is built with Vue 2 from the start.
* It includes Vue Resource and Vuex as well.
* The plugins for this project are loaded with a script loader.
## Structure of Reetek Vue2Admin
``` ini
path\to\vue2admin
+---build <-// webpack files
+---config <-// configuration of dev or prod environment
+---screenshots
+---src <-// vue components
| +---components
| | +---charts
| | +---dashboard
| | +---forms
| | +---mailbox
| | +---misc
| | +---pages
| | +---tables
| | +---ui
| | \---widget
| \---filters
+---static <-// css, dump data, fonts, image files and plugins
| +---css
| +---data
| +---fonts
| +---img
| | +---credit
| | \---stock
| \---js
| \---plugins <-// plugins for admin dashboard
| +---AdminLTE
| +---bootstrap
| +---bootstrap-slider
| +---bootstrap-wysihtml5
| +---chartjs
| +---ckeditor
| + ...
\---test
+---e2e
\---unit
```
## Screenshots
> 
> 
## Browse [Repository](https://github.com/harryho/vue2admin.git)
<file_sep>+++
title = "AWS: Mesh - 1"
description = "Getting started"
draft="true"
+++
## Mesh
AWS App Mesh is a service mesh based on the Envoy proxy that helps you monitor and control services. App Mesh standardizes how your services communicate, giving you end-to-end visibility into and helping to ensure high-availability for your applications.
#### Create a mesh and virtual service
A service mesh is a logical boundary for network traffic between the services that reside within it.
- A mesh named pgMesh, since all of the services in the scenario are registered to the pgMesh.local namespace.
- A virtual service named svc1.pgMesh.local, since the virtual service represents a service that is discoverable with that name, and you don't want to change your code to reference another name. A virtual service named svc2.pgMesh.local is added in a later step.
aws appmesh create-mesh --mesh-name pgMesh
- Create a virtual service with the create-virtual-service command.
aws appmesh create-virtual-service \
--mesh-name pgMesh \
--virtual-service-name svc1.pgMesh.local \
--spec {}
#### Create a virtual node
A virtual node acts as a logical pointer to an actual service.
Create a virtual node named svcNode1, since one of the virtual nodes represents the actual service named svcNode1. The actual service that the virtual node represents is discoverable through DNS with a hostname of svc1.pgMesh.local. Alternately, you can discover actual services using AWS Cloud Map. The virtual node will listen for traffic using the HTTP/2 protocol on port 80.
* Create a file named create-virtual-node-svc1.json with the following contents:
```json
{
"meshName": "pgMesh",
"spec": {
"listeners": [
{
"portMapping": {
"port": 80,
"protocol": "http2"
}
}
],
"serviceDiscovery": {
"dns": {
"hostname": "svcNode1.pgMesh.local"
}
}
},
"virtualNodeName": "svcNode1"
}
```
* Create the virtual node with the create-virtual-node command using the JSON file as input.
aws appmesh create-virtual-node --cli-input-json file://create-virtual-node-svc1.json
#### Create a virtual router and route
Virtual routers route traffic for one or more virtual services within your mesh.
Create the following resources:
> A virtual router named svcNode1, since the svcNode1.pgMesh.local virtual service doesn't initiate outbound communication with any other service. Remember that the virtual service that you created previously is an abstraction of your actual svc1.pgMesh.local service. The virtual service sends traffic to the virtual router. The virtual router will listen for traffic using the HTTP/2 protocol on port 80. Other protocols are also supported.
> A route named svcNode1. It will route 100 percent of its traffic to the svcNode1 virtual node. You'll change the weight in a later step once you've added the svcNode1v2 virtual node. Though not covered in this guide, you can add additional filter criteria for the route and add a retry policy to cause the Envoy proxy to make multiple attempts to send traffic to a virtual node when it experiences a communication problem.
#### Create a virtual router
Create a file named create-virtual-router.json with the following contents:
```json
{
"meshName": "pgMesh",
"spec": {
"listeners": [
{
"portMapping": {
"port": 80,
"protocol": "http2"
}
}
]
},
"virtualRouterName": "svcNode1"
}
```
* Create the virtual router with the create-virtual-router command using the JSON file as input.
aws appmesh create-virtual-router --cli-input-json file://create-virtual-router.json
* Create a route.
- Create a file named create-route.json with the following contents:
```json
{
"meshName" : "pgMesh",
"routeName" : "svcNode1",
"spec" : {
"httpRoute" : {
"action" : {
"weightedTargets" : [
{
"virtualNode" : "svcNode1",
"weight" : 100
}
]
},
"match" : {
"prefix" : "/"
}
}
},
"virtualRouterName" : "svcNode1"
}
```
* Create the route with the create-route command using the JSON file as input.
aws appmesh create-route --cli-input-json file://create-route.json
#### Review and create
Review the settings against the previous instructions.
* Review the settings of the mesh you created with the describe-mesh command.
aws appmesh describe-mesh --mesh-name pgMesh
* Review the settings of the virtual service that you created with the describe-virtual-service command.
aws appmesh describe-virtual-service --mesh-name pgMesh --virtual-service-name svc1.pgMesh.local
* Review the settings of the virtual node that you created with the describe-virtual-node command.
aws appmesh describe-virtual-node --mesh-name pgMesh --virtual-node-name svcNode1
* Review the settings of the virtual router that you created with the describe-virtual-router command.
aws appmesh describe-virtual-router --mesh-name pgMesh --virtual-router-name svcNode1
* Review the settings of the route that you created with the describe-route command.
aws appmesh describe-route --mesh-name pgMesh \
--virtual-router-name svcNode1 --route-name svcNode1
#### Create additional resources
Create one virtual node named svcNode1v2 and another named svcNode2. Both virtual nodes listen for requests over HTTP/2 port 80. For the svcNode2 virtual node, configure a backend of svc1.pgMesh.local, since all outbound traffic from the svcNode2 virtual node is sent to the virtual service named svc1.pgMesh.local. Though not covered in this guide, you can also specify a file path to write access logs to for a virtual node.
Create one additional virtual service named svc2.pgMesh.local, which will send all traffic directly to the svcNode2 virtual node.
Update the svcNode1 route that you created in a previous step to send 75 percent of its traffic to the svcNode1 virtual node and 25 percent of its traffic to the svcNode1v2 virtual node. Over time, you can continue to modify the weights until svcNode1v2 receives 100 percent of the traffic.
Once all traffic is sent to svcNode1v2, you can deprecate the svcNode1 virtual node and actual service. As you change weights, your code doesn't require any modification, because the svc1.pgMesh.local virtual and actual service names don't change. Recall that the svc1.pgMesh.local virtual service sends traffic to the virtual router, which routes the traffic to the virtual nodes. The service discovery names for the virtual nodes can be changed at any time.
* Create the svcNode1v2 virtual node.
* Create a file named create-virtual-node-svcnode1v2.json with the following contents:
```json
{
"meshName": "pgMesh",
"spec": {
"listeners": [
{
"portMapping": {
"port": 80,
"protocol": "http2"
}
}
],
"serviceDiscovery": {
"dns": {
"hostname": "svcNode1v2.pgMesh.local"
}
}
},
"virtualNodeName": "svcNode1v2"
}
```
* Create the virtual node.
aws appmesh create-virtual-node --cli-input-json file://create-virtual-node-svcnode1v2.json
* Create the svcNode2 virtual node.
* Create a file named create-virtual-node-svc2.json with the following contents:
```json
{
"meshName" : "pgMesh",
"spec" : {
"backends" : [
{
"virtualService" : {
"virtualServiceName" : "svc1.pgMesh.local"
}
}
],
"listeners" : [
{
"portMapping" : {
"port" : 80,
"protocol" : "http2"
}
}
],
"serviceDiscovery" : {
"dns" : {
"hostname" : "svc2.pgMesh.local"
}
}
},
"virtualNodeName" : "svcNode2"
}
```
* Create the virtual node
aws appmesh create-virtual-node --cli-input-json file://create-virtual-node-svc2.json
Update the svc1.pgMesh.local virtual service that you created in a previous step to send its traffic to the svcNode1 virtual router. When the virtual service was originally created, it didn't send traffic anywhere, since the svcNode1 virtual router hadn't been created yet.
Create a file named update-virtual-service.json with the following contents:
```json
{
"meshName" : "pgMesh",
"spec" : {
"provider" : {
"virtualRouter" : {
"virtualRouterName" : "svcNode1"
}
}
},
"virtualServiceName" : "svc1.pgMesh.local"
}
```
* Update the virtual service with the update-virtual-service command.
aws appmesh update-virtual-service --cli-input-json file://update-virtual-service.json
* Update the svcNode1 route that you created in a previous step.
- Create a file named update-route.json with the following contents:
```json
{
"meshName" : "pgMesh",
"routeName" : "svcNode1",
"spec" : {
"http2Route" : {
"action" : {
"weightedTargets" : [
{
"virtualNode" : "svcNode1",
"weight" : 75
},
{
"virtualNode" : "svcNode1v2",
"weight" : 25
}
]
},
"match" : {
"prefix" : "/"
}
}
},
"virtualRouterName" : "svcNode1"
}
```
* Update the route with the update-route command.
aws appmesh update-route --cli-input-json file://update-route.json
* Create the svcNode2 virtual service.
- Create a file named create-virtual-svc2.json with the following contents:
```json
{
"meshName" : "pgMesh",
"spec" : {
"provider" : {
"virtualNode" : {
"virtualNodeName" : "svcNode2"
}
}
},
"virtualServiceName" : "svc2.pgMesh.local"
}
```
* Create the virtual service.
aws appmesh create-virtual-service --cli-input-json file://create-virtual-svc2.json
#### Mesh summary
> Before you created the service mesh, you had three actual services named svc2.pgMesh.local, svc1.pgMesh.local, and svcNode1v2.pgMesh.local. In addition to the actual services, you now have a service mesh that contains the following resources that represent the actual services:
> Two virtual services. The proxy sends all traffic from the svc2.pgMesh.local virtual service to the svc1.pgMesh.local virtual service through a virtual router.
> Three virtual nodes named svcNode2, svcNode1, and svcNode1v2. The Envoy proxy uses the service discovery information configured for the virtual nodes to look up the IP addresses of the actual services.
> One virtual router with one route that instructs the Envoy proxy to route 75 percent of inbound traffic to the svcNode1 virtual node and 25 percent of the traffic to the svcNode1v2 virtual node.
<file_sep>+++
title = "Powershell Note - 1"
description = "Introduction of Powershell "
+++
> PowerShell is a task-based command-line shell and scripting language built on .NET. PowerShell helps system administrators and power-users rapidly automate tasks that manage operating systems (Linux, macOS, and Windows) and processes.
## Prerequisites
* The OS of Windows 7 or a later version
* Install PowerShell 4 or a later version. You can find it on the [Microsoft](https://www.microsoft.com) website
* Have basic computer knowledge
## Launch PS command prompt
* Type command on windows command prompt: **powershell**
### Get PS Version
* Type **$psversiontable**
```
PS C:\>$psversiontable
## You might see sth below
Name Value
---- -----
PSVersion 4.0
WSManStackVersion 3.0
SerializationVersion 1.1.0.1
CLRVersion 4.0.30319.42000
BuildVersion 6.3.9600.18773
PSCompatibleVersions {1.0, 2.0, 3.0, 4.0}
PSRemotingProtocolVersion 2.2
```
### Install & Uninstall service
```ps
# Install service
New-Service -Name "Your_Service_Name" -BinaryPathName "C:\path_to_your_service\your_service.exe -k netsvcs"
# Uninstall service
(Get-WmiObject -Class Win32_Service -Filter "Name='Your_Service_Name'").delete()
```
### Create new login & pass
```ps
$Username = 'domain\username'
$PassTxt = '<PASSWORD>'
$Password = ConvertTo-SecureString -AsPlainText $PassTxt -Force
set-executionpolicy remotesigned;
New-LocalUser $Username -Password $Password -FullName $Username -Description $Username
Add-LocalGroupMember -Group "Administrators" -Member $Username
Add-LocalGroupMember -Group "Remote Desktop Users" -Member $Username
```
<file_sep>+++
title = "AWS: EKS - 2"
description = "Update / Upgrade Kubernetes"
weight=12
+++
## EKS - Part 2
The update process consists of Amazon EKS launching new API server nodes with the updated Kubernetes version to replace the existing ones. Amazon EKS performs standard infrastructure and readiness health checks for network traffic on these new nodes to verify that they are working as expected. If any of these checks fail, Amazon EKS reverts the infrastructure deployment, and your cluster remains on the prior Kubernetes version. Running applications are not affected, and your cluster is never left in a non-deterministic or unrecoverable state. Amazon EKS regularly backs up all managed clusters, and mechanisms exist to recover clusters if necessary. We are constantly evaluating and improving our Kubernetes infrastructure management processes.
### Kubernete Info
* Get cluster & context info
kubectl config get-clusters
kubectl config use-context <context-name>
* Get kubernete version
kubectl version --short
* Get nodes info
kubectl get nodes
* Get pod security policy
kubectl get psp eks.privileged
* Get DNS controller info
kubectl describe deployment coredns --namespace kube-system | grep Image | cut -d "/" -f 3
### Update Kubernete
eksctl update cluster --name <cluster-name> --approve
### VPC CNI
* Get VPC CNI version
kubectl describe daemonset aws-node --namespace kube-system | grep Image | cut -d "/" -f 2
* Patch VPC CNI to latest version
kubectl apply -f https://raw.githubusercontent.com/aws/amazon-vpc-cni-k8s/release-1.5/config/v1.5/aws-k8s-cni.yaml
### Cluster Endpoint
* Enable private access for specific IP
CIDR="192.168.3.11"
CLUSTER_NAME="pg-prd"
REGION_CODE="ap-southeast-2"
aws eks update-cluster-config \
--region ${REGION_CODE} \
--name ${CLUSTER_NAME} \
--resources-vpc-config endpointPublicAccess=true,publicAccessCidrs="${CIDR}/32",endpointPrivateAccess=true
* Check the update status with update-id from above output
aws eks describe-update \
--region ${REGION_CODE} \
--name ${CLUSTER_NAME} \
--update-id <update-id>
### Control Plane Logs
* Enable logging
CLUSTER_NAME="pg-prd"
REGION_CODE="ap-southeast-2"
aws eks --region ${REGION_CODE} \
update-cluster-config --name ${CLUSTER_NAME} \
--logging '{"clusterLogging":[{"types":["api","audit","authenticator","controllerManager","scheduler"],"enabled":true}]}'
* Check the update status
aws eks describe-update \
--region ${REGION_CODE} \
--name ${CLUSTER_NAME} \
--update-id <update-id><file_sep>+++
date = "2011-03-09T10:59:31+11:00"
title = "CMMI"
description="The Capability Maturity Model Integration (CMMI)"
weight=1
+++
## CMMI
The Capability Maturity Model Integration (CMMI) is a process and behavioral model that helps organizations streamline process improvement and encourage productive, efficient behaviors that decrease risks in software, product and service development.
### CMMI model
The CMMI starts with an appraisal process that evaluates three specific areas: process and service development, service establishment and management, and product and service acquisition. It’s designed to help improve performance by providing businesses with everything they need to consistently develop better products and services.
But the CMMI is more than a process model; it’s also a behavioral model. Businesses can use the CMMI to tackle the logistics of improving performance by developing measurable benchmarks, but it can also create a structure for encouraging productive, efficient behavior throughout the organization.
### CMMI Maturity Levels
The CMMI model breaks down organizational maturity into five levels.
- Initial: Processes are viewed as unpredictable and reactive. At this stage, “work gets completed but it’s often delayed and over budget.” This is the worst stage a business can find itself in — an unpredictable environment that increases risk and inefficiency.
- Managed: There’s a level of project management achieved. Projects are “planned, performed, measured and controlled” at this level, but there are still a lot of issues to address.
- Defined: At this stage, organizations are more proactive than reactive. There’s a set of “organization-wide standards” to “provide guidance across projects, programs and portfolios.” Businesses understand their shortcomings, how to address them and what the goal is for improvement.
- Quantitatively managed: This stage is more measured and controlled. The organization is working off quantitative data to determine predictable processes that align with stakeholder needs. The business is ahead of risks, with more data-driven insight into process deficiencies.
- Optimizing: Here, an organization’s processes are stable and flexible. At this final stage, an organization will be in constant state of improving and responding to changes or other opportunities. The organization is stable, which allows for more “agility and innovation,” in a predictable environment.
Once organizations hit Levels 4 and 5, they are considered high maturity, where they are “continuously evolving, adapting and growing to meet the needs of stakeholders and customers.” That is the goal of the CMMI: To create reliable environments, where products, services and departments are proactive, efficient and productive.
<file_sep>+++
title = "Java Note - 5: Lambda "
description="Lambda expressions are Java's first step into functional programming"
+++
## Lambda
### Lambda Best Practices
#### Use Interfaces
The most common misstep taken by an over-eager functional programmer is the use of functional interfaces
in type signatures. In general, you should avoid using the functional interface types directly and instead
provide single-method interfaces as arguments to your methods. These interfaces become a way to create
self-documenting code and to provide meaningful type information, as well as leaving open the opportunity
for your user to provide an actual Java type.
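For illustration only (the `Validator` interface and the `check`/`checkRaw` methods below are invented for this sketch), compare a named single-method interface against exposing the raw functional interface type in a signature:
```java
import java.util.function.Function;

public class UseInterfacesDemo {
    // A named single-method interface: the type itself documents what the lambda is for.
    @FunctionalInterface
    interface Validator {
        boolean isValid(String input);
    }

    // Prefer a signature like this ...
    static boolean check(String input, Validator validator) {
        return validator.isValid(input);
    }

    // ... over exposing the raw functional interface type directly.
    static boolean checkRaw(String input, Function<String, Boolean> validator) {
        return validator.apply(input);
    }

    public static void main(String[] args) {
        // The same lambda satisfies both, but the Validator version reads better at the call site.
        System.out.println(check("hello", s -> !s.isEmpty()));   // true
        System.out.println(checkRaw("", s -> !s.isEmpty()));     // false
    }
}
```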
#### Use Method Reference
As much as possible, use a method reference instead of a lambda. Method references are not only shorter
and easier to read, but using method references will get you thinking directly about the methods as values.
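A small sketch of the same calls written both ways (the word list is arbitrary):
```java
import java.util.Arrays;
import java.util.List;

public class MethodReferenceDemo {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("delta", "alpha", "charlie");

        // Lambda form: works, but just restates the method call.
        words.forEach(w -> System.out.println(w));

        // Method reference form: shorter, and treats println itself as the value being passed.
        words.forEach(System.out::println);

        // Also works for an instance method of an arbitrary object of a class.
        words.sort(String::compareToIgnoreCase);
        System.out.println(words);   // [alpha, charlie, delta]
    }
}
```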
#### Define Lambdas Inline
When you do use lambdas, define them inline. Unless you are doing some kind of fancy manipulation of your
lambda, there is no reason to be assigning them to a variable. The reason that you want to define your lambdas
inline is that it will allow your code to be more flexible when types change: you are letting type inference do
more heavy lifting for you, and adapting your code to changing contexts.
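A minimal sketch of the contrast, assuming a made-up `byLength` comparator; the variable-bound lambda is pinned to one functional interface type up front, while the inline one is adapted by type inference at the call site:
```java
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class InlineLambdaDemo {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("Zed", "Al", "Frank");

        // Bound to a variable: the lambda is pinned to Comparator<String> up front.
        Comparator<String> byLength = (a, b) -> Integer.compare(a.length(), b.length());
        names.sort(byLength);

        // Inline: type inference works out the target type from the sort() call itself.
        names.sort((a, b) -> Integer.compare(a.length(), b.length()));

        System.out.println(names);   // [Al, Zed, Frank]
    }
}
```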
#### Lambdas Should Always Be Threadsafe
As we go through the rest of this book, we will see many places where lambdas make concurrent programming
much easier. Many of the structures built off of lambdas will perform concurrent executions, sometimes without
much warning. Because of this, your lambdas always need to be threadsafe. Pay particular attention to this with
instance method handles, since thread-dangerous state can often be hiding within those instances.
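As a contrived illustration (the counters below are invented for the sketch), a lambda that mutates shared state can silently lose updates under a parallel stream, while a thread-safe holder or a stateless reduction does not:
```java
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.IntStream;

public class ThreadSafeLambdaDemo {
    public static void main(String[] args) {
        // Dangerous: the lambda mutates shared state with no synchronization,
        // so a parallel stream can lose updates.
        int[] unsafeCounter = { 0 };
        IntStream.range(0, 10_000).parallel().forEach(i -> unsafeCounter[0]++);
        System.out.println("unsafe count: " + unsafeCounter[0]);   // frequently less than 10000

        // Safe: use a thread-safe holder ...
        AtomicInteger safeCounter = new AtomicInteger();
        IntStream.range(0, 10_000).parallel().forEach(i -> safeCounter.incrementAndGet());
        System.out.println("atomic count: " + safeCounter.get());  // always 10000

        // ... or better, keep the lambda stateless and let the stream do the counting.
        long counted = IntStream.range(0, 10_000).parallel().count();
        System.out.println("stream count: " + counted);            // always 10000
    }
}
```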
#### Don’t Use Null
The null keyword should never be used in your code. Now that Java has the Optional type, there is simply
no need for it. Whenever you have a method, you should be explicit about whether or not you accept
null, and you generally shouldn’t accept it. This will save you from NullPointerException cropping up
in obnoxious places, far from the site of the actual error. This is an especially painful problem when you
start working with streams and lambdas, because the stack trace may not be very useful for you when
you go to debug. The solution is to never accept null and to aggressively check for it, exploding loudly as
soon as it occurs.
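A rough sketch of the Optional style this advice points at; the `findEmail` lookup and its data are made up for the example:
```java
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

public class NoNullDemo {
    private static final Map<String, String> EMAILS = new HashMap<>();
    static {
        EMAILS.put("zed", "zed@example.com");
    }

    // Instead of returning null for a missing user, make absence part of the return type.
    static Optional<String> findEmail(String user) {
        return Optional.ofNullable(EMAILS.get(user));
    }

    public static void main(String[] args) {
        // The caller has to deal with the empty case right here,
        // not with a NullPointerException far away from the real problem.
        System.out.println(findEmail("frank").orElse("no email on file"));
        findEmail("zed").ifPresent(System.out::println);
    }
}
```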
#### Don’t Release Zalgo
* Don't mix asynchronous and synchronous execution in the same lambda expression; see the sketch below.
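Below is a contrived sketch of what releasing Zalgo looks like (the cache and both `fetch` methods are invented): the first method calls back synchronously on a cache hit and asynchronously on a miss, so callers can never reason about ordering or threading; the second picks one model and sticks to it.
```java
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Consumer;

public class ZalgoDemo {
    private static final Map<String, String> CACHE = new ConcurrentHashMap<>();
    static {
        CACHE.put("k", "cached");
    }

    // BAD: a cache hit calls back synchronously, a miss calls back asynchronously,
    // so callers never know when (or on which thread) their callback runs.
    static void fetchUnpredictably(String key, Consumer<String> callback) {
        String hit = CACHE.get(key);
        if (hit != null) {
            callback.accept(hit);                                               // synchronous path
        } else {
            CompletableFuture.runAsync(() -> callback.accept("loaded:" + key)); // asynchronous path
        }
    }

    // BETTER: pick one model. Every call goes through the async executor,
    // so the callback never runs inside the caller's stack frame.
    static CompletableFuture<Void> fetch(String key, Consumer<String> callback) {
        return CompletableFuture.runAsync(() ->
            callback.accept(CACHE.getOrDefault(key, "loaded:" + key)));
    }

    public static void main(String[] args) {
        fetch("k", v -> System.out.println("got " + v)).join();
        fetch("missing", v -> System.out.println("got " + v)).join();
    }
}
```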
#### Build Complexity from Simple Parts
#### Use Types and the Compiler to Your Advantage
### Common Functional Interfaces
|Functional Interface|Parameter Types|Return Type|Abstract Method|Description|Other Methods|
|----------|-----------|------|----------------|-----------|-----|
|Runnable|none|void|run|Runs an action| |
|Supplier<T>|none|T|get|Supplies a value of type T| |
|Consumer<T>|T|void|accept|Consumes a value of type T|andThen|
|BiConsumer<T, U>|T, U|void|accept|Consumes values of type T and U|andThen|
|Function<T, R>| T |R | apply| A function with argument of type T|compose, andThen, identity|
|BiFunction<T, U, R>| T, U |R | apply| A function with arguments of type T and U|andThen|
|UnaryOperator<T>| T |T | apply| A unary operator on type T|compose, andThen, identity|
|BinaryOperator<T>| T, T |T | apply| A binary operator on type T|andThen, maxBy, minBy|
|Predicate<T>| T |boolean | test| A Boolean-valued function |and, or, negate, isEqual|
|BiPredicate<T, U>| T, U |boolean | test| A Boolean-valued function with two arguments|and, or, negate|
### Method Reference
| Syntax | Description |
|--------|-------------|
| TypeName::staticMethod | A method reference to a static method of a class, an interface, or an enum |
|objectRef::instanceMethod |A method reference to an instance method of the specified object|
|ClassName::instanceMethod |A method reference to an instance method of an arbitrary object of the specified class|
|TypeName.super::instanceMethod |A method reference to an instance method of the supertype of a particular object |
|ClassName::new | A constructor reference to the constructor of the specified class |
|ArrayTypeName::new | An array constructor reference to the constructor of the specified array type |
### Lambda Demo
<!-- <iframe height="800px" width="100%" src="https://repl.it/@harryh0/LambdaDemo?lite=true" scrolling="no" frameborder="no" allowtransparency="true" allowfullscreen="true" sandbox="allow-forms allow-pointer-lock allow-popups allow-same-origin allow-scripts allow-modals"></iframe>
[lambda-demo](https://repl.it/@harryh0/AttentiveElementaryTriangles) -->
```java
import java.util.Locale;
import java.util.Arrays;
import java.util.List;
import java.util.ArrayList;
import java.util.function.Supplier;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.BiFunction;
import java.util.function.Predicate;
import java.util.function.UnaryOperator;
import java.util.function.BinaryOperator;
import java.util.function.IntFunction;
public class LambdaDemo {
public static void main(String[] args) {
// FunctionalInterface
System.out.println("x + y:" + engine((x, y) -> x + y)); // 6
System.out.println("x * y:" + engine((x, y) -> x * y)); // 8
System.out.println("x / y:" + engine((x, y) -> x / y)); // 0
System.out.println("x % y:" + engine((x, y) -> x % y)); // 2
String[] strArray = new String[] { "abc", "klm", "xyz", "pqr" };
List<String> list = Arrays.asList(strArray);
// Default Methods
list.forEach(System.out::println);
// abc
// klm
// xyz
// pqr
Arrays.sort(strArray, (first, second) -> first.compareToIgnoreCase(second));
list = Arrays.asList(strArray);
System.out.println("After sorting ... ");
list.forEach(System.out::println);
// After sorting ...
// abc
// klm
// pqr
// xyz
// Common Functional Interfaces
// Runnable
repeat(5, () -> System.out.println("Hello"));
UnaryOperator<String> upperCase = str -> str.toUpperCase();
BinaryOperator<String> concat = (left, right) -> left + right;
System.out.println(" UnaryOperator upperCase " + upperCase.apply("hello"));
System.out.println(" BinaryOperator<String> concat " + concat.apply("hello", "world"));
// Function
Function<Long, Long> square = x -> x * x;
Function<Long, Long> plusOne = x -> x + 1;
// Function with andThen,
Function<Long, Long> squarePlusOne = square.andThen(plusOne);
Function<Long, Long> plusOneSquare = square.compose(plusOne);
System.out.println(" 5 squarePlusOne is " + squarePlusOne.apply(5L)); // 26
System.out.println(" 5 plusOneSquare is " + plusOneSquare.apply(5L)); // 36
// Predicate
Predicate<Integer> divisibleByThree = x -> x % 3 == 0;
Predicate<Integer> divisibleByFive = x -> x % 5 == 0;
Predicate<Integer> isNegative = x -> x < 0;
// Predicate with AND , OR , NOT
Predicate<Integer> divisibleByThreeAndFive = divisibleByThree.and(divisibleByFive);
Predicate<Integer> divisibleByThreeOrFive = divisibleByThree.or(divisibleByFive);
Predicate<Integer> isPositive = isNegative.negate();
System.out.println(" 15 is divisibleByThreeAndFive " + divisibleByThreeAndFive.test(15));
System.out.println(" 7 is divisibleByThreeAndFive " + divisibleByThreeOrFive.test(7));
System.out.println(" -1 is isPositive " + isPositive.test(7));
// static method reference
Function<Integer, String> toBinary = x -> Integer.toBinaryString(x);
System.out.println(toBinary.apply(19));
// Using a method reference
Function<Integer, String> toBinary2 = Integer::toBinaryString;
System.out.println(toBinary2.apply(19));
// static method lambda expression
BiFunction<Integer, Integer, Integer> sum = (a, b) -> Integer.sum(a, b);
System.out.println(sum.apply(3, 4));
// Instance method
Supplier<Person> personSup = () -> new Person();
Function<String, Person> personFunc = (x) -> new Person(x);
BiFunction<String, String, Person> personBiFunc = (x, y) -> new Person(x, y);
// Consumer<String> personCon = (Person p) -> p.setTitle;
System.out.println(personSup.get());
// Person() constructor called
// name = Unknown, title = Unknown
System.out.println(personFunc.apply("<NAME>"));
// Person( fullName ) constructor called
// name = <NAME>, title = Unknown
System.out.println(personBiFunc.apply("John", "Doe"));
// Person(firstName, lastName ) constructor called
// name = John, title = Unknown
// Recursive Lambda Expressions
IntFunction<Long> factorialCalc = new IntFunction<Long>() {
@Override
public Long apply(int n) {
if (n < 0) {
String msg = "Number must not be negative.";
throw new IllegalArgumentException(msg);
}
if (n == 0) {
return 1L;
} else {
return n * this.apply(n - 1);
}
}
};
int n = 5;
long fact = factorialCalc.apply(n);
System.out.println("Factorial of " + n + " is " + fact);
// Factorial of 5 is 120
}
private static int engine(Calculator calculator) {
int x = 2, y = 4;
return calculator.calculate(x, y);
}
public static void repeat(int n, Runnable action) {
for (int i = 0; i < n; i++)
action.run();
}
}
@FunctionalInterface
interface Calculator {
int calculate(int x, int y);
}
final class Person {
String firstName;
String lastName;
String fullName;
String title;
public Person() {
System.out.println(" Person() constructor called ");
}
public Person(String fullName) {
this.fullName = fullName;
System.out.println(" Person( fullName ) constructor called ");
}
public Person(String firstName, String lastName) {
this.firstName = firstName;
this.lastName = lastName;
System.out.println(" Person(firstName, lastName ) constructor called ");
}
public void setTitle(String t) {
this.title = t;
System.out.println(" Person setTitle ( t ) called ");
}
public String getFirstName() {
return firstName;
}
public String getFullName() {
return fullName == null ?( firstName != null ? firstName : "Unknown" ): fullName;
}
@Override
public String toString() {
return "name = " + getFullName() + ", title = " + (title != null ? title : "Unknown");
}
}
```
<file_sep>+++
date = "2016-04-10T14:59:31+11:00"
title = "Python & MoviePy"
draft = true
+++
## Python
### Prerequisites
* Windows based development environment.
### Install Anaconda
### Install MoviePy
`\path\to\anaconda\scripts\pip install -i https://pypi.anaconda.org/pypi/simple moviepy`
### Download FFMpeg & ImageMagicK
* Download the latest [static file](https://ffmpeg.zeranoe.com/builds/win64/static/). You can choose the zip or 7z format, and then extract it to your local PC.
* Download [ImageMagick](https://www.imagemagick.org/script/download.php) from its website. I suggest downloading ImageMagick-7.0.5-0-portable-Q16-x86.zip (32bit) or ImageMagick-x.x.x.x-portable-Q16-x64.zip (64bit) and extracting it to your local PC.
* Update the `config.py` within the `moviepy` module to set up the correct paths for `FFMpeg` and `ImageMagick`.
### Setup FFMpeg
```
```
### FFMpeg manipulation
```
<file_sep>+++
title = "AWS: ECS - 2"
description = "Getting started with EC2"
draft="true"
+++
## ECS - 2
As mentioned before, you can host your tasks on a cluster of Amazon Elastic Compute Cloud (Amazon EC2) instances that you manage by using the EC2 launch type.
### Launch with EC2
* Create a cluster configuration
ecs-cli configure --cluster pg-ec2 \
--default-launch-type EC2 \
--config-name pg-ec2 \
--region ap-southeast-2
* Create a CLI profile
ecs-cli configure profile \
--access-key AWS_ACCESS_KEY_ID \
--secret-key AWS_SECRET_ACCESS_KEY \
--profile-name pg-ec2-profile
* Create a Cluster
ecs-cli up --keypair id_rsa \
--capability-iam --size 2 \
--instance-type t2.medium \
--cluster-config pg-ec2 \
--ecs-profile pg-ec2-profile
* Create a Compose File - docker-compose.yml
```yaml
version: '3'
services:
web:
image: amazon/amazon-ecs-sample
ports:
- "80:80"
logging:
driver: awslogs
options:
awslogs-group: pg-far
awslogs-region: ap-southeast-2
awslogs-stream-prefix: web
```
* Create a parameters specific file - ecs-params.yml
```yaml
version: 1
task_definition:
services:
web:
cpu_shares: 100
mem_limit: 524288000
```
* Deploy the Compose File to a Cluster
ecs-cli compose up --create-log-groups \
--cluster-config pg-ec2 \
--ecs-profile pg-ec2-profile
* View the container
ecs-cli ps --cluster-config pg-ec2 \
--ecs-profile pg-ec2-profile
* Scale the Tasks on a Cluster
ecs-cli compose scale 2 \
--cluster-config pg-ec2 \
--ecs-profile pg-ec2-profile
* View the container
ecs-cli ps --cluster-config pg-ec2 \
--ecs-profile pg-ec2-profile
* Create an ECS Service
# Stop the containers first
ecs-cli compose down \
--cluster-config pg-ec2 \
--ecs-profile pg-ec2-profile
# Create the service.
ecs-cli compose service up \
--cluster-config pg-ec2 \
--ecs-profile pg-ec2-profile
<file_sep>+++
title = "C#"
description = "C# Tutorials"
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
title = "Azure"
weight = 3
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
title ="JS & TS "
description="JavaScript & TypeScript"
+++
{{%children style="card" description="true" sort="Weight" %}}<file_sep>+++
title = "C Lecture - 1"
description = "Exercise 0 ~ 31"
+++
Author: <NAME>
All content comes from Zed's [Lecture Repository](https://github.com/zedshaw/learn-c-the-hard-way-lectures.git) and [Libraries Repository](https://github.com/zedshaw/liblcthw). All credit goes to Zed.
### Exercise 0 Installing Software
The Plan
* Install software on your system.
* Test that it works right.
Linux Install
On Debian/Ubuntu use:
$ sudo apt-get install build-essential
On RedHat/CentOS:
$ sudo yum groupinstall development-tools
Linux Testing
Test that your C compiler works with:
$ cc --version
OSX Install
Install [XCode](https://developer.apple.com/xcode/), this will take a while.
OSX Testing
Test that your C compiler works with:
$ cc --version
Windows Install
Install [MinGW](http://www.mingw.org/) or [Cygwin](https://www.cygwin.com/) or Use [VirtualBox](https://www.virtualbox.org/wiki/Downloads) to run Linux.
Text Editors
You should already have one.
Just don't use an IDE. They aren't very helpful.
End of Lecture 0
### Exercise 1 Dust Off That Compiler
The Plan
* Write your first C program.
* Build it.
* Break it.
The Code
.\ex01\ex1.c
```c
#include <stdio.h>
/* This is a comment. */
int main(int argc, char *argv[])
{
int distance = 100;
// this is also a comment
printf("You are %d miles away.\n", distance);
return 0;
}
```
.\ex01\ex1_zed.c
```c
#include <stdio.h>
/* This is a comment. */
int main(int argc, char *argv[])
{
int distance = 100;
// this is also a comment
printf("You are %d miles away.\n");
return 0;
}
```
The Analysis
Let's look at it line-by-line.
Breaking It
This is all crazy magic right now.
Extra Credit
* Open the ``ex1`` file in your text editor and change or delete random parts.
Try running it and see what happens.
* Print out five more lines of text or something more complex than "hello world."
* Run ``man 3 printf`` and read about this function and many others.
* For each line, write out the symbols you don't understand and
see if you can guess what they mean. Write a little chart on
paper with your guess so you can check it later to see
if you got it right.
### Exercise 2 Using Makefiles to Build
The Plan
* Start with simple make usage.
* Set a few important settings.
How Make Works
Implied dependencies and ancient lore.
Shell Commands
$ make ex1
## or this one too
$ CFLAGS="-Wall" make ex1
$ make clean
$ make ex1
Makefile
CFLAGS=-Wall -g
clean:
rm -f ex1
The Analysis
* Setting options.
* Indicating dependencies.
* Writing commands to run.
Breaking It
* Watch out for tabs vs. spaces.
Extra Credit
* Create an *all: ex1* target that will build *ex1* with
just the command *make*.
* Read *man make* to find out more information on how to run it.
* Read *man cc* to find out more information on what the flags *-Wall* and *-g* do.
* Research *Makefiles* online and see if you can improve this one.
* Find a *Makefile* in another C project and try to understand
what it's doing.
### Exercise 3 Formatted Printing
The Plan
* Introduction to *printf*.
The Code
.\ex03\ex3.c
```c
#include <stdio.h>
int main(int argc, char *argv[])
{
int age = 100;
int height = 72;
printf("I am %d years old.\n", argv);
printf("I am %d inches tall.\n", height);
return 0;
}
```
The Analysis
Breaking It
* Take the *age* variable out of the first *printf* call, then recompile. You should get a couple of warnings.
* Run this new program and it will either crash or print out a really crazy age.
* Put the *printf* back the way it was, and then don't set *age* to an initial value by changing that line to *int age;*, and then rebuild it and run it again.
Extra Credit
* Find as many other ways to break *ex3.c* as you can.
* Run *man 3 printf* and read about the other *%* format
characters you can use. These should look familiar if you used
them in other languages (they come from *printf*).
* Add *ex3* the *all* list in your *Makefile*. Use this
to *make clean all* and build all of your exercises thus far.
* Add *ex3* to your *clean* list in your*Makefile* as well.
Use *make clean* to remove it when you need to.
### Exercise 4 Using a Debugger
The Plan
* See how GDB works (LLDB on OSX).
* Look at memory checkers like Valgrind and AddressSanitizer.
* Cover the quick reference.
* Debug a program.
Using GDB
Using LLDB
Using Valgrind
Using Lint
Using AddressSanitizer
You neeed clang for this.
"The Debugger"
When I say "the debugger" in the book I mean to use GDB, but use
every tool you can find that helps.
### Exercise 5 Memorizing C Operators
The Plan
* Learn why memorizing works.
* Learn how to memorize things.
* Review the C operators.
Memorization
* A "backdoor" hack to learning.
* Memorize the operators, then reading is easier.
* Works with any language.
Memorization Process
* Write everything on index cards.
* Use Anki, but make your own cards.
* Spend 30-60 minutes a day.
* Track what you don't know, drill those more.
Arithmetic Operators
+ Add
- Subtract
* Multiply
/ Divide
% Modulus
++ Increment
-- Decrement
Relational Operators
== Equal
!= Not equal
> Greater than
< Less than
>= Greater than equal
<= Less than equal
Logical Operators
&& Logical and
|| Logical or
! Logical not
? : Logical ternary
Bitwise Operators
& Bitwise and
| Bitwise or
^ Bitwise xor
~ Bitwise one's complement
<< Bitwise shift left
>> Bitwise shift right
Assignment Operators
= Assign equal
+= Assign plus-equal
-= Assign minus-equal
*= Assign multiply-equal
/= Assign divide-equal
%= Assign modulus-equal
<<= Assign shift-left-equal
>>= Assign shift-right-equal
&= Assign and-equal
^= Assign xor-equal
|= Assign or-equal
Data Operators
sizeof() Get the size of
[] Array subscript
& The address of
* The value of
-> Structure dereference
. Structure reference
Miscellaneous Operators
, Comma
( ) Parenthesis
{ } Braces
: Colon
// Single-line comment start
/* Multi-line comment start
*/ Multi-line comment end
### Exercise 6 Memorizing C Syntax
The Plan
* Memorize the keywords of C.
* Memorize the major syntax forms.
Execution Keywords
break Exit out of a compound statement.
case A branch in a switch-statement.
continue Continue to the top of a loop.
do Start a do-while loop.
default Default branch in a switch-statement.
else An else branch of an if-statement.
for Start a for-loop.
goto Jump to a label.
if Starts an if-statement.
return Return from a function.
switch Start a switch-statement.
while Start a while-loop.
Type Keywords
char Character data type.
double A double floating point data type.
float A floating point data type.
int An integer data type.
long A long integer data type.
short A short integer data type.
void Declare a data type empty.
union Start a union-statement.
struct Combine variables into a single record.
Data Keywords
auto Give a local variable a local lifetime.
const Make a variable unmodifiable.
enum Define a set of int constants.
extern Declare an identifier is defined externally.
register Declare a variable be stored in a CPU register.
signed A signed modifier for integer data types.
sizeof Determine the size of data.
static Preserve variable value after its scope exits.
typedef Create a new type.
unsigned An unsigned modifier for integer data types.
volatile Declare a variable might be modified elsewhere.
If-Statement
if(TEST) {
CODE;
} else if(TEST) {
CODE;
} else {
CODE;
}
Switch-Statement
switch (OPERAND) {
case CONSTANT:
CODE;
break;
default:
CODE;
}
While-Loop
while(TEST) {
CODE;
}
While with Continue
while(TEST) {
if(OTHER_TEST) {
continue;
}
CODE;
}
While with Break
while(TEST) {
if(OTHER_TEST) {
break;
}
CODE;
}
Do-While
do {
CODE;
} while(TEST);
For-Loop
for(INIT; TEST; POST) {
CODE;
}
* *continue* and *break* work with *for*
Enum
enum { CONST1, CONST2, CONST3 } NAME;
Goto
if(ERROR_TEST) {
goto fail;
}
fail:
CODE;
Functions
TYPE NAME(ARG1, ARG2, ..) {
CODE;
return VALUE;
}
Typedef
typedef DEFINITION IDENTIFIER;
typedef unsigned char byte;
Struct
struct NAME {
ELEMENTS;
} [VARIABLE_NAME];
Typedef Struct
typedef struct [STRUCT_NAME] {
ELEMENTS;
} IDENTIFIER;
Union
union NAME {
ELEMENTS;
} [VARIABLE_NAME];
### Exercise 7 Variables and Types
The Plan
* Learn some basic variables and types.
* int, float, double, char, and strings.
The Code
.\ex07\ex7.c
```c
#include <stdio.h>
int main(int argc, char *argv[])
{
int distance = 100;
float power = 2.345f;
double super_power = 56789.4532;
char initial = 'A';
char first_name[] = "Zed";
char last_name[] = "Shaw";
printf("You are %d miles away.\n", distance);
printf("You have %f levels of power.\n", power);
printf("You have %f awesome super powers.\n", super_power);
printf("I have an initial %c.\n", initial);
printf("I have a first name %s.\n", first_name);
printf("I have a last name %s.\n", last_name);
printf("My whole name is %s %c. %s.\n",
first_name, initial, last_name);
int bugs = 100;
double bug_rate = 1.2;
printf("You have %d bugs at the imaginary rate of %f.\n",
bugs, bug_rate);
long universe_of_defects = 1L * 1024L * 1024L * 1024L;
printf("The entire universe has %ld bugs.\n", universe_of_defects);
double expected_bugs = bugs * bug_rate;
printf("You are expected to have %f bugs.\n", expected_bugs);
double part_of_universe = expected_bugs / universe_of_defects;
printf("That is only a %e portion of the universe.\n",
part_of_universe);
// this makes no sense, just a demo of something weird
char nul_byte = '\0';
int care_percentage = bugs * nul_byte;
printf("Which means you should care %d%%.\n", care_percentage);
return 0;
}
```
The Analysis
Breaking It
* Strings give us so much more fun now!
* Crafting bad strings.
* Messing with pointers.
* Abusing printf.
Extra Credit
* Make the number you assign to *universe_of_defects* various
sizes until you get a warning from the compiler.
* What do these really huge numbers actually print out?
* Change *long* to *unsigned long* and try to find
the number that makes it too big.
* Go search online to find out what *unsigned* does.
* Try to explain to yourself (before I do in the next exercise)
why you can multiply a *char* and an *int*.
### Exercise 8 If, Else-If, Else
The Plan
Simply learn to use this:
if(TEST) {
CODE;
} else if(TEST) {
CODE;
} else {
CODE;
}
The Code
.\ex08\ex8.c
```c
#include <stdio.h>
int main(int argc, char *argv[])
{
int i = 0;
if (argc == 1) {
printf("You only have one argument. You suck.\n");
} else if (argc > 1 && argc < 4) {
printf("Here's your arguments:\n");
for (i = 0; i < argc; i++) {
printf("%s ", argv[i]);
}
printf("\n");
} else if (argc > 10) {
printf("You have too many arguments. You suck.\n");
}
return 0;
}
```
The Analysis
Breaking It
* It kind of just works, but remove the *else* and change the logic.
Extra Credit
* You were briefly introduced to *&&*, which does an *and* comparison,
so go research online the different *Boolean operators*.
* Write a few more test cases for this program to see what you can come
up with.
### Exercise 9 While-Loop and Boolean Expressions
The Plan
You first loop shall be the *while*:
while(TEST) {
CODE;
}
The Code
.\ex09\ex9.c
```c
#include <stdio.h>
int main(int argc, char *argv[])
{
int i = 0;
while (i < 25) {
printf("%d\n", i);
i++;
}
return 0;
}
```
The Analysis
Breaking It
* Forget to initialize the *int i*.
* Forget to do an i++ and make it run forever.
Extra Credit
* Make the loop count backward by using ``i--`` to start
at 25 and go to 0.
* Write a few more complex ``while-loops`` using what you know
so far.
### Exercise 10 Switch Statements
The Plan
* Learn about the *switch-statement* and indirectly jump tables.
* Write a program that takes a command line argument.
The Code
.\ex10\ex10.c
```c
#include <stdio.h>
int main(int argc, char *argv[])
{
if (argc != 2) {
printf("ERROR: You need one argument.\n");
// this is how you abort a program
return 1;
}
int i = 0;
for (i = 0; argv[1][i] != '\0'; i++) {
char letter = argv[1][i];
switch (letter) {
case 'a':
case 'A':
printf("%d: 'A'\n", i);
break;
case 'e':
case 'E':
printf("%d: 'E'\n", i);
break;
case 'i':
case 'I':
printf("%d: 'I'\n", i);
break;
case 'o':
case 'O':
printf("%d: 'O'\n", i);
break;
case 'u':
case 'U':
printf("%d: 'U'\n", i);
break;
case 'y':
case 'Y':
if (i > 2) {
// it's only sometimes Y
printf("%d: 'Y'\n", i);
}
break;
default:
printf("%d: %c is not a vowel\n", i, letter);
}
}
return 0;
}
```
The Analysis
Let's talk about jump tables, in the naive sense.
Breaking It
* Forget a *break*, and it'll run two or more blocks of code you don't want it to run.
* Forget a *default*, and it'll silently ignore values you forgot.
* Accidentally put a variable into the *switch* that evaluates to something unexpected, like an *int*, which becomes weird values.
* Use uninitialized values in the *switch*.
Extra Credit
* Write another program that uses math on the letter to
convert it to lowercase, and then remove all of the extraneous
uppercase letters in the switch.
* Use the *','* (comma) to initialize *letter*
in the *for-loop*.
* Make it handle all of the arguments you pass it with
yet another *for-loop*.
Extra Credit
* Convert this *switch-statement* to an *if-statement*.
Which do you like better?
* In the case for 'Y' I have the break outside of the *if-statement*. What's the impact of this,
and what happens if you move it inside of the *if-statement*. Prove to yourself that you're right.
### Exercise 11 Arrays and Strings
The Plan
* Learn the similarity between arrays and strings.
* Avoid getting pedantic about them.
* Learn how C stores strings and processes them.
The Code
.\ex11\ex11.c
```c
#include <stdio.h>
int main(int argc, char *argv[])
{
int numbers[4] = { 0 };
char name[4] = { 'a' };
// first, print them out raw
printf("numbers: %d %d %d %d\n",
numbers[0], numbers[1], numbers[2], numbers[3]);
printf("name each: %c %c %c %c\n",
name[0], name[1], name[2], name[3]);
printf("name: %s\n", name);
// setup the numbers
numbers[0] = 1;
numbers[1] = 2;
numbers[2] = 3;
numbers[3] = 4;
// setup the name
name[0] = 'Z';
name[1] = 'e';
name[2] = 'd';
// then print them out initialized
printf("numbers: %d %d %d %d\n",
numbers[0], numbers[1], numbers[2], numbers[3]);
printf("name each: %c %c %c %c\n",
name[0], name[1], name[2], name[3]);
// print the name like a string
printf("name: %s\n", name);
// another way to use name
char *another = "Zed";
printf("another: %s\n", another);
printf("another each: %c %c %c %c\n",
another[0], another[1], another[2], another[3]);
return 0;
}
```
The Analysis
Breaking It
So many ways to break this!
* Get rid of the initializers that set up *name*.
* Accidentally set *name[3] = 'A';* so that there's no terminator.
* Set the initializer to *{'a','a','a','a'}* so that there are too many
'a' characters and no space for the *'\0'* terminator.
Extra Credit
* Assign the characters into *numbers*, and then use *printf*
to print them one character at a time. What kind of compiler warnings
do you get?
* Do the inverse for *name*, trying to treat it like an array
of *int* and print it out one *int* at a time. What
does the debugger think of that?
* In how many other ways can you print this out?
Extra Credit
* If an array of characters is 4 bytes long, and an integer is 4 bytes
long, then can you treat the whole *name* array like it's just
an integer? How might you accomplish this crazy hack?
* Take out a piece of paper and draw each of these arrays as a
row of boxes. Then do the operations you just did on paper to see
if you get them right.
* Convert *name* to be in the style of *another* and see
if the code keeps working.
### Exercise 12 Sizes and Arrays
The Plan
* Learn about *sizeof* and how it relates to arrays.
The Code
.\ex12\ex12.c
```c
#include <stdio.h>
int main(int argc, char *argv[])
{
int areas[] = { 10, 12, 13, 14, 20 };
char name[] = "Zed";
char full_name[] = {
'Z', 'e', 'd',
' ', 'A', '.', ' ',
'S', 'h', 'a', 'w', '\0'
};
// WARNING: On some systems you may have to change the
// %ld in this code to a %u since it will use unsigned ints
printf("The size of an int: %ld\n", sizeof(int));
printf("The size of areas (int[]): %ld\n", sizeof(areas));
printf("The number of ints in areas: %ld\n",
sizeof(areas) / sizeof(int));
printf("The first area is %d, the 2nd %d.\n", areas[0], areas[1]);
printf("The size of a char: %ld\n", sizeof(char));
printf("The size of name (char[]): %ld\n", sizeof(name));
printf("The number of chars: %ld\n", sizeof(name) / sizeof(char));
printf("The size of full_name (char[]): %ld\n", sizeof(full_name));
printf("The number of chars: %ld\n",
sizeof(full_name) / sizeof(char));
printf("name=\"%s\" and full_name=\"%s\"\n", name, full_name);
return 0;
}
```
The Analysis
Breaking It
* Get rid of the *'\0'* at the end of *full_name*
and re-run it. Run it under the debugger, too. Now, move the definition
of *full_name* to the top of *main* before *areas*.
Try running it under the debugger a few times and see if you get some
new errors. In some cases, you might still get lucky and not catch
any errors.
* Change it so that instead of *areas[0]* you try to
print *areas[10]*. See what the debugger thinks of that.
* Try other ways to break it like this, doing it to *name* and
*full_name* too.
Extra Credit
* Try assigning to elements in the *areas* array with *areas[0] = 100;* and similar.
* Try assigning to elements of *name* and *full_name*.
* Try setting one element of *areas* to a character from *name*.
* Search online for the different sizes used for integers on different
CPUs.
### Exercise 13 For-Loops and Arrays of Strings
The Plan
Learn about this code:
for(INITIALIZER; TEST; INCREMENTER) {
CODE;
}
The Code
.\ex13\ex13.c
```c
#include <stdio.h>
int main(int argc, char *argv[])
{
int i = 0;
// go through each string in argv
// why am I skipping argv[0]?
for (i = 1; i < argc; i++) {
printf("arg %d: %s\n", i, argv[i]);
}
// let's make our own array of strings
char *states[] = {
"California", "Oregon",
"Washington", "Texas"
};
int num_states = 4;
for (i = 0; i < num_states; i++) {
printf("state %d: %s\n", i, states[i]);
}
return 0;
}
```
The Analysis
Breaking It
* Take your favorite other language and use it to run this program, but include as many command line arguments as possible. See if you can bust it
by giving it way too many arguments.
* Initialize *i* to 0 and see what that does. Do you have to adjust
*argc* as well, or does it just work? Why does 0-based indexing work
here?
* Set *num_states* wrong so that it's a higher value and see what
it does.
Extra Credit
* Figure out what kind of code you can put into the parts of a *for-loop*.
* Look up how to use the comma character (,) to separate multiple
statements in the parts of the *for-loop*, but between the semicolon characters (;).
* Read what a *NULL* is and try to use it in one of the elements from the
*states* array to see what it'll print.
* See if you can assign an element from the *states* array to the
*argv* array before printing both. Try the inverse.
### Exercise 14 Writing and Using Functions
The Plan
* Write your very first functions.
The Code
.\ex14\ex14.c
```c
#include <stdio.h>
#include <ctype.h>
// forward declarations
int can_print_it(char ch);
void print_letters(char arg[]);
void print_arguments(int argc, char *argv[])
{
int i = 0;
for (i = 0; i < argc; i++) {
print_letters(argv[i]);
}
}
void print_letters(char arg[])
{
int i = 0;
for (i = 0; arg[i] != '\0'; i++) {
char ch = arg[i];
if (can_print_it(ch)) {
printf("'%c' == %d ", ch, ch);
}
}
printf("\n");
}
int can_print_it(char ch)
{
return isalpha(ch) || isblank(ch);
}
int main(int argc, char *argv[])
{
print_arguments(argc, argv);
return 0;
}
```
The Analysis
Breaking It
* Remove the forward declarations to confuse the compiler and cause it to complain about *can_print_it* and *print_letters*.
* When you call *print_arguments* inside *main*, try
adding 1 to *argc* so that it goes past the end of the
*argv* array.
Extra Credit
* Rework these functions so that you have fewer functions. For example,
do you really need *can_print_it*?
* Have *print_arguments* figure out how long each argument string
is by using the *strlen* function, and then pass that length
to *print_letters*. Then, rewrite *print_letters*
so it only processes this fixed length and doesn't rely on the
*'\0'* terminator. You'll need the *#include <string.h>* for this.
Extra Credit
* Use *man* to look up information on *isalpha*
and *isblank*. Use other similar functions to
print out only digits or other characters.
* Go read about how other people like to format their
functions. Never use the *K&R syntax* (it's antiquated and
confusing) but understand what it's doing in case you run
into someone who likes it.
### Exercise 15 Pointers, Dreaded Pointers
The Plan
* A long video on C pointers.
* Lots of demonstration and visuals.
The Code
.\ex15\ex15.c
```c
#include <stdio.h>
int main(int argc, char *argv[])
{
// create two arrays we care about
int ages[] = { 23, 43, 12, 89, 2 };
char *names[] = {
"Alan", "Frank",
"Mary", "John", "Lisa"
};
// safely get the size of ages
int count = sizeof(ages) / sizeof(int);
int i = 0;
// first way using indexing
for (i = 0; i < count; i++) {
printf("%s has %d years alive.\n", names[i], ages[i]);
}
printf("---\n");
// setup the pointers to the start of the arrays
int *cur_age = ages;
char **cur_name = names;
// second way using pointers
for (i = 0; i < count; i++) {
printf("%s is %d years old.\n",
*(cur_name + i), *(cur_age + i));
}
printf("---\n");
// third way, pointers are just arrays
for (i = 0; i < count; i++) {
printf("%s is %d years old again.\n", cur_name[i], cur_age[i]);
}
printf("---\n");
// fourth way with pointers in a stupid complex way
for (cur_name = names, cur_age = ages;
(cur_age - ages) < count; cur_name++, cur_age++) {
printf("%s lived %d years so far.\n", *cur_name, *cur_age);
}
return 0;
}
```
The Pointer Lexicon
type *ptr A pointer of type named ptr
*ptr The value of whatever ptr is pointed at
*(ptr + i) The value of (whatever ptr is pointed at plus i)
&thing The address of thing
type *ptr = &thing A pointer of type named ptr set to the address of thing
ptr++ Increment where ptr points
Pointers Visually
The Analysis
Breaking It
* Try to make *cur_age* point at *names*. You'll need to
use a C cast to force it, so go look that up and try to figure it out.
* In the final *for-loop*, try getting the math wrong in weird ways.
* Try rewriting the loops so that they start at the end of the arrays and go
to the beginning. This is harder than it looks.
Extra Credit
* Rewrite all of the arrays in this program as pointers.
* Rewrite all of the pointers as arrays.
* Go back to some of the other programs that use arrays and
try to use pointers instead.
* Process command line arguments using just pointers similar to how
you did *names* in this one.
* Play with combinations of getting the value of and the address of
things.
* Add another *for-loop* at the end that prints out the
addresses that these pointers are using. You'll need the *%p* format
for *printf*.
Extra Credit
* Rewrite this program to use a function for each of the ways you're
printing out things. Try to pass pointers to these functions so that
they work on the data. Remember you can declare a function to accept
a pointer, but just use it like an array.
* Change the *for-loops* to *while-loops* and see what
works better for which kind of pointer usage.
### Exercise 16 Structs And Pointers To Them
The Plan
* Learn to work with *structs* to structure data and make new types.
* Learn to use pointers to work with *structs* better.
The Code
.\ex16\ex16.c
```c
#include <stdio.h>
#include <assert.h>
#include <stdlib.h>
#include <string.h>
struct Person {
char *name;
int age;
int height;
int weight;
};
struct Person *Person_create(char *name, int age, int height,
int weight)
{
struct Person *who = malloc(sizeof(struct Person));
assert(who != NULL);
who->name = strdup(name);
who->age = age;
who->height = height;
who->weight = weight;
return who;
}
void Person_destroy(struct Person *who)
{
assert(who != NULL);
free(who->name);
free(who);
}
void Person_print(struct Person *who)
{
printf("Name: %s\n", who->name);
printf("\tAge: %d\n", who->age);
printf("\tHeight: %d\n", who->height);
printf("\tWeight: %d\n", who->weight);
}
int main(int argc, char *argv[])
{
// make two people structures
struct Person *joe = Person_create("<NAME>", 32, 64, 140);
struct Person *frank = Person_create("<NAME>", 20, 72, 180);
// print them out and where they are in memory
printf("Joe is at memory location %p:\n", joe);
Person_print(joe);
printf("Frank is at memory location %p:\n", frank);
Person_print(frank);
// make everyone age 20 years and print them again
joe->age += 20;
joe->height -= 2;
joe->weight += 40;
Person_print(joe);
frank->age += 20;
frank->weight += 20;
Person_print(frank);
// destroy them both so we clean up
Person_destroy(joe);
Person_destroy(frank);
return 0;
}
```
The Analysis
Breaking It
* Try passing *NULL* to *Person_destroy* see what
it does. If it doesn't abort, then you must not have the
*-g* option in your Makefile's *CFLAGS*.
* Forget to call *Person_destroy* at the end, and then run
it under the debugger to see it report that you forgot
to free the memory. Figure out the options you need to pass
to the debugger to get it to print how you leaked
this memory.
Breaking It
* Forget to free *who->name* in *Person_destroy*
and compare the output. Again, use the right options to
see how the debugger tells you exactly where you messed
up.
* This time, pass *NULL* to *Person_print* and
see what the debugger thinks of that. You'll figure out that *NULL* is a quick way
to crash your program.
Extra Credit
* How to create a *struct* on the *stack* just like you're making any other variable.
* How to initialize it using the *x.y* (period) character
instead of the *x->y* syntax.
* How to pass a structure to other functions without using
a pointer.
### Exercise 17 Heap and Stack Memory Allocation
The Plan
* Learn to allocate data on the heap using *malloc*.
* Memory management techniques to avoid leaking.
* How the heap differs from the stack, and when to use them.
The Code
.\ex17\ex17.c
```c
#include <stdio.h>
#include <assert.h>
#include <stdlib.h>
#include <errno.h>
#include <string.h>
#define MAX_DATA 512
#define MAX_ROWS 100
struct Address {
int id;
int set;
char name[MAX_DATA];
char email[MAX_DATA];
};
struct Database {
struct Address rows[MAX_ROWS];
};
struct Connection {
FILE *file;
struct Database *db;
};
void die(const char *message)
{
if (errno) {
perror(message);
} else {
printf("ERROR: %s\n", message);
}
exit(1);
}
void Address_print(struct Address *addr)
{
printf("%d %s %s\n", addr->id, addr->name, addr->email);
}
void Database_load(struct Connection *conn)
{
int rc = fread(conn->db, sizeof(struct Database), 1, conn->file);
if (rc != 1)
die("Failed to load database.");
}
struct Connection *Database_open(const char *filename, char mode)
{
struct Connection *conn = malloc(sizeof(struct Connection));
if (!conn)
die("Memory error");
conn->db = malloc(sizeof(struct Database));
if (!conn->db)
die("Memory error");
if (mode == 'c') {
conn->file = fopen(filename, "w");
} else {
conn->file = fopen(filename, "r+");
if (conn->file) {
Database_load(conn);
}
}
if (!conn->file)
die("Failed to open the file");
return conn;
}
void Database_close(struct Connection *conn)
{
if (conn) {
if (conn->file)
fclose(conn->file);
if (conn->db)
free(conn->db);
free(conn);
}
}
void Database_write(struct Connection *conn)
{
rewind(conn->file);
int rc = fwrite(conn->db, sizeof(struct Database), 1, conn->file);
if (rc != 1)
die("Failed to write database.");
rc = fflush(conn->file);
if (rc == -1)
die("Cannot flush database.");
}
void Database_create(struct Connection *conn)
{
int i = 0;
for (i = 0; i < MAX_ROWS; i++) {
// make a prototype to initialize it
struct Address addr = {.id = i,.set = 0 };
// then just assign it
conn->db->rows[i] = addr;
}
}
void Database_set(struct Connection *conn, int id, const char *name,
const char *email)
{
struct Address *addr = &conn->db->rows[id];
if (addr->set)
die("Already set, delete it first");
addr->set = 1;
// WARNING: bug, read the "How To Break It" and fix this
char *res = strncpy(addr->name, name, MAX_DATA);
// demonstrate the strncpy bug
if (!res)
die("Name copy failed");
res = strncpy(addr->email, email, MAX_DATA);
if (!res)
die("Email copy failed");
}
void Database_get(struct Connection *conn, int id)
{
struct Address *addr = &conn->db->rows[id];
if (addr->set) {
Address_print(addr);
} else {
die("ID is not set");
}
}
void Database_delete(struct Connection *conn, int id)
{
struct Address addr = {.id = id,.set = 0 };
conn->db->rows[id] = addr;
}
void Database_list(struct Connection *conn)
{
int i = 0;
struct Database *db = conn->db;
for (i = 0; i < MAX_ROWS; i++) {
struct Address *cur = &db->rows[i];
if (cur->set) {
Address_print(cur);
}
}
}
int main(int argc, char *argv[])
{
if (argc < 3)
die("USAGE: ex17 <dbfile> <action> [action params]");
char *filename = argv[1];
char action = argv[2][0];
struct Connection *conn = Database_open(filename, action);
int id = 0;
if (argc > 3) id = atoi(argv[3]);
if (id >= MAX_ROWS) die("There's not that many records.");
switch (action) {
case 'c':
Database_create(conn);
Database_write(conn);
break;
case 'g':
if (argc != 4)
die("Need an id to get");
Database_get(conn, id);
break;
case 's':
if (argc != 6)
die("Need id, name, email to set");
Database_set(conn, id, argv[4], argv[5]);
Database_write(conn);
break;
case 'd':
if (argc != 4)
die("Need id to delete");
Database_delete(conn, id);
Database_write(conn);
break;
case 'l':
Database_list(conn);
break;
default:
die("Invalid action: c=create, g=get, s=set, d=del, l=list");
}
Database_close(conn);
return 0;
}
```
```bash
$ make ex17
cc -Wall -g    ex17.c   -o ex17
$ ./ex17 db.dat c
$ ./ex17 db.dat s 1 zed <EMAIL>
$ ./ex17 db.dat s 2 frank <EMAIL>
$ ./ex17 db.dat s 3 joe <EMAIL>
$ ./ex17 db.dat l
1 zed <EMAIL>
2 frank <EMAIL>
3 joe <EMAIL>
$ ./ex17 db.dat d 3
$ ./ex17 db.dat l
1 zed <EMAIL>
2 frank <EMAIL>
$ ./ex17 db.dat g 2
2 frank <EMAIL>
```
The Analysis
Breaking It
* The classic way is to remove some of the safety checks so that you can
pass in arbitrary data. For example, remove the check on line 160
that prevents you from passing in any record number.
* You can also try corrupting the data file. Open it in any editor and
change random bytes, and then close it.
* You could also find ways to pass bad arguments to the program when it's
run. For example, getting the file and action backwards will make it create
a file named after the action, and then do an action based on the first
character.
Breaking It
* There's a bug in this program because *strncpy* is poorly
designed. Go read about *strncpy* and try to find out what happens
when the *name* or *address* you give is *greater* than
512 bytes. Fix this by simply forcing the last character to *'\0'*
so that it's always set no matter what (which is what strncpy should do).
* In the extra credit, I have you augment the program to create arbitrary
size databases. Try to see what the biggest database is before you
cause the program to die due to lack of memory from *malloc*.
Extra Credit
* The *die* function needs to be augmented to let you pass the *conn*
variable, so it can close it and clean up.
* Change the code to accept parameters for *MAX_DATA* and *MAX_ROWS*, store them in the *Database* struct, and write that to the file, thus creating
a database that can be arbitrarily sized.
* Add more operations you can do with the database, like *find*.
Extra Credit
* Read about how C does its struct packing, and then try to see why your
file is the size it is. See if you can calculate a new size after adding
more fields.
* Add some more fields to *Address* and make them searchable.
* Write a shell script that will do your testing automatically for you
by running commands in the right order. Hint: Use *set -e* at the
top of a *bash* to make it abort the whole script if any command
has an error.
Extra Credit
* Try reworking the program to use a single global for the database connection.
How does this new version of the program compare to the other one?
* Go research stack data structure and write one in your favorite language,
then try to do it in C.
### Exercise 18 Pointers to Functions
The Plan
* Advanced topic of pointers to functions.
* These are very useful but not encountered too often.
The Code
.\ex18\ex18.c
```c
#include <stdio.h>
#include <stdlib.h>
#include <errno.h>
#include <string.h>
/** Our old friend die from ex17. */
void die(const char *message)
{
if (errno) {
perror(message);
} else {
printf("ERROR: %s\n", message);
}
exit(1);
}
// a typedef creates a fake type, in this
// case for a function pointer
typedef int (*compare_cb) (int a, int b);
/**
* A classic bubble sort function that uses the
* compare_cb to do the sorting.
*/
int *bubble_sort(int *numbers, int count, compare_cb cmp)
{
int temp = 0;
int i = 0;
int j = 0;
int *target = malloc(count * sizeof(int));
if (!target)
die("Memory error.");
memcpy(target, numbers, count * sizeof(int));
for (i = 0; i < count; i++) {
for (j = 0; j < count - 1; j++) {
if (cmp(target[j], target[j + 1]) > 0) {
temp = target[j + 1];
target[j + 1] = target[j];
target[j] = temp;
}
}
}
return target;
}
int sorted_order(int a, int b)
{
return a - b;
}
int reverse_order(int a, int b)
{
return b - a;
}
int strange_order(int a, int b)
{
if (a == 0 || b == 0) {
return 0;
} else {
return a % b;
}
}
/**
* Used to test that we are sorting things correctly
* by doing the sort and printing it out.
*/
void test_sorting(int *numbers, int count, compare_cb cmp)
{
int i = 0;
int *sorted = bubble_sort(numbers, count, cmp);
if (!sorted)
die("Failed to sort as requested.");
for (i = 0; i < count; i++) {
printf("%d ", sorted[i]);
}
printf("\n");
free(sorted);
}
void destroy(compare_cb cmp)
{
int i = 0;
unsigned char *data = (unsigned char *)cmp;
for(i = 0; i < 1; i++) {
data[i] = i;
}
printf("\n");
}
void dump(compare_cb cmp)
{
int i = 0;
unsigned char *data = (unsigned char *)cmp;
for(i = 0; i < 25; i++) {
printf("%02x:", data[i]);
}
printf("\n");
}
int main(int argc, char *argv[])
{
if (argc < 2) die("USAGE: ex18 4 3 1 5 6");
int count = argc - 1;
int i = 0;
char **inputs = argv + 1;
int *numbers = malloc(count * sizeof(int));
if (!numbers) die("Memory error.");
for (i = 0; i < count; i++) {
numbers[i] = atoi(inputs[i]);
}
test_sorting(numbers, count, sorted_order);
test_sorting(numbers, count, reverse_order);
test_sorting(numbers, count, strange_order);
free(numbers);
printf("SORTED:");
dump(sorted_order);
destroy(sorted_order);
printf("SORTED:");
dump(sorted_order);
return 0;
}
```
The Analysis
Breaking It
Let's hack your computer with this code:
unsigned char *data = (unsigned char *)cmp;
for(i = 0; i < 25; i++) {
printf("%02x:", data[i]);
}
printf("\n");
You'll see how the bytes of code that make up your program can also be data.
Extra Credit
* Get a hex editor and open up *ex18*, and then find the sequence
of hex digits that start a function to see if you can find the function
in the raw program.
* Find other random things in your hex editor and change them. Rerun your
program and see what happens. Strings you find are the easiest
things to change.
* Pass in the wrong function for the *compare_cb* and see what
the C compiler complains about.
* Pass in NULL and watch your program seriously bite it. Then, run
the debugger and see what that reports.
* Write another sorting algorithm, then change *test_sorting* so
that it takes *both* an arbitrary sort function and the sort function's
callback comparison. Use it to test both of your algorithms.
### Exercise 19 Zed's Awesome Debug Macros
The Plan
* Learn about the macros that vastly improve my code quality.
* Find out why they help you out.
* Explore some advanced C Pre-Processor (CPP) macro magic code generation tricks.
The Code
.\ex19\ex19.c
```c
#include "dbg.h"
#include <stdlib.h>
#include <stdio.h>
void test_debug()
{
// notice you don't need the \n
debug("I have Brown Hair.");
// passing in arguments like printf
debug("I am %d years old.", 37);
}
void test_log_err()
{
log_err("I believe everything is broken.");
log_err("There are %d problems in %s.", 0, "space");
}
void test_log_warn()
{
log_warn("You can safely ignore this.");
log_warn("Maybe consider looking at: %s.", "/etc/passwd");
}
void test_log_info()
{
log_info("Well I did something mundane.");
log_info("It happened %f times today.", 1.3f);
}
int test_check(char *file_name)
{
FILE *input = NULL;
char *block = NULL;
block = malloc(100);
check_mem(block); // should work
input = fopen(file_name, "r");
check(input, "Failed to open %s.", file_name);
free(block);
fclose(input);
return 0;
error:
if (block) free(block);
if (input) fclose(input);
return -1;
}
int test_sentinel(int code)
{
char *temp = malloc(100);
check_mem(temp);
switch (code) {
case 1:
log_info("It worked.");
break;
default:
sentinel("I shouldn't run.");
}
free(temp);
return 0;
error:
if (temp)
free(temp);
return -1;
}
int test_check_mem()
{
char *test = NULL;
check_mem(test);
free(test);
return 1;
error:
return -1;
}
int test_check_debug()
{
int i = 0;
check_debug(i != 0, "Oops, I was 0.");
return 0;
error:
return -1;
}
int main(int argc, char *argv[])
{
check(argc == 2, "Need an argument.");
test_debug();
test_log_err();
test_log_warn();
test_log_info();
check(test_check("ex20.c") == 0, "failed with ex20.c");
check(test_check(argv[1]) == -1, "failed with argv");
check(test_sentinel(1) == 0, "test_sentinel failed.");
check(test_sentinel(100) == -1, "test_sentinel failed.");
check(test_check_mem() == -1, "test_check_mem failed.");
check(test_check_debug() == -1, "test_check_debug failed.");
return 0;
error:
return 1;
}
```
The Analysis
Breaking It
These macros are designed on purpose to prevent you from doing this:
if(blah) debug("This is a thing");
else debug ("This is another thing");
Extra Credit
* Put ``#define NDEBUG`` at the top of the file and check that all
of the debug messages go away.
* Undo that line, and add ``-DNDEBUG`` to ``CFLAGS`` at the
top of the ``Makefile``, and then recompile to see the same thing.
* Modify the logging so that it includes the function name, as well
as the ``file:line``.
### Exercise 20 Advanced Debugging Techniques
The Plan
Demonstrate more advanced debugging techniques and tools.
The Demonstration
Extra Credit
* Find a graphical debugger and compare using it to raw ``gdb``.
These are useful when the program you're looking at is local, but they
are pointless if you have to debug a program on a server.
* You can enable core dumps on your OS, and when a program crashes,
you'll get a core file. This core file is like a postmortem of
the program that you can load up to see what happened right at the crash
and what caused it. Change ``ex31.c`` so that it crashes
after a few iterations, then try to get a core dump and analyze it.
### Exercise 21 Advanced Data Types and Flow Control
The Plan
* Learn about the basic types and keywords for them.
* Cover all the keywords for modifying those types.
* Review fixed exact size types.
* Learn all the different operators on those types.
This is mostly a review!
Available Data Types
int Stores a regular integer, defaulting to 32 bits in size.
double Holds a large floating point number.
float Holds a smaller floating point number.
char Holds a single 1 byte character.
void Indicates "no type".
enum Enumerated types, which work as and convert to integers.
Type Modifiers
unsigned Non-negative numbers.
signed Gives you negative and positive numbers.
long Bigger number.
short Smaller number.
Type Qualifiers
const Constant.
volatile Compiler can't trust it.
register Put it in a CPU register.
Type Conversion
C type promotion order:
* long double
* double
* float
* int (but only char and short int);
* long
When in doubt, parens it out!
Exact Size Types
If you need exact sizes use these:
int8_t 8-bit signed integer
uint8_t 8-bit unsigned integer
int16_t 16-bit signed integer
uint16_t 16-bit unsigned integer
int32_t 32-bit signed integer
uint32_t 32-bit unsigned integer
int64_t 64-bit signed integer
uint64_t 64-bit unsigned integer
Getting Sizes
Refer to the book as there's a large number of
macros to help you get size information for types.
Examples:
int_least32_t int that holds at least 32 bits.
uint_fast32_t unsigned fastest int for 32 bits.
intptr_t signed int that can hold a pointer.
PTRDIFF_MAX maximum value of ptrdiff_t
SIZE_MAX maximum value of a size_t
Available Operators
This section is a review of what you memorized already
to make sure you know everything.
Memorize these again to be sure you have them.
Extra Credit
* Read stdint.h or a description of it, and write out all the
available size identifiers.
* Go through each item here and write out what it does in code. Research it online so you know you got it right.
* Get this information memorized by making flash cards and spending 15
minutes a day practicing it.
* Create a program that prints out examples of each type, and confirm that your
research is right.
### Exercise 22 The Stack, Scope, and Globals
The Plan
* Start to learn about scope.
* Stack vs. global.
* Scope levels inside a function.
* The *extern* keyword.
The Code
.\ex22\ex22.h
```c
#ifndef _ex22_h
#define _ex22_h
struct State {
int the_size;
int the_age;
};
// gets and sets an internal static variable in ex22.c
int get_age(struct State *state);
void set_age(struct State *state, int age);
// updates a static variable that's inside update_ratio
double update_ratio(double ratio);
void print_size();
#endif
```
.\ex22\ex22.c
```c
#include <stdio.h>
#include "ex22.h"
#include "dbg.h"
int get_age(struct State *state)
{
return state->the_age;
}
void set_age(struct State *state, int age)
{
state->the_age = age;
}
double update_ratio(double new_ratio)
{
static double ratio = 1.0;
double old_ratio = ratio;
ratio = new_ratio;
return old_ratio;
}
void print_size()
{
log_info("I think size is: %d", THE_SIZE);
}
```
.\ex22\ex22_main.c
```c
#include "ex22.h"
#include "dbg.h"
const char *MY_NAME = "<NAME>";
void scope_demo(int count)
{
log_info("count is: %d", count);
if (count > 10) {
int numbers = 100; // BAD! BUGS!
log_info("count in this scope is %d", numbers);
}
log_info("count is at exit: %d", count);
count = 3000;
log_info("count after assign: %d", count);
}
int main(int argc, char *argv[])
{
// test out THE_AGE accessors
log_info("My name: %s, age: %d", MY_NAME, get_age());
set_age(100);
log_info("My age is now: %d", get_age());
// test out THE_SIZE extern
log_info("THE_SIZE is: %d", THE_SIZE);
print_size();
THE_SIZE = 9;
log_info("THE SIZE is now: %d", THE_SIZE);
print_size();
// test the ratio function static
log_info("Ratio at first: %f", update_ratio(2.0));
log_info("Ratio again: %f", update_ratio(10.0));
log_info("Ratio once more: %f", update_ratio(300.0));
// test the scope demo
int count = 4;
scope_demo(count);
scope_demo(count * 20);
log_info("count after calling scope_demo: %d", count);
return 0;
}
```
This exercise requires two files:
* ex22.c
* ex22_main.c
The Analysis
Fixing It
Instead of breaking this one I'm going to fix it.
* Do not shadow a variable like *count* on ex22_main.c:11.
* Avoid using too many globals.
* When in doubt, put it on the heap (malloc).
* Don't use function static variables like I did in ex22.c:update_ratio.
* Avoid reusing function parameters.
Breaking It
* Try to directly access variables in ``ex22.c`` from ``ex22_main.c``
that you think you can't. For example, can you get at ``ratio``
inside ``update_ratio``? What if you had a pointer to it?
* Ditch the ``extern`` declaration in ``ex22.h`` to see what
errors or warnings you get.
* Add ``static`` or ``const`` specifiers to different variables,
and then try to change them.
Extra Credit
* Research the concept of pass by value versus pass by reference. Write an
example of both (a small sketch follows this list).
* Use pointers to gain access to things you shouldn't have access to.
* Use your debugger to see what this kind of access looks like when you
do it wrong.
* Write a recursive function that causes a stack overflow. Don't know
what a recursive function is? Try calling ``scope_demo`` at the
bottom of ``scope_demo`` itself so that it loops.
* Rewrite the ``Makefile`` so that it can build this.
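For the first extra credit item, here is a minimal sketch of the difference; ``grow_by_value`` and ``grow_by_reference`` are hypothetical names, not part of the exercise code.
```c
#include <stdio.h>

// pass by value: the function gets a copy, the caller's variable is untouched
void grow_by_value(int count)
{
    count += 100;
}

// pass by "reference": the function gets a pointer and changes the caller's variable
void grow_by_reference(int *count)
{
    *count += 100;
}

int main(void)
{
    int count = 4;

    grow_by_value(count);
    printf("after by value: %d\n", count);      // still 4

    grow_by_reference(&count);
    printf("after by reference: %d\n", count);  // now 104

    return 0;
}
```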
### Exercise 23 Meet Duff's Device
The Plan
Learn the most evil awesome hack ever:
Duff's Device
The Code
.\ex23\ex23.c
```c
#include <stdio.h>
#include <string.h>
#include "dbg.h"
int normal_copy(char *from, char *to, int count)
{
int i = 0;
for (i = 0; i < count; i++) {
to[i] = from[i];
}
return i;
}
int duffs_device(char *from, char *to, int count)
{
{
int n = (count + 7) / 8;
switch (count % 8) {
case 0:
do {
*to++ = *from++;
case 7:
*to++ = *from++;
case 6:
*to++ = *from++;
case 5:
*to++ = *from++;
case 4:
*to++ = *from++;
case 3:
*to++ = *from++;
case 2:
*to++ = *from++;
case 1:
*to++ = *from++;
} while (--n > 0);
}
}
return count;
}
int zeds_device(char *from, char *to, int count)
{
{
int n = (count + 7) / 8;
debug("n starts: %d, count: %d, count%%8: %d",
n, count, count % 8);
switch (count % 8) {
case 0:
again: *to++ = *from++;
case 7:
*to++ = *from++;
case 6:
*to++ = *from++;
case 5:
*to++ = *from++;
case 4:
*to++ = *from++;
case 3:
*to++ = *from++;
case 2:
*to++ = *from++;
case 1:
*to++ = *from++;
debug("last case: n=%d", n);
if (--n > 0) {
debug("going again: n=%d", n);
goto again;
}
}
}
return count;
}
int valid_copy(char *data, int count, char expects)
{
int i = 0;
for (i = 0; i < count; i++) {
if (data[i] != expects) {
log_err("[%d] %c != %c", i, data[i], expects);
return 0;
}
}
return 1;
}
int main(int argc, char *argv[])
{
char from[1003] = { 'a' };
char to[1003] = { 'c' };
int rc = 0;
// setup the from to have some stuff
memset(from, 'x', 1003);
// set it to a failure mode
memset(to, 'y', 1003);
check(valid_copy(to, 1003, 'y'), "Not initialized right.");
// use normal copy to
rc = normal_copy(from, to, 1003);
check(rc == 1003, "Normal copy failed: %d", rc);
check(valid_copy(to, 1003, 'x'), "Normal copy failed.");
// reset
memset(to, 'y', 1003);
// duffs version
rc = duffs_device(from, to, 1003);
check(rc == 1003, "Duff's device failed: %d", rc);
check(valid_copy(to, 1003, 'x'), "Duff's device failed copy.");
// reset
memset(to, 'y', 1003);
// my version
rc = zeds_device(from, to, 1003);
check(rc == 1003, "Zed's device failed: %d", rc);
check(valid_copy(to, 1003, 'x'), "Zed's device failed copy.");
return 0;
error:
return 1;
}
```
Remember that this is *bad* code.
It's very interesting though, so struggle with it.
The Analysis
Before you continue, try to figure out what this does.
Consider it a debugging problem.
Clues
* Print this code out so that you can write on some paper.
* Write each of the variables in a table as they
look when they get initialized right before the ``switch-statement``.
* Follow the logic to the switch, then do the jump to the right case.
* Update the variables, including the ``to``, ``from``, and the
arrays they point at.
Clues
* When you get to the ``while`` part or my ``goto`` alternative,
check your variables, and then follow the logic either back to the
top of the ``do-while`` or to where the ``again`` label is
located.
* Follow through this manual tracing, updating the variables, until
you're sure you see how this flows.
Pause!
I will then show you the solution so pause if you do
*NOT* want to see it yet.
Solving It
Watch me walk through how this works to see if it matches what you did.
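As a sanity check on your own trace, this tiny sketch only computes the setup values for ``count = 1003``: ``n`` starts at 126 and ``count % 8`` is 3, so the jump into ``case 3`` copies 3 bytes and the remaining 125 full passes copy 8 bytes each, for 3 + 125 * 8 = 1003.
```c
#include <stdio.h>

int main(void)
{
    int count = 1003;
    int n = (count + 7) / 8;                                 // 126

    printf("n = %d, count %% 8 = %d\n", n, count % 8);       // n = 126, remainder 3
    printf("bytes copied: %d\n", count % 8 + (n - 1) * 8);   // 3 + 125 * 8 = 1003
    return 0;
}
```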
Extra Credit
* Never use this again.
* Go look at the Wikipedia entry for Duff's device and see if you can
spot the error. Read the article, compare it to the version I have here, and try to understand why the Wikipedia code won't work for you
but worked for Tom Duff.
* Create a set of macros that lets you create any length of device like this.
For example, what if you wanted to have 32 case statements and didn't want
to write out all of them? Can you do a macro that lays down eight at a time?
Extra Credit
* Change the ``main`` to conduct some speed tests to see which one is
really the fastest.
* Read about ``memcpy``, ``memmove``, and ``memset``, and also compare
their speed.
* Never use this again!
### Exercise 24 Input, Output, Files
The Plan
* Learn the basics of working with files in C.
* Get an initial list of the "f-functions".
The Code
.\ex24\ex24.c
```c
#include <stdio.h>
#include "dbg.h"
#define MAX_DATA 100
typedef enum EyeColor {
BLUE_EYES, GREEN_EYES, BROWN_EYES,
BLACK_EYES, OTHER_EYES
} EyeColor;
const char *EYE_COLOR_NAMES[] = {
"Blue", "Green", "Brown", "Black", "Other"
};
typedef struct Person {
int age;
char first_name[MAX_DATA];
char last_name[MAX_DATA];
EyeColor eyes;
float income;
} Person;
int main(int argc, char *argv[])
{
Person you = {.age = 0 };
int i = 0;
char *in = NULL;
printf("What's your First Name? ");
in = fgets(you.first_name, MAX_DATA - 1, stdin);
check(in != NULL, "Failed to read first name.");
printf("What's your Last Name? ");
in = fgets(you.last_name, MAX_DATA - 1, stdin);
check(in != NULL, "Failed to read last name.");
printf("How old are you? ");
int rc = fscanf(stdin, "%d", &you.age);
check(rc > 0, "You have to enter a number.");
printf("What color are your eyes:\n");
for (i = 0; i <= OTHER_EYES; i++) {
printf("%d) %s\n", i + 1, EYE_COLOR_NAMES[i]);
}
printf("> ");
int eyes = -1;
rc = fscanf(stdin, "%d", &eyes);
check(rc > 0, "You have to enter a number.");
you.eyes = eyes - 1;
check(you.eyes <= OTHER_EYES
&& you.eyes >= 0, "Do it right, that's not an option.");
printf("How much do you make an hour? ");
rc = fscanf(stdin, "%f", &you.income);
check(rc > 0, "Enter a floating point number.");
printf("----- RESULTS -----\n");
printf("First Name: %s", you.first_name);
printf("Last Name: %s", you.last_name);
printf("Age: %d\n", you.age);
printf("Eyes: %s\n", EYE_COLOR_NAMES[you.eyes]);
printf("Income: %f\n", you.income);
return 0;
error:
return -1;
}
```
The Analysis
Breaking It
* Trying out *fgets* and the problems with *gets*.
* Feed it */dev/urandom* to give it garbage.
Extra Credit
* Rewrite this to not use ``fscanf`` at all. You'll need to use
functions like ``atoi`` to convert the input strings to numbers.
* Change this to use plain ``scanf`` instead of ``fscanf`` to
see what the difference is.
* Fix it so that their input names get stripped of the trailing newline
characters and any whitespace (a helper sketch follows this list).
Extra Credit
* Use ``scanf`` to write a function that reads one character at a time
and fills in the names but doesn't go past the end. Make this function
generic so it can take a size for the string, but just make sure you end
the string with ``'\0'`` no matter what.
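For the trailing-newline item above, here is a minimal sketch of one way to do the stripping; ``strip_newline`` is a hypothetical helper and assumes the buffer that ``fgets`` filled is properly ``'\0'`` terminated.
```c
#include <string.h>
#include <ctype.h>

// hypothetical helper: trims the trailing '\n' (and any other whitespace)
// that fgets leaves on the end of the buffer
void strip_newline(char *buffer)
{
    size_t len = strlen(buffer);

    while (len > 0 && isspace((unsigned char)buffer[len - 1])) {
        buffer[len - 1] = '\0';
        len--;
    }
}
```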
### Exercise 25 Variable Argument Functions
The Plan
* Use variable argument functions.
* Write our own simple version of *scanf*.
The Code
.\ex25\ex25.c
```c
#include <stdlib.h>
#include <stdio.h>
#include <stdarg.h>
#include "dbg.h"
#define MAX_DATA 100
int read_string(char **out_string, int max_buffer)
{
*out_string = calloc(1, max_buffer + 1);
check_mem(*out_string);
char *result = fgets(*out_string, max_buffer, stdin);
check(result != NULL, "Input error.");
return 0;
error:
if (*out_string) free(*out_string);
*out_string = NULL;
return -1;
}
int read_int(int *out_int)
{
char *input = NULL;
int rc = read_string(&input, MAX_DATA);
check(rc == 0, "Failed to read number.");
*out_int = atoi(input);
free(input);
return 0;
error:
if (input) free(input);
return -1;
}
int read_scan(const char *fmt, ...)
{
int i = 0;
int rc = 0;
int *out_int = NULL;
char *out_char = NULL;
char **out_string = NULL;
int max_buffer = 0;
va_list argp;
va_start(argp, fmt);
for (i = 0; fmt[i] != '\0'; i++) {
if (fmt[i] == '%') {
i++;
switch (fmt[i]) {
case '\0':
sentinel("Invalid format, you ended with %%.");
break;
case 'd':
out_int = va_arg(argp, int *);
rc = read_int(out_int);
check(rc == 0, "Failed to read int.");
break;
case 'c':
out_char = va_arg(argp, char *);
*out_char = fgetc(stdin);
break;
case 's':
max_buffer = va_arg(argp, int);
out_string = va_arg(argp, char **);
rc = read_string(out_string, max_buffer);
check(rc == 0, "Failed to read string.");
break;
default:
sentinel("Invalid format.");
}
} else {
fgetc(stdin);
}
check(!feof(stdin) && !ferror(stdin), "Input error.");
}
va_end(argp);
return 0;
error:
va_end(argp);
return -1;
}
int main(int argc, char *argv[])
{
char *first_name = NULL;
char initial = ' ';
char *last_name = NULL;
int age = 0;
printf("What's your first name? ");
int rc = read_scan("%s", MAX_DATA, &first_name);
check(rc == 0, "Failed first name.");
printf("What's your initial? ");
rc = read_scan("%c\n", &initial);
check(rc == 0, "Failed initial.");
printf("What's your last name? ");
rc = read_scan("%s", MAX_DATA, &last_name);
check(rc == 0, "Failed last name.");
printf("How old are you? ");
rc = read_scan("%d", &age);
printf("---- RESULTS ----\n");
printf("First Name: %s", first_name);
printf("Initial: '%c'\n", initial);
printf("Last Name: %s", last_name);
printf("Age: %d\n", age);
free(first_name);
free(last_name);
return 0;
error:
return -1;
}
```
The Analysis
Breaking It
* Change the code so that you forget to pass in the initial size for '%s' formats.
* Give it more data than ``MAX_DATA``, and then see how omitting ``calloc`` in ``read_string`` changes how it works.
* There's a problem where fgets eats the newlines, so try to fix that using
``fgetc`` but leave out the ``\0`` that ends the string.
Extra Credit
* Make double and triple sure that you know what each of the ``out_``
variables is doing. Most importantly, you should know that ``out_string`` is
a pointer to a pointer, so that you understand when you're setting the pointer
versus the contents. Break down each of the ``%`` format characters that ``read_scan`` handles.
Extra Credit
* Write a similar function to ``printf`` that uses the varargs system,
and rewrite ``main`` to use it (a minimal sketch follows this list).
* As usual, read the man page on all of this so that you know what it does
on your platform. Some platforms will use macros, others will use
functions, and some will have these do nothing. It all depends on the
compiler and the platform you use.
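As a hint for the first item, here is a minimal sketch of a ``printf``-style wrapper built on the varargs system; ``my_print`` is a hypothetical name, and it simply forwards the argument list to ``vfprintf`` instead of parsing the format itself.
```c
#include <stdio.h>
#include <stdarg.h>

// hypothetical printf-style wrapper: forwards the va_list to vfprintf
void my_print(const char *fmt, ...)
{
    va_list argp;
    va_start(argp, fmt);
    vfprintf(stdout, fmt, argp);
    va_end(argp);
}

// usage: my_print("Name: %s, Age: %d\n", "Zed", 40);
```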
### Exercise 26 Project logfind
The Plan
Attempt your first project!
logfind
How Projects Work
The projects in this book are designed to make you apply
what you know so far to something "real world".
1. I will tell you when to *pause* so you can try to solve it yourself.
2. You will be given the challenge. Pause!
3. You will be given clues. Pause!
4. Finally the solution.
5. Then I try to break my own solution.
The Code
logfind.1
.\ex26\logfind.1\logfind.c
```c
#include "dbg.h"
int main(int argc, char *argv[])
{
check(argc > 2, "USAGE: logfind word word word");
return 0;
error:
return 1;
}
```
.\ex26\logfind.1\Makefile
```makefile
CFLAGS=-Wall -g
all: logfind
./logfind || true
./logfind test test test
```
logfind.2
.\ex26\logfind.2\logfind.c
```c
#include "dbg.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
const size_t MAX_LINE = 1024;
int scan_file(const char *filename, int search_len, char *search_for[])
{
char *line = calloc(MAX_LINE, 1);
FILE *file = fopen(filename, "r");
char *found = NULL;
int i = 0;
check_mem(line);
check(file, "Failed to open file: %s", filename);
// read each line of the file and search that line for the contents
while(fgets(line, MAX_LINE-1, file) != NULL && found == NULL) {
for(i = 0; i < search_len && found == NULL; i++) {
found = strcasestr(line, search_for[i]);
if(found) {
printf("%s\n", filename);
}
}
}
free(line);
fclose(file);
return 0;
error:
if(line) free(line);
if(file) fclose(file);
return -1;
}
int main(int argc, char *argv[])
{
check(argc > 1, "USAGE: logfind word word word");
scan_file("logfind.c", argc, argv);
return 0;
error:
return 1;
}
```
.\ex26\logfind.2\Makefile
```makefile
CFLAGS=-Wall -g
all: logfind
./logfind || true
./logfind error
clean:
rm -f logfind
```
logfind.3
.\ex26\logfind.3\logfind.c
```c
#include "dbg.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <glob.h>
const size_t MAX_LINE = 1024;
int list_files(glob_t *pglob)
{
char *line = calloc(MAX_LINE, 1);
FILE *file = fopen(".logfind", "r");
int glob_flags = GLOB_TILDE;
int i = 0;
int rc = -1;
check(pglob != NULL, "Invalid glob_t given.");
check_mem(line);
check(file, "Failed to open .logfind. Make that first.");
rc = glob("*.h", glob_flags, NULL, pglob);
check(rc == 0, "Failed to glob.");
rc = glob("*.c", glob_flags | GLOB_APPEND, NULL, pglob);
check(rc == 0, "Failed to glob.");
for(i = 0; i < pglob->gl_pathc; i++) {
debug("Matched file: %s", pglob->gl_pathv[i]);
}
rc = 0; // all good
error: // fallthrough
if(line) free(line);
return rc;
}
int scan_file(const char *filename, int search_len, char *search_for[])
{
char *line = calloc(MAX_LINE, 1);
FILE *file = fopen(filename, "r");
char *found = NULL;
int i = 0;
check_mem(line);
check(file, "Failed to open file: %s", filename);
// read each line of the file and search that line for the contents
while(fgets(line, MAX_LINE-1, file) != NULL && found == NULL) {
for(i = 0; i < search_len && found == NULL; i++) {
found = strcasestr(line, search_for[i]);
if(found) {
printf("%s\n", filename);
}
}
}
free(line);
fclose(file);
return 0;
error:
if(line) free(line);
if(file) fclose(file);
return -1;
}
int main(int argc, char *argv[])
{
int i = 0;
glob_t files_found;
check(argc > 1, "USAGE: logfind word word word");
check(list_files(&files_found) == 0, "Failed to list files.");
for(i = 0; i < files_found.gl_pathc; i++) {
scan_file(files_found.gl_pathv[i], argc, argv);
}
globfree(&files_found);
return 0;
error:
return 1;
}
```
.\ex26\logfind.3\Makefile
```makefile
CFLAGS=-Wall -g
all: logfind
./logfind || true
./logfind error
clean:
rm -f logfind
```
logfind.4
.\ex26\logfind.4\logfind.c
```c
#define NDEBUG
#include "dbg.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <glob.h>
const size_t MAX_LINE = 1024;
int list_files(glob_t *pglob)
{
char *line = calloc(MAX_LINE, 1);
FILE *file = fopen(".logfind", "r");
int glob_flags = GLOB_TILDE;
int i = 0;
int rc = -1;
check(pglob != NULL, "Invalid glob_t given.");
check_mem(line);
check(file, "Failed to open .logfind. Make that first.");
while(fgets(line, MAX_LINE-1, file) != NULL) {
line[strlen(line) - 1] = '\0'; // drop the \n ending
debug("Globbing %s", line);
rc = glob(line, glob_flags, NULL, pglob);
check(rc == 0 || rc == GLOB_NOMATCH, "Failed to glob.");
// dumb work around to a stupid design in glob
if(glob_flags == GLOB_TILDE) glob_flags |= GLOB_APPEND;
}
for(i = 0; i < pglob->gl_pathc; i++) {
debug("Matched file: %s", pglob->gl_pathv[i]);
}
rc = 0; // all good
error: // fallthrough
if(line) free(line);
return rc;
}
int scan_file(const char *filename, int search_len, char *search_for[])
{
char *line = calloc(MAX_LINE, 1);
FILE *file = fopen(filename, "r");
char *found = NULL;
int i = 0;
check_mem(line);
check(file, "Failed to open file: %s", filename);
// read each line of the file and search that line for the contents
while(fgets(line, MAX_LINE-1, file) != NULL && found == NULL) {
for(i = 0; i < search_len && found == NULL; i++) {
found = strcasestr(line, search_for[i]);
if(found) {
printf("%s\n", filename);
}
}
}
free(line);
fclose(file);
return 0;
error:
if(line) free(line);
if(file) fclose(file);
return -1;
}
int main(int argc, char *argv[])
{
int i = 0;
glob_t files_found;
check(argc > 1, "USAGE: logfind word word word");
check(list_files(&files_found) == 0, "Failed to list files.");
for(i = 0; i < files_found.gl_pathc; i++) {
scan_file(files_found.gl_pathv[i], argc, argv);
}
globfree(&files_found);
return 0;
error:
return 1;
}
```
.\ex26\logfind.4\Makefile
```makefile
CFLAGS=-Wall -g
all: logfind
./logfind || true
./logfind MAX_LINE
clean:
rm -f logfind
```
logfind.5
.\ex26\logfind.5\logfind.c
```c
#define NDEBUG
#include "dbg.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <glob.h>
const size_t MAX_LINE = 1024;
int list_files(glob_t *pglob)
{
char *line = calloc(MAX_LINE, 1);
FILE *file = fopen(".logfind", "r");
int glob_flags = GLOB_TILDE;
int i = 0;
int rc = -1;
check(pglob != NULL, "Invalid glob_t given.");
check_mem(line);
check(file, "Failed to open .logfind. Make that first.");
while(fgets(line, MAX_LINE-1, file) != NULL) {
line[strlen(line) - 1] = '\0'; // drop the \n ending
debug("Globbing %s", line);
rc = glob(line, glob_flags, NULL, pglob);
check(rc == 0 || rc == GLOB_NOMATCH, "Failed to glob.");
// dumb work around to a stupid design in glob
if(glob_flags == GLOB_TILDE) glob_flags |= GLOB_APPEND;
}
for(i = 0; i < pglob->gl_pathc; i++) {
debug("Matched file: %s", pglob->gl_pathv[i]);
}
rc = 0; // all good
error: // fallthrough
if(line) free(line);
return rc;
}
int found_it(int use_or, int found_count, int search_len)
{
debug("use_or: %d, found_count: %d, search_len: %d", use_or, found_count, search_len);
if(use_or && found_count > 0) {
return 1;
} else if(!use_or && found_count == search_len) {
return 1;
} else {
return 0;
}
}
int scan_file(const char *filename, int use_or, int search_len, char *search_for[])
{
char *line = calloc(MAX_LINE, 1);
FILE *file = fopen(filename, "r");
int found_count = 0;
int i = 0;
check_mem(line);
check(file, "Failed to open file: %s", filename);
// read each line of the file and search that line for the contents
while(fgets(line, MAX_LINE-1, file) != NULL)
{
for(i = 0; i < search_len; i++) {
if(strcasestr(line, search_for[i]) != NULL) {
debug("file: %s, line: %s, search: %s", filename, line, search_for[i]);
found_count++;
}
}
if(found_it(use_or, found_count, search_len)) {
printf("%s\n", filename);
break;
} else {
found_count = 0;
}
}
free(line);
fclose(file);
return 0;
error:
if(line) free(line);
if(file) fclose(file);
return -1;
}
int parse_args(int *use_or, int *argc, char **argv[])
{
(*argc)--;
(*argv)++;
if(strcmp((*argv)[0], "-o") == 0) {
*use_or = 1;
(*argc)--; // skip the -o
(*argv)++;
check(*argc > 1, "You need words after -o.");
} else {
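// note: this assigns the local pointer variable, not *use_or; compare the Exercise 27 version, which dereferences it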
use_or = 0;
}
return 0;
error:
return -1;
}
int main(int argc, char *argv[])
{
int i = 0;
int use_or = 0;
glob_t files_found;
check(argc > 1, "USAGE: logfind [-o] words");
check(parse_args(&use_or, &argc, &argv) == 0, "USAGE: logfind [-o] words");
check(list_files(&files_found) == 0, "Failed to list files.");
for(i = 0; i < files_found.gl_pathc; i++) {
scan_file(files_found.gl_pathv[i], use_or, argc, argv);
}
globfree(&files_found);
return 0;
error:
return 1;
}
```
.\ex26\logfind.5\Makefile
```makefile
CFLAGS=-Wall -g
all: logfind
./logfind || true
./logfind MAX_LINE
./logfind error MAX LINE
./logfind -o error MAX LINE
clean:
rm -f logfind
```
If you ever get super stuck, you can visit the book's website to get all of the code for this book.
The Challenge
I want a tool called ``logfind`` that lets me search through log files for
text. This tool is a specialized version of another tool called ``grep``, but
designed only for log files on a system.
The Challenge
* This tool takes any sequence of words and assumes I mean "and" for them. So ``logfind zedshaw smart guy`` will find all files that have ``zedshaw`` *and* ``smart`` *and* ``guy`` in them.
* It takes an optional argument of ``-o`` if the parameters are meant to be *or* logic.
* It loads the list of allowed log files from ``~/.logfind``.
The Challenge
* The list of file names can be anything that the ``glob`` function allows. Refer to ``man 3 glob`` to see how this works. I suggest starting with just a flat list of exact files, and then add ``glob`` functionality. An example ``.logfind`` file is sketched after this list.
* You should output the matching lines as you scan, and try to match them as fast as possible.
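As an illustration only, a ``.logfind`` file is just one pattern per line; the entries below are hypothetical, so use whatever files or globs make sense on your system.
```
*.log
/var/log/*.log
~/.bash_history
```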
Demo
Here is a demo of me using the one I wrote.
Pause!
Now it's time for you to attempt to solve it from just this idea.
The Clues
* Remember to solve it a piece at a time.
* Start with just getting the arguments.
* Then figure out how to open files and just open the ones in this directory.
* Then figure out how to read the files.
* Then find out how to find the arguments in the files.
* Then figure out how glob works.
* Then use glob to find the files and open them.
It helps to do each of these in *main()* then "carve" them out into their
own functions.
Pause!
The Solution
Breaking It
### Exercise 27 Creative and Defensive Programming
logfind.5
.\ex27\logfind.5\logfind.c
```c
#define NDEBUG
#include "dbg.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <glob.h>
#include <assert.h>
const size_t MAX_LINE = 1024;
int list_files(glob_t *pglob)
{
char *line = calloc(MAX_LINE, 1);
FILE *file = fopen(".logfind", "r");
int glob_flags = GLOB_TILDE;
int i = 0;
int rc = -1;
check(pglob != NULL, "Invalid glob_t given.");
check_mem(line);
check(file, "Failed to open .logfind. Make that first.");
while(fgets(line, MAX_LINE-1, file) != NULL) {
size_t line_length = strnlen(line, MAX_LINE - 1);
assert(line_length < MAX_LINE && "Got a line length too long.");
line[line_length] = '\0'; // drop the \n ending
debug("Globbing %s", line);
rc = glob(line, glob_flags, NULL, pglob);
check(rc == 0 || rc == GLOB_NOMATCH, "Failed to glob.");
// dumb work around to a stupid design in glob
if(glob_flags == GLOB_TILDE) glob_flags |= GLOB_APPEND;
}
for(i = 0; i < pglob->gl_pathc; i++) {
debug("Matched file: %s", pglob->gl_pathv[i]);
}
rc = 0; // all good
error: // fallthrough
if(line) free(line);
return rc;
}
int found_it(int use_or, int found_count, int search_len)
{
debug("use_or: %d, found_count: %d, search_len: %d", use_or, found_count, search_len);
if(use_or && found_count > 0) {
return 1;
} else if(!use_or && found_count == search_len) {
return 1;
} else {
return 0;
}
}
int scan_file(const char *filename, int use_or, int search_len, char *search_for[])
{
char *line = calloc(MAX_LINE, 1);
FILE *file = fopen(filename, "r");
int found_count = 0;
int i = 0;
check_mem(line);
check(file, "Failed to open file: %s", filename);
// read each line of the file and search that line for the contents
while(fgets(line, MAX_LINE-1, file) != NULL)
{
for(i = 0; i < search_len; i++) {
if(strcasestr(line, search_for[i]) != NULL) {
debug("file: %s, line: %s, search: %s", filename, line, search_for[i]);
found_count++;
}
}
if(found_it(use_or, found_count, search_len)) {
printf("%s\n", filename);
break;
} else {
found_count = 0;
}
}
free(line);
fclose(file);
return 0;
error:
if(line) free(line);
if(file) fclose(file);
return -1;
}
int parse_args(int *use_or, int *argc, char **argv[])
{
(*argc)--;
(*argv)++;
if(strcmp((*argv)[0], "-o") == 0) {
*use_or = 1;
(*argc)--; // skip the -o
(*argv)++;
check(*argc > 1, "You need words after -o.");
} else {
*use_or = 0;
}
return 0;
error:
return -1;
}
int main(int argc, char *argv[])
{
int i = 0;
int use_or = 1;
glob_t files_found;
check(argc > 1, "USAGE: logfind [-o] words");
check(parse_args(&use_or, &argc, &argv) == 0, "USAGE: logfind [-o] words");
check(list_files(&files_found) == 0, "Failed to list files.");
for(i = 0; i < files_found.gl_pathc; i++) {
scan_file(files_found.gl_pathv[i], use_or, argc, argv);
}
globfree(&files_found);
return 0;
error:
return 1;
}
```
.\ex27\logfind.5\Makefile
```makefile
CFLAGS=-Wall -g
all: logfind
./logfind || true
./logfind MAX_LINE
./logfind error MAX LINE
./logfind -o error MAX LINE
clean:
rm -f logfind
```
Read The Book
This video is a demonstration of the concepts in the book.
Go read the book.
Demonstration
I will demonstrate each of the following:
* Fail early and openly.
* Document assumptions.
* Prevention over documentation.
* Automate everything.
* Simplify and clarify.
* Question authority.
Fail Early and Openly
Document Assumptions
Prevention over Documentation
Automate Everything
Simplify and Clarify
Question Authority
Bonus: Assume Nothing
### Exercise 28 Intermediate Makefiles
The Plan
* Learn how to create a project skeleton to make starting easier.
* Learn more advanced GNU make tricks.
The Skeleton
.\ex28\c-skeleton
.\ex28\c-skeleton\src\dbg.h
```c
#ifndef __dbg_h__
#define __dbg_h__
#include <stdio.h>
#include <errno.h>
#include <string.h>
#ifdef NDEBUG
#define debug(M, ...)
#else
#define debug(M, ...) fprintf(stderr, "DEBUG %s:%d: " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#endif
#define clean_errno() (errno == 0 ? "None" : strerror(errno))
#define log_err(M, ...) fprintf(stderr,\
"[ERROR] (%s:%d: errno: %s) " M "\n", __FILE__, __LINE__,\
clean_errno(), ##__VA_ARGS__)
#define log_warn(M, ...) fprintf(stderr,\
"[WARN] (%s:%d: errno: %s) " M "\n",\
__FILE__, __LINE__, clean_errno(), ##__VA_ARGS__)
#define log_info(M, ...) fprintf(stderr, "[INFO] (%s:%d) " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#define check(A, M, ...) if(!(A)) {\
log_err(M, ##__VA_ARGS__); errno=0; goto error; }
#define sentinel(M, ...) { log_err(M, ##__VA_ARGS__);\
errno=0; goto error; }
#define check_mem(A) check((A), "Out of memory.")
#define check_debug(A, M, ...) if(!(A)) { debug(M, ##__VA_ARGS__);\
errno=0; goto error; }
#endif
```
.\ex28\c-skeleton\src\libex29.c
```c
#include <stdio.h>
#include <ctype.h>
#include "dbg.h"
int print_a_message(const char *msg)
{
printf("A STRING: %s\n", msg);
return 0;
}
int uppercase(const char *msg)
{
int i = 0;
// BUG: \0 termination problems
for(i = 0; msg[i] != '\0'; i++) {
printf("%c", toupper(msg[i]));
}
printf("\n");
return 0;
}
int lowercase(const char *msg)
{
int i = 0;
// BUG: \0 termination problems
for(i = 0; msg[i] != '\0'; i++) {
printf("%c", tolower(msg[i]));
}
printf("\n");
return 0;
}
int fail_on_purpose(const char *msg)
{
return 1;
}
```
.\ex28\c-skeleton\tests\libex29_tests.c
```c
#include "minunit.h"
#include <dlfcn.h>
typedef int (*lib_function) (const char *data);
char *lib_file = "build/libYOUR_LIBRARY.so";
void *lib = NULL;
int check_function(const char *func_to_run, const char *data,
int expected)
{
lib_function func = dlsym(lib, func_to_run);
check(func != NULL,
"Did not find %s function in the library %s: %s", func_to_run,
lib_file, dlerror());
int rc = func(data);
check(rc == expected, "Function %s return %d for data: %s",
func_to_run, rc, data);
return 1;
error:
return 0;
}
char *test_dlopen()
{
lib = dlopen(lib_file, RTLD_NOW);
mu_assert(lib != NULL, "Failed to open the library to test.");
return NULL;
}
char *test_functions()
{
mu_assert(check_function("print_a_message", "Hello", 0),
"print_a_message failed.");
mu_assert(check_function("uppercase", "Hello", 0),
"uppercase failed.");
mu_assert(check_function("lowercase", "Hello", 0),
"lowercase failed.");
return NULL;
}
char *test_failures()
{
mu_assert(check_function("fail_on_purpose", "Hello", 1),
"fail_on_purpose should fail.");
return NULL;
}
char *test_dlclose()
{
int rc = dlclose(lib);
mu_assert(rc == 0, "Failed to close lib.");
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_dlopen);
mu_run_test(test_functions);
mu_run_test(test_failures);
mu_run_test(test_dlclose);
return NULL;
}
RUN_TESTS(all_tests);
```
The video is probably easier to follow than the book.
Watch me do this.
Using The Skeleton
.\ex28\c-skeleton
.\ex28\c-skeleton\src\dbg.h
```c
#ifndef __dbg_h__
#define __dbg_h__
#include <stdio.h>
#include <errno.h>
#include <string.h>
#ifdef NDEBUG
#define debug(M, ...)
#else
#define debug(M, ...) fprintf(stderr, "DEBUG %s:%d: " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#endif
#define clean_errno() (errno == 0 ? "None" : strerror(errno))
#define log_err(M, ...) fprintf(stderr,\
"[ERROR] (%s:%d: errno: %s) " M "\n", __FILE__, __LINE__,\
clean_errno(), ##__VA_ARGS__)
#define log_warn(M, ...) fprintf(stderr,\
"[WARN] (%s:%d: errno: %s) " M "\n",\
__FILE__, __LINE__, clean_errno(), ##__VA_ARGS__)
#define log_info(M, ...) fprintf(stderr, "[INFO] (%s:%d) " M "\n",\
__FILE__, __LINE__, ##__VA_ARGS__)
#define check(A, M, ...) if(!(A)) {\
log_err(M, ##__VA_ARGS__); errno=0; goto error; }
#define sentinel(M, ...) { log_err(M, ##__VA_ARGS__);\
errno=0; goto error; }
#define check_mem(A) check((A), "Out of memory.")
#define check_debug(A, M, ...) if(!(A)) { debug(M, ##__VA_ARGS__);\
errno=0; goto error; }
#endif
```
.\ex28\c-skeleton\src\libex29.c
```c
#include <stdio.h>
#include <ctype.h>
#include "dbg.h"
int print_a_message(const char *msg)
{
printf("A STRING: %s\n", msg);
return 0;
}
int uppercase(const char *msg)
{
int i = 0;
// BUG: \0 termination problems
for(i = 0; msg[i] != '\0'; i++) {
printf("%c", toupper(msg[i]));
}
printf("\n");
return 0;
}
int lowercase(const char *msg)
{
int i = 0;
// BUG: \0 termination problems
for(i = 0; msg[i] != '\0'; i++) {
printf("%c", tolower(msg[i]));
}
printf("\n");
return 0;
}
int fail_on_purpose(const char *msg)
{
return 1;
}
```
.\ex28\c-skeleton\tests\libex29_tests.c
```c
#include "minunit.h"
#include <dlfcn.h>
typedef int (*lib_function) (const char *data);
char *lib_file = "build/libYOUR_LIBRARY.so";
void *lib = NULL;
int check_function(const char *func_to_run, const char *data,
int expected)
{
lib_function func = dlsym(lib, func_to_run);
check(func != NULL,
"Did not find %s function in the library %s: %s", func_to_run,
lib_file, dlerror());
int rc = func(data);
check(rc == expected, "Function %s return %d for data: %s",
func_to_run, rc, data);
return 1;
error:
return 0;
}
char *test_dlopen()
{
lib = dlopen(lib_file, RTLD_NOW);
mu_assert(lib != NULL, "Failed to open the library to test.");
return NULL;
}
char *test_functions()
{
mu_assert(check_function("print_a_message", "Hello", 0),
"print_a_message failed.");
mu_assert(check_function("uppercase", "Hello", 0),
"uppercase failed.");
mu_assert(check_function("lowercase", "Hello", 0),
"lowercase failed.");
return NULL;
}
char *test_failures()
{
mu_assert(check_function("fail_on_purpose", "Hello", 1),
"fail_on_purpose should fail.");
return NULL;
}
char *test_dlclose()
{
int rc = dlclose(lib);
mu_assert(rc == 0, "Failed to close lib.");
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_dlopen);
mu_run_test(test_functions);
mu_run_test(test_failures);
mu_run_test(test_dlclose);
return NULL;
}
RUN_TESTS(all_tests);
```
Now I'll use the skeleton to start a simple project for the next exercise.
The Analysis
Let's look at the Makefile in depth.
Extra Credit
### Exercise 29 Libraries and Linking
The Plan
* Learn about libraries and how to link against them.
* Learn how to load a dynamic library from inside C.
The Code
.\ex29\ex29.c
```c
#include <stdio.h>
#include "dbg.h"
#include <dlfcn.h>
typedef int (*lib_function) (const char *data);
int main(int argc, char *argv[])
{
int rc = 0;
check(argc == 4, "USAGE: ex29 libex29.so function data");
char *lib_file = argv[1];
char *func_to_run = argv[2];
char *data = argv[3];
void *lib = dlopen(lib_file, RTLD_NOW);
check(lib != NULL, "Failed to open the library %s: %s", lib_file,
dlerror());
lib_function func = dlsym(lib, func_to_run);
check(func != NULL,
"Did not find %s function in the library %s: %s", func_to_run,
lib_file, dlerror());
rc = func(data);
check(rc == 0, "Function %s return %d for data: %s", func_to_run,
rc, data);
rc = dlclose(lib);
check(rc == 0, "Failed to close %s", lib_file);
return 0;
error:
return 1;
}
```
.\ex29\libex29.c
```c
#include <stdio.h>
#include <ctype.h>
#include "dbg.h"
int print_a_message(const char *msg)
{
printf("A STRING: %s\n", msg);
return 0;
}
int uppercase(const char *msg)
{
int i = 0;
// BUG: \0 termination problems
for(i = 0; msg[i] != '\0'; i++) {
printf("%c", toupper(msg[i]));
}
printf("\n");
return 0;
}
int lowercase(const char *msg)
{
int i = 0;
// BUG: \0 termination problems
for(i = 0; msg[i] != '\0'; i++) {
printf("%c", tolower(msg[i]));
}
printf("\n");
return 0;
}
int fail_on_purpose(const char *msg)
{
return 1;
}
```
I'll use the project I started from the previous exercise.
This covers some of the extra credit.
The Analysis
This analysis might take a while, but be sure you know Exercise 28 well.
Breaking It
* Wreck the libex29.so file.
Extra Credit
* Were you paying attention to the bad code I have in the ``libex29.c`` functions?
Do you see how, even though I use a for-loop, they still check for ``'\0'``
endings? Fix this so that the functions always take a length for the
string to work with inside the function (a sketch of that change follows this list).
* Read the ``man dlopen`` documentation and read about all of the
related functions. Try some of the other options to ``dlopen``
beside ``RTLD_NOW``.
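Here is a minimal sketch of what the length-taking fix could look like; ``uppercase_n`` is a hypothetical name, and the real extra credit expects you to rework all of the functions this way.
```c
#include <stdio.h>
#include <ctype.h>
#include <stddef.h>

// hypothetical length-aware variant: stops at len even if there is no '\0'
int uppercase_n(const char *msg, size_t len)
{
    size_t i = 0;

    for (i = 0; i < len && msg[i] != '\0'; i++) {
        printf("%c", toupper(msg[i]));
    }

    printf("\n");
    return 0;
}
```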
### Exercise 30 Automated Testing
The Plan
Continue the Exercise 28-29 project and add automated tests to it.
Why Automate Tests
You are a programmer.
Your job is automating.
EVERYTHING
The Code
.\ex30\ex30.c
```c
#include "minunit.h"
char *test_dlopen(int stuff)
{
return NULL;
}
char *test_functions()
{
return NULL;
}
char *test_failures()
{
return NULL;
}
char *test_dlclose()
{
return NULL;
}
char *all_tests()
{
mu_suite_start();
mu_run_test(test_dlopen);
mu_run_test(test_functions);
mu_run_test(test_failures);
mu_run_test(test_dlclose);
return NULL;
}
RUN_TESTS(all_tests);
```
Adding It To libex29
Breaking It
* Making tests fail first is useful.
Extra Credit
* This works but it's probably a bit messy. Clean the ``c-skeleton``
directory up so that it has all of these files, but remove any of the code
related to Exercise 29. You should be able to copy this directory
over and kick-start new projects without much editing.
* Study the ``runtests.sh``, and then go read about ``bash`` syntax
so you know what it does. Do you think you could write a C version of this
script?
### Exercise 31 Common Undefined Behavior
The Plan
Review the issues around Undefined and Unspecified Behavior (UB).
Read The Book
The book lists many of the UB and discusses why they are important to know
about.
The Code
.\ex31\ex31.c
```c
#include <unistd.h>
int main(int argc, char *argv[])
{
int i = 0;
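// note: i is never incremented, so this loop spins until you interrupt it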
while (i < 100) {
usleep(3000);
}
return 0;
}
```
Apart from this small looping program, there is no real code for this exercise, just a quick discussion from the book.
Undefined Behavior
* Compiler writers can do whatever they want.
* This means even *nothing*, which will ruin you silently.
* It's best to avoid it (a tiny example follows).
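A tiny sketch of the kind of code that triggers it; both lines marked below are undefined behavior, so the compiler is free to do anything at all with them.
```c
#include <limits.h>
#include <stdio.h>

int main(void)
{
    int big = INT_MAX;
    big = big + 1;                  // signed integer overflow: undefined behavior

    int small[3] = { 1, 2, 3 };

    printf("big: %d\n", big);
    printf("oops: %d\n", small[3]); // out-of-bounds read: undefined behavior
    return 0;
}
```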
Unspecified Behavior
* For practical purposes unspecified is the same as undefined.
Handy Tools
* Clang's UB helpful flags.
* Lint tools and static analyzers.
Extra Credit
Spend a day reading through as much of the UB as you can and find examples of each. Expect lots of frustration and failure when you do this.
<file_sep>#!/bin/bash
echo "Deploying updates to GitHub ..."
# Go To Public folder
cd public
# Add changes to git.
pwd
git config user.name "harryho"
git config user.email "<EMAIL>"
git remote show origin
git checkout master
git pull origin master
## back to root of blog-hugo
cd ..
# Build the project. If using a theme, replace by `hugo -t <yourtheme>`
hugo -t docdock --ignoreCache
# Navigate into public
cd public
git add -A
msg="rebuilding site $(date)"
if [ $# -eq 1 ]; then
msg="$1"
fi
git commit -m "$msg"
# Push source and build repos.
git push origin master
# Come Back
cd ..
<file_sep>+++
title = "DigitialOcean: First Web Host"
description="UFW, Nginx & Web Host"
weight=3
+++
> Here I continue to set up my first web host with UFW and Nginx, before adding SSL certificates for all sites.
## UFW
UFW, or Uncomplicated Firewall, is a front-end to iptables. Its main goal is to make managing your firewall drop-dead simple and to provide an easy-to-use interface.
**DO NOT Enable UFW**
> DO NOT enable UFW without reading through the instructions
### Enable IP V6
* Open the UFW configuration with vi:
```
sudo vi /etc/default/ufw
```
* Make sure "IPV6" is set to "yes", like so:
```
...
IPV6=yes
...
```
### Set default rules
```
sudo ufw default deny incoming
sudo ufw default allow outgoing
```
### Allow SSH / OpenSSH
* Check app list & enable OpenSSH
```
# List applications
sudo ufw app list
# Allow SSH
sudo ufw allow OpenSSH
```
* Directly allow port 22 or other SSH port, e.g. 2222
```
sudo ufw allow 22
```
### Enable UFW
```
sudo ufw enable
sudo ufw status verbose
```
## Nginx
### Install Nginx
```
sudo apt install nginx
```
### Set UFW
```
# show applications
sudo ufw app list
# Allow Nginx
sudo ufw allow 'Nginx Full'
sudo ufw reload
```
## Build Web Host Block
### Create the Directory Structure
* The document root is the directory where the website files for a domain name are stored and served in response to requests. You can set the document root to any location you want.
* Basically, we will create a separate directory for each domain we want to host on our server inside the /var/www directory, which will store the domain website files.
```
/var/www/
├── domain-one.com
│ └── index.html
```
* Create the root directory domain-one.com:
```
sudo mkdir -p /var/www/domain-one.com
```
* Create an index.html file inside the domain’s root directory.
```bash
sudo touch /var/www/domain-one.com/index.html
```
* Copy following content to the file: __/var/www/domain-one.com/index.html__
```html
<!DOCTYPE html>
<html lang="en" dir="ltr">
<head>
<meta charset="utf-8">
<title>domain-one.com </title>
</head>
<body>
<script>
document.write(
`<h1>Welcome to domain-one.com
${new Date().toLocaleString()}
</h1>`
);
</script>
</body>
</html>
```
* To avoid any permission issues, change the ownership of the domain document root directory to the Nginx user (www-data):
```bash
sudo chown -R www-data: /var/www/domain-one.com
```
#### Create a Server Block
By default on Ubuntu systems, Nginx server blocks configuration files are stored in __/etc/nginx/sites-available__ directory, which are enabled through symbolic links to the __/etc/nginx/sites-enabled/__ directory.
Open your editor of choice and create the following server block file: __/etc/nginx/sites-available/domain-one.com__
```nginx
server {
listen 80;
listen [::]:80;
root /var/www/domain-one.com;
index index.html;
server_name domain-one.com www.domain-one.com;
access_log /var/log/nginx/domain-one.com.access.log;
error_log /var/log/nginx/domain-one.com.error.log;
location / {
try_files $uri $uri/ =404;
}
}
```
* To enable the new server block file, create a symbolic link from the file to the sites-enabled directory, which is read by Nginx during startup:
```bash
sudo ln -s /etc/nginx/sites-available/domain-one.com /etc/nginx/sites-enabled/
```
* Test the Nginx configuration for correct syntax:
```
sudo nginx -t
# If there are no errors, the output will look like this:
# nginx: the configuration file /etc/nginx/nginx.conf syntax is ok
# nginx: configuration file /etc/nginx/nginx.conf test is successful
```
* Restart the Nginx service for the changes to take effect
```
sudo systemctl restart nginx
```
### Disable Default Nginx site
* Change the default site configuration as below.
```nginx
server {
listen 80 default_server;
listen [::]:80 default_server;
server_name _;
deny all;
return 444;
}
```
### Security
* The next step is to set up Let's Encrypt.<file_sep>+++
title = "MySql: DDL & DML"
description="Introduction of SQL: DDL - Data Definition Language DML - Data Manipulation Language"
+++
> As one of the most popular open source databases, MySql is mainly used as data storage, aka a database. To store data in a MySql server, we need to use SQL - Structured Query Language. But before we store the data in MySql, we need to define the schema, which tells MySql how to organize the data in the proper manner. To define the schema, there is a special subset of SQL, which we call DDL - Data Definition Language, with statements such as CREATE, DROP, ALTER, etc.
### Create a new database
```sql
-- Drop the old one
DROP SCHEMA IF EXISTS new_db;
-- Create a new db
CREATE DATABASE new_db CHARACTER SET utf8
COLLATE utf8_general_ci;
```
### Create a table
```sql
DROP TABLE IF EXISTS new_table;
CREATE TABLE new_table (
id int NOT NULL AUTO_INCREMENT,
name varchar(50),
title varchar(50),
email varchar(250),
created_date datetime,
modified_date datetime,
PRIMARY KEY (id)
);
```
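### Insert data
DML statements manipulate the rows themselves rather than the schema. Below is a minimal example against the `new_table` defined above; the values are placeholders only.
```sql
-- Add a row (DML)
INSERT INTO new_table (name, title, email, created_date, modified_date)
VALUES ('Harry', 'Engineer', 'harry@example.com', NOW(), NOW());
-- Read it back
SELECT id, name, title, email FROM new_table WHERE name = 'Harry';
```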
### Query
- JOIN and INNER JOIN (in MySQL they are equivalent)
```sql
SELECT * FROM new_table_a na
JOIN new_table_b nb ON nb.new_table_a_id = na.id
LIMIT 10;
```
### Useful temporary table
```sql
DROP TEMPORARY TABLE IF EXISTS tmp_table;
CREATE TEMPORARY TABLE tmp_table AS
SELECT * FROM new_table;
SELECT * FROM tmp_table;
```
### Update data from other table
```sql
UPDATE tableB
INNER JOIN tableA ON tableB.name = tableA.name
SET tableB.value = IF(tableA.value > 0, tableA.value, tableB.value)
WHERE tableA.name = 'Joe'
```
### Delete data
{{% notice tip %}}
How to avoid error: `You can’t specify target table`
{{% /notice %}}
```sql
DELETE FROM TableA
WHERE id NOT IN (
SELECT * FROM (
SELECT a.id id FROM TableA a
JOIN TableB b ON a.tableb_id = b.id
) as t
);
```<file_sep>+++
title = "Azure: CAF - 1"
weight = 1
description="Introduction of Cloud Adoption Framework"
+++
## Cloud Adoption Framework
The Microsoft Cloud Adoption Framework for Azure is proven guidance that's designed to help you create and implement the business and technology strategies necessary for your organization to succeed in the cloud. It provides best practices, documentation, and tools that cloud architects, IT professionals, and business decision makers need to successfully achieve short-term and long-term objectives.
The Cloud Adoption Framework brings together cloud adoption best practices from Microsoft employees, partners, and customers. It provides a set of tools, guidance, and narratives that help shape technology, business, and people strategies for driving desired business outcomes during your cloud adoption effort. Review the guidance for each methodology below, providing you with easy access to the right guidance at the right time.
### Structure
Methodology | Description
---|----
Strategy| Define business justification and expected outcomes of adoption.
Plan| Align actionable adoption plans to business outcomes.
Ready| Prepare the cloud environment for the planned changes.
Migrate| Migrate and modernize existing workloads.
Innovate| Develop new cloud-native or hybrid solutions.
Govern| Govern the environment and workloads.
Manage| Operations management for cloud and hybrid solutions.
Organize| Align and organize the teams and roles that support cloud adoption.
### RACI
RACI - Responsible, Accountable, Consulted, and Informed
### Cloud Adoption Capability
Capability | Description
----|----
Cloud Adoption |Deliver technical solutions
Cloud Strategy |Align technical change to business needs
Cloud Operations |Support and operate adopted solutions
Cloud Center of Excellence |Improve quality, speed, and resiliency of adoption
Cloud Governance |Manage risk, drive consistency, governance, and compliance
Cloud Platform |Operate and mature the platform
Cloud Automation | Accelerate adoption and innovation
### SME of Capability
Capability | Expertise
----|-----
Cloud Strategy | Finance
Cloud Strategy | Project Management
Cloud Governance| IT Security
Cloud Governance| IT Governance
Cloud Platform| Network
Cloud Platform| Identity
Cloud Platform| Virtualization
Cloud Platform| Disaster Recovery
Cloud Operations| IT Operations
Cloud Operations| Application Owner
Cloud Adoption| Project Lead(s)
Cloud Adoption| Architect(s)
### Lifecycle
{{<mermaid>}}
graph LR
S(Define Strategy)
P(Plan)
R(Ready)
A(Adopt)
subgraph Layer_1
S-->P
P-->R
R-->A
end
{{</mermaid >}}
{{<mermaid>}}
graph BT
G(Govern)
M(Management)
subgraph Layer_2
G
M
end
{{</mermaid >}}
<file_sep>+++
date = "2018-12-04T14:59:31+11:00"
title = "VPN StrongSwam setup"
description = "VPN with StrongSwam"
+++
## VPN StrongSwan
**strongSwan** is a multiplatform IPsec implementation. The focus of the project is on strong authentication mechanisms using X.509 public key certificates and optional secure storage of private keys and certificates on smartcards through a standardized PKCS#11 interface and on TPM 2.0.
### Launch an instance with Ubuntu
### Update setup script
* Following is setup.sh
```bash
#!/bin/bash
usage() {
echo "Usage: strongswan.sh [install|start] [PATADDR] [ETHDEV]
'install' parameters:
PATADDR The private address on MARKETNET (eg. 172.17.133.10)
ETHDEV The name of the local ethernet device (eg. eth0)
"
exit 1
}
install_function () {
apt update -y
apt install strongswan -y
cp ipsec.conf /etc/ipsec.conf
cp ipsec.secrets /etc/ipsec.secrets
sysctl -w net.ipv4.ip_forward=1
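# note: the address and device below are hard-coded rather than taken from PATADDR/ETHDEV; adjust them to match your environment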
ip addr add 172.17.12.127 dev eth0
iptables -t nat -F
iptables -t nat -I POSTROUTING -m policy --pol ipsec --dir out -j ACCEPT
iptables -t nat -A POSTROUTING -d 172.16.17.32/24 -j SNAT --to 172.17.12.127
iptables-save
}
start_function () {
ipsec reload
ipsec rereadsecrets
ipsec up remote-vpn-b
ipsec down remote-vpn-b
ipsec up remote-vpn-a
ipsec down remote-vpn-a
}
if [ $# -lt 1 ]; then
echo "No command"
usage
fi
export operation=$1
if [ "$operation" = "install" ]; then
install_function
elif [ "$operation" = "start" ]; then
start_function
fi
```
### Update IPSec config
```
config setup
strictcrlpolicy=no
uniqueids = no
charondebug="ike 3,dmn 0, mgr 3, chd 2, cfg 2, knl 0, net 2, enc 0, esp 3"
conn %default
auto=route
compress=no
type=tunnel
keyexchange=ikev2
ike=aes256-sha512-modp2048
esp=aes256-sha512-modp2048
leftauth=psk
rightauth=psk
authby=secret
lifetime=28800
ikelifetime=28800
rekey=yes
reauth=no
inactivity=1800
conn remote-vpn-a
left=%defaultroute
leftsubnet=172.17.12.127/32
leftid=192.168.127.12
right=172.16.58.3
rightid=172.16.58.3
rightsubnet=172.16.17.32/24
conn remote-vpn-b
left=%defaultroute
leftsubnet=172.17.12.127/32
leftid=1172.16.17.32
right=172.16.17.32
rightid=172.16.17.32
rightsubnet=172.16.17.32/24
```
### Update IPSec secrets
```
192.168.127.12 172.16.58.3 : PSK Your_Remote_Key
192.168.127.12 172.16.17.32 : PSK Your_Remote_Key
```
### Setup & Test StrongSwan
```
sudo bash strongswan.sh install
sudo ipsec reload
sudo ipsec rereadsecrets
sudo ipsec up remote-vpn-b
sudo ipsec down remote-vpn-b
sudo ipsec up remote-vpn-a
sudo ipsec up remote-vpn-b
```<file_sep>+++
date = "2012-03-24T10:59:31+11:00"
title = "Windows cmd & hotkey - 2"
description="A note for everyone who wants to use Command and Hot Key as hacker "
weight=11
+++
*This article continues the topic of Windows commands & hotkeys. Part-1 showed you common hotkeys and short command lines for the `Run` dialog window. The rest of this topic will focus on the advanced commands and how to create a batch script with all those commands.*
*Let me clarify something first. Advanced command here does not mean that these commands are very complicated or much more powerful than the common ones shown in Part-1. We call them advanced because they are used by experienced users to complete specific tasks, and they are seldom used by most people. Compared with Part-1, advanced commands have some specific features that allow them to do special jobs which are usually done by a system admin. Advanced commands are also known as admin commands.*
## Advanced commands and usages
### attrib
* Type `attrib +h a.txt` to hide file and use `attrib -h a.txt` to unhide it.
* Type `attrib +r a.txt` to change file to read-only and reverse the action by `-r`
### env
* Type `env>env.txt & notepad env.txt` to dump all environment variables to a text file and open it in Notepad
### set
* Type `set path` to display the **PATH** environment variable, which is useful to check whether your **PATH** has been set up properly.
* Type `set /P a=b` to prompt the user with `b` and store what they type in variable `a` (a plain `set a=b` assigns the literal value). It is mostly used in bat/cmd scripts.
### net
**get sub-commands** -- type `net /? `
```
[ ACCOUNTS | COMPUTER | CONFIG | CONTINUE | FILE | GROUP | HELP |
HELPMSG | LOCALGROUP | PAUSE | SESSION | SHARE | START |
STATISTICS | STOP | TIME | USE | USER | VIEW ]
```
**get sub-command's help** -- type `net [sub-command] /?`
**net view**
* Use `net view` to show a list of computers and network devices on the network.
**net statistics**
* Use `net statistics workstation(/server)` to show the network statistics log for the Server or Workstation service
**net localgroup**
* Use `net localgroup` to show a list of local user group on your computer.
**net user**
* Type `net user %username%` to retrieve your user information
* Type `net user administrator` to check the status of the administrator account
* Type `net user administrator /active:yes` to activate the administrator account, and deactivate it by replacing `yes` with `no`
**net accounts**
* Use `net accounts` to show the current password and logon policy.
* Use `net accounts /minpwlen:6` to set the minimum password length requirement.
* Use `net accounts /maxpwage:30` to force users to reset their password every 30 days, or use `unlimited` instead of `30` so passwords never expire.
* Use `net accounts /uniquepw:5` to prevent users from reusing their previous passwords; the default value is 5.
### runas
```
start command prompt as administrator
runas /user:yourpc\administrator "cmd"
REM ##BE CAREFUL When you try the command below ###
REM it shows how to create, delete files as admin under C drive root.
runas /user:yourpc\administrator "cmd /C type \"\">c:\z.txt & \
dir c:\z.txt & pause & del c:\z.txt "
```
### sc
* sc command usage: `sc <server> [command] [service name] <option1> <option2>...`
**sc query**
* Basic usage
```
REM query all service on the PC -- <yourpcname>
sc \\<yourpcname> query
REM query status of given service
sc query <servicename>
sc query state= all | find "SERVICE_NAME"
```
* Retrieve service name and state. type parameter can be used twice in some case.
* state = {active | inactive | all}
* type = {driver | service | all}
* type= {own | share | interact | kernel | filesys | rec | adapt}
* __*IMPORTANT*__
* The command options for SC are case sensitive.
* If you run this inside a batch file, the percent signs need to be doubled (e.g. %s becomes %%s).
* Extra space within option is necessary. e.g. `state= all`
```
REM query all services which are inactive and type are driver and kernel
sc query state= inactive type= driver type= kernel
REM get all services name
for /f "tokens=2" %s in ('sc query state^= all ^| find "SERVICE_NAME"') do @echo %s
REM get all services name and state
for /f "tokens=2" %s in ('sc query state^= all ^| find "SERVICE_NAME"') do @(
for /f "tokens=4" %t in ('sc query %s ^| find "STATE" ')
do @echo %s -- %t
)
```
**sc queryex**
```
REM get all services name and pid
for /f "tokens=2" %s in ('sc queryex state^= all ^| find "SERVICE_NAME"') do @(
for /f "tokens=3" %t in ('sc queryex %s ^| find "PID" ')
do @echo %s -- %t
)
REM get all services name and binary path
for /f "tokens=2" %s in ('sc queryex state^= all ^| find "SERVICE_NAME"') do @(
for /f "tokens=3" %t in ('sc queryex %s ^| find "BINARY_PATH_NAME" ')
do @echo %s -- %t
)
```
**sc qc**
```
REM get all services name and path
for /f "tokens=2" %s in ('sc queryex state^= all ^| find "SERVICE_NAME"') do @(
for /f "tokens=3 delims==:" %t in ('sc qc %s ^| find "BINARY_PATH_NAME" ')
do @echo %s -- C:%t
)
```
**sc start/stop**
```
REM start and stop service
sc start <servicename>
REM query service state
sc query <servicename>
REM stop service
sc stop <servicename>
```
### ipconfig
* Type `ipconfig /all` to display full configuration information.
* Type `ipconfig /flushdns` to purge the DNS Resolver cache.
### tasklist
**syntax**
* tasklist[.exe] [/s computer] [/u domain\user [/p password]] [/fo {TABLE|LIST|CSV}] [/nh] [/fi FilterName [/fi FilterName2 [ ... ]]] [/m [ModuleName] | /svc | /v
* FilterName: Status, Imagename,
* Find process by pid
```
REM get the mysqld process info
tasklist /v /fo list /fi "imagename eq mysqld.exe"
REM get the mongod process info
tasklist /v /fo list /fi "imagename eq mongod.exe"
REM get list of running processes under given user
tasklist /fi "USERNAME ne NT AUTHORITY\SYSTEM" /fi "STATUS eq running"
REM get list of non-responding processes under given user
tasklist /fi "USERNAME ne NT AUTHORITY\SYSTEM" /fi "STATUS eq not responding"
REM get process by PID
tasklist /fi "pid eq 4444"
```
### netstat
* Type `netstat` to get all ports and IP addresses, which are connected or listening
* Find the PID of the process that is using a given port, such as 80, 443, 22, etc.
```bash
netstat -ano | find ":80"
```
* Find the application that is using a given port.
```
for /f "tokens=5" %p in ( 'netstat -ano ^| find ":80"') do @(
for /f "tokens=1" %s in ( 'tasklist /fi "pid eq %p" ^| find "%p"') do @(
echo PID:%p -- APP: %s
)
)
```
### taskkill
**syntax**
```
taskkill [/S system [/U username [/P [password]]]]
{ [/FI filter] [/PID processid | /IM imagename] } [/F] [/T]
```
**samples**
```
REM force to stop notepad application and any children processes
taskkill /F /IM notepad.exe /T
REM stop process by PID and any children processes
taskkill /PID 1230 /PID 1241 /PID 1253 /T
REM force to stop applications whose PID is equal to or greater than 1000
REM and whose window title does not start with untitle
taskkill /F /FI "PID ge 1000" /FI "WINDOWTITLE ne untitle*"
taskkill /F /FI "USERNAME eq NT AUTHORITY\SYSTEM" /IM notepad.exe
```
### schtasks
* Syntax -- `schtasks /parameter [arguments]`
* parameters include -- Change, Create, Delete, End, Query, Run, ShowSid (a create/run/delete sketch is shown below)
* Type `schtasks` to list all scheduled tasks
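**schtasks /Create, /Run & /Delete**
Besides querying, you can create, run and remove scheduled tasks from the command line. A minimal sketch (the task name, schedule and script path below are just examples):
```
REM create a task that runs a script every day at 09:00
SCHTASKS /Create /SC DAILY /TN "MyDailyTask" /TR "C:\scripts\cleanup.bat" /ST 09:00
REM run the task immediately
SCHTASKS /Run /TN "MyDailyTask"
REM delete the task without a confirmation prompt
SCHTASKS /Delete /TN "MyDailyTask" /F
```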
**schtasks /Query**
```
REM get help info
SCHTASKS /Query /?
REM query tasks which are scheduled on given system
SCHTASKS /Query /S system /U user /P
REM get list of tasks in details
SCHTASKS /Query /FO LIST /V
REM get table of running tasks in details and output to csv file
SCHTASKS /Query /FO TABLE /NH /V | find "Running" >running_tasks.csv
```
## Combination of multiple commands
As we know, usually each command is designed to complete some specific actions, but sometimes we have to combine different commands together to achieve what we want. There are a few ways to put the commands together.
### Use `&`
It is used to connect two commands and execute them sequentially.
* To delete a folder `test` with non-empty subdirectories, we need to combine `del` and `rd`. We could run the two commands one by one, but we can also put them together and execute them at once.
```bash
REM show the folder with non-empty subdirectories
tree test
\path\to\TEST
+---subdir1
| file1
| file2
|
\---subdir2
file1
file2
del /s/q test & rd /s/q test
```
### Use redirection `>` and pipe `|`
The pipe `|` passes the output of one command to another command, while `>` redirects the output to a file (`>>` appends).
You have already seen many samples of the pipe in the advanced commands above. Here is a very simple one to show how redirection works.
```
REM write some content to a text file all.txt
echo aaa>all.txt & echo mark aaa >>all.txt & echo mark bbb>>all.txt
```
### Check CPU usage
```
wmic cpu get loadpercentage
@for /f "skip=1" %p in ('wmic cpu get loadpercentage') do @echo %p%
```
### Use `for`
It is used to loop over a set of values and combine commands. Please check out the samples for `tasklist` or `netstat` above; a small standalone example follows.
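A minimal standalone sketch (it assumes a folder `C:\temp` containing some `.log` files; `%~zf` expands to the file size):
```
REM print the name and size of every .log file under C:\temp
for %f in (C:\temp\*.log) do @echo %f -- %~zf bytes
```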
## script
### Basic hello world script
* You can find it on the [home page](https://harryho.github.io)
### Customized script
* This sample script is used to query temp folders and clean up log files within the folder.
* We assume you have multiple temp folders in different drives and you want to delete the log files inside each temp folder and its subdirectories from time to time. Before you delete them, you want to list all the files first, so you can confirm whether you really want to delete them.
* Create a file named clean-logs.bat
* Copy the sample code and tailor anything you want.
* The sample shows you how to create an interactive command script and how to combine commands with condition and loop statements.
```cmd
@echo off
@echo."Task: "
@echo."You have multiple temp folders in different drives. "
@echo."You want to delete log files inside temp folder and its subdirectories. "
@echo."Before you delete them, you want to list all files first, file list"
@echo."should be sorted by time"
:again
echo "Checking all Recycle bins for each drive ..."
echo.-----------------------
for /f %%x in ('wmic logicaldisk get caption ^| find ":"') do @(
for /f "tokens=*" %%s in ('tree /f /a %%x\temp ^| find "log" ' ) do @(
echo.%%x\temp\%%s
)
)
set /p answer=Do you want to clean up log files (Y/N)?
if /i "%answer:~,1%" EQU "Y" (
@echo.Y
goto clean
)
if /i "%answer:~,1%" EQU "N" (
@echo.N
goto end
)
echo Please type Y for Yes or N for No
goto again
:clean
echo.'deleting logs'
for /f %%x in ('wmic logicaldisk get caption ^| find ":"') do @(
for /f "tokens=*" %%s in ('tree /f /a %%x\temp ^| find "log" ' ) do @(
del "%%x\temp\%%s"
)
)
:end
echo.'exiting program'
```<file_sep>+++
date = "2017-02-15T14:59:31+11:00"
title = "Create a blog site on GitHub Pages"
description="After I setup a blog site with Hugo on my ubuntu machine, I decided to use it to create a blog to GitHub pages on my windows machine "
+++
> If you use a Unix-style system, I recommend you follow the [Hugo Quick Start](https://gohugo.io/overview/quickstart/) and [Hosting on GitHub Pages](https://gohugo.io/tutorials/github-pages-blog) guides to create a blog on GitHub Pages within 5 mins.
> When I decided to use Hugo to create a blog on GitHub Pages from my Windows machine, it took me over 30 mins. I hope this post can help anyone who wants to do something similar in a Windows environment.
### Step 1 - Plan and prepare
#### Prerequisite
* You already have *Hugo* on your computer. If not, please follow the instruction to [install hugo on Windows](https://gohugo.io/tutorials/installing-on-windows).
#### Manage your github repositories
* You will have two repositories, **blog-hugo** and `<username>.github.io`, to hold your Hugo content and your blog site respectively.
* The **blog-hugo** repository will host the actual Hugo blog content.
* The `<username>.github.io` repository will host the static website.
#### Manage your blog site
* Your *Hugo* blog folder will be "C:\git\blog-hugo" in this example.
* Your blog site will finally sit in the C drive and map to the repositories as follows
```bash
C:\>
|--git
    |--blog-hugo (https://github.com/<yourname>/blog-hugo.git)
|--archetypes
|--content
|--data
|--layouts
|--public (https://github.com/<yourname>/<yourname>.github.io.git)
|--themes
|--
```
### Step 2 - Create a blog site
#### Create github repositories
* Create the blog-hugo and <username>.github.io repositories via the GitHub website
#### Create a bloody good blog site
* Clone blog-hugo via Windows command prompt
```bat
c:\>
c:\>cd git
c:\git>git clone <<your-project>-hugo-url> && cd <your-project>-hugo
```
* Create hugo site and setup the theme you like
```bat
C:\git>hugo new site blog-hugo
C:\git>hugo server -t <yourtheme> -D
```
#### Setup a sub module for publish
* Clean up the `public` folder
* Set submodule inside the blog-hugo and map to folder `public`
```bat
C:\>cd git/blog-hugo
C:\git>del /s /q /f public\*
C:\git>rd /s /q public
C:\git>git submodule add -b master https://github.com/<username>/<username>.github.io.git public
```
### Deploy to GitHub Pages
* Deploy the blog site to GitHub page with the script `deploy.bat`.
* `deploy.bat "Your optional commit message"` will commit the changes to `<username>.github.io`. You can use and tailor the script below as your `deploy.bat`
```batch
@echo OFF
echo Deploying updates to GitHub...
REM Build the project.
hugo -t <yourtheme> -D
REM Go To Public folder
cd public
REM Add changes to git.
git add -A
REM Commit changes.
set msg="rebuilding site %date%"
if NOT "%1"=="" set msg=%1
git commit -m %msg%
REM Push source and build repos.
git push origin master
REM Come Back
cd ..
```
* You might want to commit the changes to **blog-hugo** repository. Please don't forget to add `public` into the `.gitignore`.
<file_sep>+++
date = "2017-05-04T14:59:31+11:00"
title = "Ubuntu 16 server note"
description = "Ubuntu 16 server note"
draft = false
+++
Prelude
> *This article is mainly to help experienced user install and setup Ubuntu 16 LTS Server. If you are looking for the information for Ubuntu 14, please go to the page [Ubuntu 14 server setup](/os/ubuntu-server-14/)*
## Prerequisites
* You are familiar with Ubuntu, at least you have some experience working on Linux system.
* You are familiar with basic bash/shell command
## Wireless Setup
> If you install Ubuntu server on a laptop, you might need to set up the wifi first. Usually you won't carry an ethernet cable with your laptop wherever you go, and sometimes you simply have no cable at hand. Now let's dive into how to set up the wifi on the server.
* Find out the network interfaces installed on your laptop. The LINK column lists the names of the interfaces. The interface names on your laptop will probably be slightly different, depending on the hardware. Basically the interface with the type __wlan__ is your wifi interface.
```bash
networkctl
```
* You will see the network interfaces on your laptop. `enp1s0` is the ethernet interface, and `wlp3s0` is your wifi interface.
```
IDX LINK TYPE OPERATIONAL SETUP
1 lo loopback n/a unmanaged
2 enp1s0 ether n/a unmanaged
3 wlp3s0 wlan n/a unmanaged
```
* Disable IPv6 if you don't use it. (Highly recommended for personal laptop users)
- Add following setting to file `/etc/sysctl.conf`
```bash
net.ipv6.conf.all.disable_ipv6 = 1
net.ipv6.conf.default.disable_ipv6 = 1
net.ipv6.conf.lo.disable_ipv6 = 1
```
- Reconfigure by running the following command
```bash
sudo sysctl -p
## Check the ipv6 status. If you see 1 after running command below, it means ipv6
## has been disabled
cat /proc/sys/net/ipv6/conf/all/disable_ipv6
```
* Find out the status of `wpa_supplicant`
```bash
sudo systemctl status wpa_supplicant
## If you find "disabled" in the output, you can simply enable it as below
sudo systemctl enable wpa_supplicant
```
* Find out your wifi ESSID
```bash
sudo iwlist wlp3s0 scan | grep ESSID
## If you get error message like "network is down", use ifconfig to bring it up and
## re-run the previous command
sudo ifconfig wlp3s0 up
```
* Setup the wpa passphrase for your wifi
```bash
## Use following command to add pass phrase to your wpa_supplicant
##
## wpa_passphrase <your-ESSID> <your-passphrase> | sudo tee /etc/wpa_supplicant.conf
## If your ESSID is mywifi and password of wifi is <PASSWORD>, then you will end up the
## command below
wpa_passphrase mywifi mypasswork | sudo tee /etc/wpa_supplicant.conf
```
* Configure wpa_supplicant for your wifi interface and run the command as a background process
```bash
sudo wpa_supplicant -c /etc/wpa_supplicant.conf -i wlp3s0 > /dev/null 2>&1 &
```
* Add SSID scan into config `/etc/wpa_supplicant.conf`
```bash
network={
ssid="mywifi"
#psk="<PASSWORD>"
psk=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
scan_ssid=1
}
```
* Get IP address from external DHCP
```bash
sudo dhclient wlp3s0
## Check the ip address
ifconfig wlp3s0
```
## Wifi Troubleshooting
* There is no ip address assigned to your wifi interface
- Check out the wpa_supplicant status
`sudo systemctl status wpa_supplicant`
- If you find some error like 'Failed to construct signal', it means
some network service has been disabled
`sudo systemctl list-unit-files --state disabled | grep network`
- Enable `systemd-networkd.service`, `networking.service` if they are disabled
* The error message comes after the `dhclient`
- Enable the `squid.service`
* Auto connect to wifi on startup
```bash
sudo cp /lib/systemd/system/wpa_supplicant.service /etc/systemd/system/wpa_supplicant.service
sudo vi /etc/systemd/system/wpa_supplicant.service
```
* Replace the following line
`ExecStart=/sbin/wpa_supplicant -u -s -O /run/wpa_supplicant`
with
`ExecStart=/sbin/wpa_supplicant -u -s -c /etc/wpa_supplicant.conf -i wlp3s0`
* Add wifi interface into auto startup file `/etc/network/interfaces`
```bash
auto lo
iface lo inet loopback
auto wlp3s0
iface wlp3s0 inet dhcp
```
## UFW setup
```bash
sudo ufw enable
sudo ufw allow 80/tcp
sudo ufw allow ssh
sudo ufw allow 443/tcp
sudo ufw allow 8000/tcp
```
## SSH server setup
`!!! For production environment, SSH should be secured by the CA`
```bash
sudo apt-get install openssh-server
## backup default config
sudo cp /etc/ssh/sshd_config /etc/ssh/sshd_config.factory-defaults
sudo chmod a-w /etc/ssh/sshd_config.factory-defaults
## use any editor to update sshd_config
sudo nano /etc/ssh/sshd_config
## uncomment PasswordAuthentication yes to allow remote password login
## Password authentication is only for test environment
## setup ssh auto-start onboot
sudo update-rc.d ssh defaults
```
## !!! Install the software-properties-common Package
```bash
sudo apt-get install software-properties-common python-software-properties
```
## Time Zone setup
```bash
sudo dpkg-reconfigure tzdata
```
## Install tmux
```bash
sudo apt-get install tmux
```
* Most useful tmux commands
> Ctrl+b " — split pane horizontally.
>
> Ctrl+b % — split pane vertically.
>
> Ctrl+b arrow key — switch pane.
>
> Hold Ctrl+b, don’t release it and hold one of the arrow keys — resize pane.
>
> Ctrl+b c — (c)reate a new window.
>
> Ctrl+b , — rename the current window.
>
> Ctrl+b n — move to the (n)ext window.
>
> Ctrl+b p — move to the (p)revious window.
## Install git
```bash
sudo add-apt-repository ppa:git-core/ppa
sudo apt-get update
sudo apt-get install git
```
## Install Docker CE (Ubuntu 16 LTS)
```bash
## Update the apt package index
sudo apt-get update
## Install packages to allow apt to use a repository over HTTPS
sudo apt-get install \
apt-transport-https \
ca-certificates \
curl \
software-properties-common
## Add Docker’s official GPG key
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
## Verify the last 8 characters of the fingerprint.
sudo apt-key fingerprint xxxxxxxx
## set up the stable repository
sudo add-apt-repository \
"deb [arch=amd64] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) \
stable"
## apt update
sudo apt-get update
## install docker CE
sudo apt-get install docker-ce
```
## Install JDK 8
* Download and install the Oracle JDK via the webupd8team PPA.
```bash
sudo add-apt-repository ppa:webupd8team/java
sudo apt-get update
sudo apt-get install oracle-java8-installer
java -version
```
* Setup environment
```bash
sudo apt-get install oracle-java8-set-default
sudo su
cat >> /etc/environment <<EOL
JAVA_HOME=/usr/lib/jvm/java-8-oracle
JRE_HOME=/usr/lib/jvm/java-8-oracle/jre
EOL
```
* Test JDK with a simple HelloWorld program
```java
import java.util.Calendar;
class HelloWorld {
public static void main(String[] args) {
Calendar cal = Calendar.getInstance();
int year = cal.get(Calendar.YEAR);
int month = cal.get(Calendar.MONTH) + 1;
int day = cal.get(Calendar.DATE);
int hour = cal.get(Calendar.HOUR_OF_DAY);
int minute = cal.get(Calendar.MINUTE);
String username = System.getProperty("user.name");
System.out.println(username+ ": Hello World! ");
System.out.println(year + "/" + month + "/" + day + " " + hour + ":" + minute);
}
}
```
* Compile and run the program
```bash
javac HelloWorld.java
java HelloWorld
```
## Install nodejs
* Install Nodejs 8.x
```bash
curl -sL https://deb.nodesource.com/setup_8.x | sudo -E bash -
sudo apt-get install -y nodejs
```
* Install latest npm, yarn and ts
```
sudo npm install -g npm
sudo npm install -g typescript
sudo npm install -g yarn
```
## Install PHP
* Add new repo
```bash
sudo apt-get install -y python-software-properties
sudo add-apt-repository -y ppa:ondrej/php
sudo apt-get update -y
apt-cache pkgnames | grep php7.2
```
* Option 1: Install LAMP stack
```bash
sudo apt-get install -y apache2
sudo apt-get install -y php7.2 libapache2-mod-php7.2 php7.2-cli php7.2-common \
php7.2-mbstring php7.2-gd php7.2-intl php7.2-xml php7.2-mysql php7.1-mcrypt php7.2-zip
```
* Option 2: Install LEMP stack
```bash
sudo apt-get install -y nginx
sudo apt-get install -y php7.2 php7.2-fpm php7.2-cli php7.2-common php7.2-mbstring \
php7.2-gd php7.2-intl php7.2-xml php7.2-mysql php7.1-mcrypt php7.2-zip
```
* Disable Apache and Nginx if you install both
```bash
sudo systemctl disable apache2.service
sudo systemctl disable nginx.service
```
## Install Python2, Python3
* Ubuntu has python2 installed by default
```bash
sudo apt-get install python-pip
sudo apt-get install python3-pip
sudo apt-get install python3-dev python-dev
## Install virtualenv
sudo pip install virtualenv
sudo pip3 install virtualenv
```
## Install Go
* Install Go
```bash
wget https://storage.googleapis.com/golang/go1.9.2.linux-amd64.tar.gz
## check hash
shasum -a 256 go*linux-amd64.tar.gz
## install tar ball
sudo tar -C /usr/local -xvzf go1.9.2.linux-amd64.tar.gz
```
* Setup GOROOT by overwriting the file `/etc/environment` with following content
```bash
PATH="/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games"
JAVA_HOME="/usr/lib/jvm/java-8-oracle"
JRE_HOME="/usr/lib/jvm/java-8-oracle/jre"
GOROOT="/usr/local/go"
```
* Setup GOPATH by adding following lines to the end of `.profile`
```bash
export GOPATH="$HOME/ws/go"
export GOBIN="$GOPATH/bin"
export PATH="$GOPATH/bin:$PATH"
```
* Create a simple `hello.go` file to test
```go
package main
import (
"fmt"
"log"
"os/user"
)
func main(){
user, err := user.Current()
if err != nil {
log.Fatal(err)
}
fmt.Printf(user.Name + " said : Hello World! \n" )
}
```
* Run the program
```bash
go run $GOPATH/src/hello.go
go install $GOPATH/src/hello.go
$GOBIN/hello
```
## Install clang & cmake
```bash
sudo apt-get install clang
sudo apt-get install cmake
```
## Install Rust
```bash
$ curl -f -L https://static.rust-lang.org/rustup.sh -O
$ sh rustup.sh
```
<file_sep>+++
date = "2016-02-09T16:56:21+11:00"
title = "Zend Framework 2 MVC Starter"
description="This starter is the starting point of zend framework 2 MVC project"
+++
## Summary
This starter is the starting point of zend framework 2 MVC project. This application is meant to be used as a starting place for those looking to get their feet wet with ZF2.
## Features
* This starter was built on the zend framework 2.x.
* This starter uses MySQL as the default database.
* Include digest authentication by default.
* Include font-awesome files.
* Include Bootstrap 3 without bootstrap-loader.
* Include html5shiv.js to support older IE browsers.
## Structure of starter
```bash
\path\to\zf2-mvc-starter
+---config            // Database, authorization, authentication setting
+---data
+---module // Customized application sources
| +---Application // Global module used by whole application
| | +---config
| | | \---module.config.php // Register all modules
| | +---language
| | +---src
| | | \---Application
| | | +---Controller
| | | \---Factory
| | | \---AuthenticationAdapterFactory.php
| | \---view // Contains common master, basic layout files
| | +---application
| | | \---index
| | +---error
| | +---layout
| | \---partial
| +---BookList // Customized module for business purpose
| | +---config
| | +---src
| | | \---BookList
| | | +---Controller
| | | +---Form
| | | \---Model
| | \---view
| | \---book-list
| | \---book
| \---Test
| \---config
+---public // Contains all fonts, css, images, and js files
| +---css
| +---fonts
| +---img
| \---js
\---vendor // Contains Zend Framework 2 source code
```
## Screenshot of home page
> 
## Browse [Repository](https://github.com/harryho/zf2-mvc-starter.git)
<file_sep>
+++
date = "2014-05-04T10:59:31+11:00"
title = "Windows cmd & hotkey - 3"
description="A note for everyone who wants to use Command and Hot Key as hacker "
weight=13
+++
*This article continues the topic of Windows commands & hotkeys. [Part-1](/blog/windows-command-1) shows you common hotkeys and short command lines for the `Run` dialog. [Part-2](/os/windows-command-2) covers advanced commands and how to create a batch script with all those commands. Here I am going to show you another secret weapon in the Windows system: VBScript/JScript.*
## Brief history
> VBScript is an Active Scripting language developed by Microsoft that is modeled on Visual Basic. It allows Microsoft Windows system administrators to generate powerful tools for managing computers with error handling, subroutines, and other advanced programming constructs.
> A VBScript script must be executed within a host environment, of which there are several provided with Microsoft Windows, including: Windows Script Host (WSH), Internet Explorer (IE), and Internet Information Services (IIS). VBScript uses the Component Object Model to access elements of the environment within which it is running.
> JScript is Microsoft's dialect of the ECMAScript standard that is used in Microsoft's Internet Explorer. JScript is implemented as an Active Scripting engine. This means that it can be "plugged in" to OLE Automation applications that support Active Scripting, such as Internet Explorer, Active Server Pages, and Windows Script Host.
> With Cscript.exe, you can run scripts by typing the name of a script file at the command prompt. Like Microsoft Internet Explorer, Windows Script Host serves as a controller of Windows Script compliant scripting engines, but Windows Script Host has very low memory requirements. Windows Script Host is ideal for both interactive and non-interactive scripting needs, such as logon scripting and administrative scripting.
## Sample of VBScript file
```
Set oFSO = CreateObject("Scripting.FileSystemObject")
Set oInput = oFSO.OpenTextFile(WScript.Arguments(0), 1)
sData = Replace(oInput.ReadAll, "," & VbCrLf, VbCrLf)
Set oOutput = oFSO.CreateTextFile(WScript.Arguments(1), True)
oOutput.Write sData
oInput.Close
oOutput.Close
```
## Replace content script
* Create a file named `repltxt.bat`
* Copy the code into the file `repltxt.bat`
* Run the script
```
@if (@X)==(@Y) @end /* Harmless hybrid line that begins a JScript comment
::************ Documentation ***********
::NEWREPL.BAT version 1.0
:::
:::NEWREPL Search replace
:::NEWREPL /?
:::NEWREPL /V
@echo off
if .%2 equ . (
if "%~1" equ "/?" (
echo."%~f0"
<"%~f0" cscript //E:JScript //nologo "%~f0" "^:::" "" a
exit /b 0
) else if /i "%~1" equ "/V" (
<"%~f0" cscript //E:JScript //nologo "%~f0" "^::(NEWREPL\.BAT version)" "$1" a
exit /b 0
) else (
call :err "Insufficient arguments"
exit /b 2
)
)
cscript //E:JScript //nologo "%~f0" %*
exit /b %errorlevel%
:err
>&2 echo ERROR: %~1. Use newrepl /? to get help.
exit /b
************* JScript portion **********/
var rtn=1;
try {
var env=WScript.CreateObject("WScript.Shell").Environment("Process");
var args=WScript.Arguments;
var search=args.Item(0);
var replace=args.Item(1);
var options="g";
if (args.length>2) options+=args.Item(2).toLowerCase();
var alterations=(options.indexOf("a")>=0);
if (alterations) options=options.replace(/a/g,"");
if (options.indexOf("v")>=0) {
options=options.replace(/v/g,"");
search=env(search);
replace=env(replace);
}
var search=new RegExp(search,options);
var str1, str2;
while (!WScript.StdIn.AtEndOfStream) {
str1=WScript.StdIn.ReadLine();
str2=str1.replace(search,replace);
if (!alterations || str1!=str2) WScript.Stdout.WriteLine(str2);
if (str1!=str2) rtn=0;
}
} catch(e) {
WScript.Stderr.WriteLine("JScript runtime error: "+e.message);
rtn=3;
}
WScript.Quit(rtn);
```
The sample above is a bit complicated; it is a combination of batch script and JScript.
The batch script part checks and validates the input arguments. If there is no issue, it triggers the JScript portion to complete the work.
## Troubleshooting
From Windows 7 onward, Windows may no longer recognize a file with the "vbs" suffix as an executable script if the file association has been changed. You need to restore the "vbs" association in the Windows registry (or via the `assoc`/`ftype` commands), or simply run the script explicitly with `cscript yourscript.vbs`.<file_sep>+++
date = "2014-03-20T14:59:31+11:00"
title = "JavaScript and OOP"
description="How to power JavaScript with Object Oriendted Programming ... "
+++
## Prerequisites
* You should have basic knowledge of Javascript and Object Oriented Programming.
* You should know how to test sample code on Chrome or Firefox. It is simple, just open your browser and click `F12`, copy the code to console and then press `Enter`.
## What is JavaScript?
* JavaScript, not to be confused with Java, was created in 10 days in May 1995 by <NAME>, then working at Netscape and now of Mozilla. The original name of this language was Mocha; in September 1995 it was changed to LiveScript, and in December of the same year the name JavaScript was adopted, because Java was very popular at the time.
* JavaScript is the programming language of the web, mobile apps, back-end APIs, etc. It's one of the most popular and in-demand skills in today's job market for good reason. As a software developer, it is essential that you have a solid understanding of this versatile language.
## What is OOP?
> Object-oriented programming (OOP) is a programming paradigm based on the concept of "objects", which may contain data, in the form of fields, also known as attributes or properties; and actions, in the form of functions, also known as methods.
> For example, a car is an object. The color and model of the car are attributes. Accelerating to 60km/h, braking to 0km/h, and turning left or right are the actions. From this sample, you can tell OOP makes code closer to the real world. That is why it is the most popular paradigm for developing business applications.
## Data types
The JavaScript (ECMAScript 5) standard defines six data types. Five of them are primitives: Boolean, Null, Undefined, Number, and String; the sixth is Object. In JavaScript, most things are objects, from core JavaScript features like strings and arrays to the browser APIs built on top of JavaScript. You can even create your own objects to encapsulate related functions and variables into efficient packages that act as handy data containers. The object-oriented nature of JavaScript is important to understand if you want to go further with your knowledge of the language. Here we cover object theory and syntax in detail, then look at how to create your own objects.
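A quick way to see the split between primitives and objects is the `typeof` operator (you can paste this into the browser console):
```javascript
// primitives report their own type
console.log(typeof true, typeof 42, typeof "text", typeof undefined);
// -> boolean number string undefined
console.log(typeof null); // -> "object", a well-known quirk of the language
// everything else is an object; arrays are objects too
console.log(typeof {}, typeof []); // -> object object
```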
## Object and prototype
***How to define an object***
There are a couple of ways to create a variable as an object.
```javascript
var obj1 = {};
var obj2 = new Object();
var obj3 = Object.create(null);
console.log( obj1 );
console.log( obj2 );
console.log( obj3 );
/*
output:
object {}
object {}
object {}
*/
```
Object type gives developers so much power and flexibility to customize their own data type. All JavaScript objects inherit the properties and methods from their prototype. The Object.prototype is on the top of the prototype chain. All JavaScript objects (Date, Array, RegExp, Function, ....) inherit from the Object.prototype.
* Objects have properties and methods. An object's methods are the actions that can be performed on it; they are one of the most powerful features for developers. Let's see how we can create objects with properties and methods.
* Create three cars with basic object usage.
```javascript
var car1 = { color: 'red', make:'Toyota', model:'Sedan', getInfo: function (){
console.log( this );
}};
var car2 = { color: 'black', make:'BMW', model:'Coupe', getInfo: function (){
console.log( this );
}};
var car3 = { color: 'white', make:'Subaru', model:'SUV', getInfo:function (){
console.log( this );
}};
car1.getInfo();
car2.getInfo();
car3.getInfo();
/*
Output:
Object {color: "red", make: "Toyota", model: "Sedan"}
Object {color: "black", make: "BMW", model: "Coupe"}
Object {color: "white", make: "Subaru", model: "SUV"}
*/
```
* You will find the same method defined in every object. Can we do better and define the method only once? The answer is yes: use an object constructor to create an object prototype. Any new object that inherits the same prototype will have the same properties and methods.
```javascript
var Car = function(color, make, model ) {
    this.color = color;
    this.make = make;
    this.model = model;
    this.getInfo = function (){
       console.log( this );
     };
};
var car1 = new Car('red','Toyota','Sedan');
var car2 = new Car('black','BMW', 'Coupe');
var car3 = new Car('white','Subaru','SUV');
car1.getInfo();
car2.getInfo();
car3.getInfo();
```
* You will get the same result as before. If you compare the two blocks of code, you may think the second way has more code than the first one. But imagine you need to create 20 kinds of objects, each with 20 methods defined inline: you would end up writing 20 x 20 = 400 methods. The object prototype is powerful, but we need to be careful when we use it, especially together with `this`. We will discuss this in more detail.
* Another sample using the prototype
```javascript
var Car = function(color, make, model ) {
    this.color = color;
    this.make = make;
    this.model = model;
};
Car.prototype = {
getInfo : function( ){
console.log( this );
}
};
var car1 = new Car('red','Toyota','Sedan');
var car2 = new Car('black','BMW', 'Coupe');
var car3 = new Car('white','Subaru','SUV');
car1.getInfo();
car2.getInfo();
car3.getInfo();
```
* The last way of using the prototype is a bit more verbose; the second one is more concise and nicer, and is the most popular paradigm.
## Class and inheritance
JavaScript has no built-in way of creating or implementing interfaces.
It also lacks built-in methods for determining whether an object implements the same set of
methods as another object, making it difficult to use objects interchangeably. Luckily, JavaScript
is extremely flexible, making it easy to add these features.
**Inheritance issue in Javascript**
```javascript
function Pet() {
this.name = "";
this.species = "";
this.offsprings = [];
this.setName = function ( name ) { this.name = name ;};
this.deliverBaby = function( obj ){
this.offsprings.push( obj );
}
this.getInfo = function (){
console.log( " species: ",this.species, " name: " ,this.name );
console.log( " has ", this.offsprings.length ," offsprings ");
}
};
function Dog() {
};
Dog.prototype = new Pet();
Dog.prototype.species = "Dog";
var dog1 = Object.create(new Dog());
dog1.setName ( "Polly");
var dog2 = new Dog();
dog2.setName ( "Lulu");
dog1.deliverBaby( new Dog());
dog2.deliverBaby( new Dog());
dog1.getInfo();
dog2.getInfo();
/*
output :
species: Dog name: Polly
has 2 offsprings <- It is wrong. It should be 1 only.
species: Dog name: Lulu
has 2 offsprings <- It is wrong. It should be 1 only.
*/
```
You can tell there is something wrong with the prototype and constructor at a glance. It really confuses many developers with a C++/Java OOP background. The sample code looks fine, but it doesn't work like other OOP programming languages. It is not really your fault; the problem goes back to <NAME>, who was told to make JavaScript look like Java even though there was no built-in class mechanism at the beginning. It just looks like an odd way of doing class-based OOP without real classes, and leaves the programmer wondering why they didn't implement proper class-based OOP. JavaScript keeps using constructors, which obscures JavaScript's true prototypal nature. It turns out most developers don't know how to use it properly and efficiently, including myself at an early stage.
Functions are first-class citizens in the JavaScript world, but a function is not really a class. We need to understand what `new` does: it creates an empty object, sets the prototype of that empty object to the prototype property of the constructor, calls the constructor function with `this` pointing to the newly-created object, and finally returns the object. You may be even more confused after reading this definition, so let's create a simple sample and take a close look at why the constructor and prototype cause this problem.
```javascript
var MyClass = function(){
this.name = 'MyClass';
this.getInfo = function ( ){
console.log( this );
}
}
MyClass.prototype.propObject = { id: 0, property: 'property' }
var objectA = new MyClass();
var objectB = new MyClass();
console.log( 'object A:', objectA.name , 'object B:', objectB.name );
console.log( 'MyClass.prototype === objectA.constructor.prototype ? ',
MyClass.prototype === objectA.constructor.prototype );
console.log( 'MyClass.prototype === objectB.constructor.prototype ? ',
MyClass.prototype === objectB.constructor.prototype );
console.log( " objectA.propObject : ", objectA.propObject ,
" objectB.propObject : ", objectB.propObject );
objectA.propObject.id = 1;
objectA.propObject.property = 'AAA';
console.log( " objectA.propObject : ", objectA.propObject,
" objectB.propObject : ", objectB.propObject );
/*
output :
MyClass object B: MyClass
MyClass.prototype === objectA.constructor.prototype ? true
MyClass.prototype === objectB.constructor.prototype ? true
objectA.propObject : Object {id: 0, property: "property"}
objectB.propObject : Object {id: 0, property: "property"}
objectA.propObject : Object {id: 1, property: "AAA"}
objectB.propObject : Object {id: 1, property: "AAA"}
*/
```
If we draw a diagram of the above sample, you will see what is happening behind the scenes. Since the prototype property is a reference, changing the prototype object's properties at runtime will affect all objects using that prototype.
```ini
+------------+
| MyClass | +---- objectA.prototype
| prototype<----------|
| | +---- objectB.prototype
+------------+
```
Now we have figured out the root cause. You may say it is easy to fix: we just need to create a new prototype for each object and clone the properties and methods from the super class. Yes, you are right, but that is not what I want to recommend. First, we need to ask whether we really need inheritance; second, whether the code is easier to maintain if we use inheritance.
If we still want to use inheritance, I suggest inheriting the properties rather than the methods. In my opinion, it is very rare that we really need to inherit a method. So we just need to find a proper way to solve the problem of property inheritance.
### Object-based Inheritance
```javascript
function Pet(name, master) {
this.name = name || "";
this.species = "";
this.master = master || {
name: '',
gender: ''
};
this.offsprings = [];
this.deliverBaby= function ( obj) {
this.offsprings.push(obj);
},
this.getInfo = function () {
console.log(" species: ", this.species,
" name: ", this.name,
" master : ", this.master.name,
" ", this.master.gender);
this.offsprings.forEach(function (e) {
console.log(" has baby : ", e.name, " ", e.species);
});
}
}
function Dog(name, master) {
Pet.call(this, name, master);
this.mother = null;
this.species = "Dog";
}
var dog1 = new Dog('Polly');
dog1.master = {
name: 'John',
gender: 'M'
};
var dog2 = new Dog('Lulu', {
name: 'Ada',
gender: 'F'
});
dog1.deliverBaby(new Dog('Polly-Baby-Dog'));
dog2.deliverBaby(new Dog('Lulu-Baby-Dog'));
dog2.deliverBaby(new Dog('Lulu-Baby-Dog-2'));
dog1.getInfo();
dog2.getInfo();
/*
output:
Dog name: Polly master : John M
has baby : Polly-Baby-Dog Dog
species: Dog name: Lulu master : Ada F
has baby : Lulu-Baby-Dog Dog
has baby : Lulu-Baby-Dog-2 Dog
*/
```
After you test it, you might ask: "What? How does this work? It looks like they share the same prototype through `this`." Actually the problem is the special object `this` in JavaScript, which is one of the most misunderstood parts of the language; it still confuses many JS developers today. If you have experience with other JavaScript frameworks, you will find many samples which use `that`, `self` or `vm` instead of the built-in `this`, e.g. `var that = {}`, `var self = {}`, etc. Let's see a new version of the above sample code.
```javascript
function Pet(name, master) {
    var self = this || {}; // use the object passed via .call(), or a fresh object
self.name = name || "";
self.species = "";
self.master = master || {
name: '',
gender: ''
};
self.offsprings = [];
return self;
}
function Dog(name, master) {
    // create the object with Dog.prototype so the shared methods are reachable
    var self = Object.create(Dog.prototype);
    Pet.call(self, name, master);
    self.species = "Dog";
    return self;
}
Dog.prototype = {
deliverBaby: function ( self, obj) {
self.offsprings.push(obj);
},
    getInfo: function (self) {
        console.log(" species: ", self.species,
            " name: ", self.name,
            " master : ", self.master.name,
            " ", self.master.gender);
self.offsprings.forEach(function (e) {
console.log(" has baby : ", e.name, " ", e.species);
});
}
};
var dog1 = new Dog('Polly');
dog1.master = {
name: 'John',
gender: 'M'
};
var dog2 = new Dog('Lulu', {
name: 'Ada',
gender: 'F'
});
dog1.deliverBaby(dog1, new Dog('Polly-Baby-Dog'));
dog2.deliverBaby(dog2, new Dog('Lulu-Baby-Dog'));
dog2.deliverBaby(dog2, new Dog('Lulu-Baby-Dog-2'));
dog1.getInfo(dog1);
dog2.getInfo(dog2);
```
Now I have rewritten the above sample with a few changed lines, and you can figure out why it works. But maybe you still want to implement inheritance as in other OOP languages such as C++ or Java. Then let's take a look at classical inheritance, which is much closer to other OOP languages. In classical inheritance it's impossible (or at least very difficult) to choose which properties you want to inherit; virtual base classes and interfaces are used to solve the diamond problem, which is much more complicated.
### Classical inheritance
```javascript
function extend(subClass, superClass) {
var F = function () {};
F.prototype = superClass.prototype;
subClass.prototype = new F();
subClass.prototype.constructor = subClass;
subClass.superclass = superClass.prototype;
if (superClass.prototype.constructor == Object.prototype.constructor) {
superClass.prototype.constructor = superClass;
}
}
function Pet(name, master) {
this.name = name || "";
this.species = "";
this.master = master || {
name: '',
gender: ''
};
this.offsprings = [];
}
Pet.prototype.deliverBaby = function (obj) {
this.offsprings.push(obj);
};
Pet.prototype.getInfo = function () {
    console.log(" species: ", this.species,
        " name: ", this.name,
        " master : ", this.master.name,
        " ", this.master.gender);
this.offsprings.forEach(function (e) {
console.log(" has baby : ", e.name, " ", e.species);
});
}
function Dog(name, master) {
Dog.superclass.constructor.call(this, name, master);
this.species = "Dog";
}
extend(Dog, Pet);
Dog.prototype.getInfo = function () {
console.log(" Override --- " );
Dog.superclass.getInfo.call(this) ;
};
var dog1 = new Dog('Polly');
dog1.master = {
name: 'John',
gender: 'M'
};
var dog2 = new Dog('Lulu', {
name: 'Ada',
gender: 'F'
});
dog1.deliverBaby(new Dog('Polly-Baby-Dog'));
dog2.deliverBaby(new Dog('Lulu-Baby-Dog'));
dog2.deliverBaby(new Dog('Lulu-Baby-Dog-2'));
dog1.getInfo();
dog2.getInfo();
```
Most programmers who come from a classical background argue that classical inheritance is more powerful than prototypal inheritance. The truth is that prototypal inheritance supports inheriting from multiple prototypes. Prototypal inheritance simply means one object inheriting from another object.
Inheritance, whether classical or prototypal, is used to reduce redundancy in code. Since prototypal inheritance allows for multiple inheritance, code which requires multiple inheritance is less redundant when written using prototypal inheritance rather than in a language which has classical inheritance but no multiple inheritance.
### Prototypal inheritance
```javascript
function clone(obj) {
if (obj === null || typeof obj !== 'object') {
return obj;
}
var temp = obj.constructor(); // give temp the original obj's constructor
for (var key in obj) {
temp[key] = clone(obj[key]);
}
return temp;
}
var Pet = {
name: "",
species: "",
master: {
name: '',
gender: ''
},
offsprings: [],
deliverBaby: function (obj) {
this.offsprings.push(obj);
},
getInfo: function () {
        console.log(" species: ", this.species,
            " name: ", this.name,
            " master : ", this.master.name,
            " ", this.master.gender);
this.offsprings.forEach(function (e) {
console.log(" has baby : ", e.name, " ", e.species);
});
}
};
var Dog = clone(Pet);
Dog.species = 'Dog';
Dog.getInfo = function () {
console.log(" Override -- species: ", this.species,
" name: ", this.name,
" master : ", this.master.name,
" ", this.master.gender);
this.offsprings.forEach(function (e) {
console.log(" has baby : ", e.name, " ", e.species);
});
};
var dog1 = clone(Dog);
var dog2 = clone(Dog);
dog1.name = 'Polly';
dog1.master = {
name: 'John',
gender: 'M'
};
dog2.name = 'Lulu';
dog2.master = {
name: 'Ada',
gender: 'F'
};
var dog11 = clone(Dog);
dog11.name = 'Polly-Baby-Dog';
var dog21 = clone(Dog);
var dog22 = clone(Dog);
dog21.name = 'Lulu-Baby-Dog';
dog22.name = 'Lulu-Baby-Dog-2';
dog1.deliverBaby(dog11);
dog2.deliverBaby(dog21);
dog2.deliverBaby(dog22);
dog1.getInfo();
dog2.getInfo();
```
One of the most important advantages of prototypal inheritance is that you can add new properties and methods to a prototype after it is created, and they are automatically made available to all the objects which delegate to that prototype. This is not possible in classical inheritance, because once a class is created you can't modify it at runtime. This is probably the single biggest advantage of prototypal inheritance over classical inheritance.
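A tiny sketch of that idea (the `animal` and `cat` names are made up for illustration, not taken from the earlier samples):
```javascript
var animal = { species: 'generic' };
var cat = Object.create(animal); // cat delegates to animal
// add a method to the prototype *after* cat was created
animal.describe = function () { return 'I am a ' + this.species; };
console.log(cat.describe()); // "I am a generic" - picked up automatically
```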
## Module and namespace
There are quite a lot of benefits from modules and namespaces, especially when you are going to build a common API shared within a whole application, or even across multiple systems in your enterprise. First things first, we should not pollute the global context, since that can potentially break existing functions or third-party frameworks which have been introduced into your application, and vice versa.
On the other hand, it is a good way to create reusable components, and it makes further enhancement and maintenance easier. It is very easy to create a module in JavaScript; the module pattern is one of the most widely used design patterns in the language.
### Closure
The module pattern makes use of one of the nicer features of JavaScript – closures – in order to give you some control of the privacy of your methods so that third party applications cannot access private data or override it.
* Simple closure
```javascript
var closureObject = (function() {
var _privateProperty = 'private';
var _privateMethod = function () {
console.log( ' private method ');
};
return {
publicProperty: 'Public Property',
publicMethod: function() {
console.log( ' Call ', _privateMethod() , ' from public method ');
},
setPrivateProperty: function ( newValue ){
_privateProperty= newValue;
},
getPrivateProperty: function( ){
return _privateProperty;
}
}
}());
console.log( closureObject.publicProperty );
console.log( closureObject._privateProperty );
// console.log( closureObject._privateMethod() );
// This will cause Uncaught TypeError
console.log( closureObject.getPrivateProperty() );
closureObject.setPrivateProperty( 'public');
console.log( closureObject.getPrivateProperty() );
/*
output:
Public Property
undefined //--> privateProperty can not be accessed directly
private
public //--> privateProperty can be updated by public method
*/
```
From the above sample code, you can see that JavaScript can easily implement encapsulation like an OOP language. Closure is the basis of the module pattern, and the module is the basis of the namespace. Maybe you will wonder why we need modules and namespaces when a closure seems good enough to control the API. On second thought, we will realize that if another script defines an object with the same name closureObject, they will clash at run time. As a simple solution, we could use a very long, different and ridiculous name to avoid the conflict, but that is not a nice solution. The module turns out to be a better way to solve this problem.
### Module
A module is not rocket science. Actually it is quite easy to implement.
* Simple module sample
``` javascript
var myModule = (function(undefined) {
var _privateProperty = 'private';
var _privateMethod = function () {
console.log( ' private method ');
};
return {
publicProperty: 'Public Property',
publicMethod: function() {
console.log( ' Call ', _privateMethod() , ' from public method ');
},
setPrivateProperty: function ( newValue ){
_privateProperty= newValue;
},
getPrivateProperty: function( ){
return _privateProperty;
}
}
}());
```
You may say, "What? The closure is the module." Yes, you can say that. The little difference is the arguments used during the auto-initialization. By declaring a function parameter named undefined (the name actually does not matter) and not passing anything for it, you make sure you have a variable which really is undefined. This technique ensures the module works as expected even if the global undefined has been unintentionally reassigned by another script.
Once we create our module, we can simply extend the module with the same technique.
* Module's extension with override or new api
```javascript
var myModule = (function() {
....
}());
var extendModule = (function( m){
m.publicMethod = function ( newArgument ) { // overload publicMethod
// TODO
};
m.newApi = function () { //
// TODO
};
}(myModule));
```
### Namespace
Now we will go further to the namespace, which is based on the module technique. A namespace gives you the ability to have public and private properties and methods.
* The code inside doesn't use the object literal notation.
* It allows you to use $ inside your code without worrying about clashing with other libraries.
* It allows your library to grow across files using the "window.rtkns = window.rtkns || {}" technique.
* It is a common pattern that you will see in many libraries, widgets, and plugins.
```javascript
(function (rtkns, $, undefined) {
rtkns.createNS = function (namespace) {
var nsparts = namespace.split(".");
var parent = rtkns;
if (nsparts[0] === "rtkns") {
nsparts = nsparts.slice(1);
}
for (var i = 0; i < nsparts.length; i++) {
var partname = nsparts[i];
if (typeof parent[partname] === "undefined") {
parent[partname] = {};
}
parent = parent[partname];
}
return parent;
};
var clone = function(obj) {
if (obj === null || typeof obj !== 'object') {
return obj;
}
// give temp the original obj's constructor
var temp = obj.constructor();
for (var key in obj) {
temp[key] = clone(obj[key]);
}
return temp;
};
rtkns.clone = clone;
rtkns.createNS("rtkns");
rtkns.utils = rtkns.createNS("rtkns.utils");
rtkns.model = rtkns.createNS("rtkns.model");
rtkns.model.entity = {
id: 0,
createdBy:'',
modifiedBy:'',
created: null,
modified: null,
};
var entity = rtkns.model.entity;
rtkns.model.order = clone ( entity);
var order = rtkns.model.order ;
order.amount = 0;
order.description = '';
rtkns.model.client = clone( entity);
var client = rtkns.model.client ;
client.name = '';
client.email = '';
client.orders = [];
client.purchase = function ( order ){
this.orders.push( order );
};
rtkns.utils.toString = function (entity) {
return entity?JSON.stringify(entity):entity;
};
}(window.rtkns = window.rtkns || {}));
var rtkns = window.rtkns;
var client1 = rtkns.clone( rtkns.model.client );
client1.name = 'client 1';
client1.email = '<EMAIL>';
var client2 = rtkns.clone( rtkns.model.client );
client2.name = 'client 2';
client2.email = '<EMAIL>';
var order1 = rtkns.clone( rtkns.model.order );
order1.amount = 100;
order1.description = 'order 1';
var order2 = rtkns.clone( rtkns.model.order );
order2.amount = 600;
order2.description = 'order 2';
client1.purchase( order1 );
client2.purchase( order2 );
console.log(rtkns.utils.toString( client1));
console.log(rtkns.utils.toString( client2));
/*
output:
{"id":0,"createdBy":"","modifiedBy":"","created":null,"modified":null,
"name":"client 1","email":"<EMAIL>",
"orders":[{"id":0,"createdBy":"","modifiedBy":"",
"created":null,"modified":null,
"amount":100,"description":"order 1"}]}
{"id":0,"createdBy":"","modifiedBy":"","created":null,"modified":null,
"name":"client 2","email":"<EMAIL>",
"orders":[{"id":0,"createdBy":"","modifiedBy":"",
"created":null,"modified":null,
"amount":600,"description":"order 2"}]}
*/
```
The sample above combines a namespace and prototypal inheritance. A namespace allows you to add new modules for enhancement and to organize your API better. On the other hand, through the global namespace you can inject a customized service, or replace one. The disadvantage of a namespace is that when the source code grows, it becomes a bit more complicated, especially if you break it into different files. Mocking and unit testing will need a bit more work as well. There is no pattern that is a silver bullet; rather, you should assess where you are and examine the pros and cons of each pattern to address your situation.
## Interfaces
An interface tells programmers what methods a given class implements, which makes it easier to use. Interfaces also stabilize the ways in which different classes can communicate.
Using any interface implementation in JavaScript will create a small performance hit, due in part to the overhead of having another method invocation.
The biggest drawback is that there is no way to force other programmers to respect the interfaces you have created. In JavaScript, you must manually ensure that a given class implements an interface. You can mitigate this problem by using coding conventions and helper classes, but it will never entirely go away. Everyone on your project must agree to use them and check for them; otherwise much of their value is lost.
JavaScript does not come with built-in support for interfaces, and there is no interface keyword, so any method you use to implement this will be very different from languages such as C++ or Java, which makes it a little more difficult. JavaScript uses what's called duck typing. (If it walks like a duck and quacks like a duck, as far as JS cares, it's a duck.) If your object has quack(), walk(), and fly() methods, code can use it wherever it expects an object that can walk, quack, and fly, without requiring the implementation of some "Duckable" interface.
The sample below uses an Interface object to check whether a given instance implements the same methods as the interface.
```javascript
var Interface = function(interfaceName, interfaceMembers) {
if (!(this instanceof Interface)) {
return new Interface(interfaceName, interfaceMembers);
}
var interfaceObj = this;
Object.keys(interfaceMembers).forEach(function(memberName) {
interfaceObj[memberName] = function() {
Interface.errorDetect(interfaceName, memberName);
};
});
interfaceObj.name = interfaceName;
return interfaceObj;
};
Interface.errorDetect = function(interfaceName, interfaceMember) {
throw Error('errorDetect: Class does not implement interface member '
+ interfaceName
+ '.'
+ interfaceMember + '()');
};
Interface.ensureImplement = function(obj /*, interfaces */ ) {
var interfaces = [].slice.call(arguments, 1);
interfaces.forEach(function(_interface) {
Object.keys(_interface).forEach(function(interfaceMember) {
var isFunction = typeof _interface[interfaceMember] === 'function';
if (isFunction && !obj[interfaceMember]) {
Interface.errorDetect(_interface.name, interfaceMember);
}
});
});
return true;
};
```
**How to use this interface**
* The samples below show you how the Interface can ensure an object implements multiple interfaces.
```javascript
// Sample 1 with only one interface
var ILog = Interface('ILog', {
logInfo:function(){},
logWarning:function(){},
logError:function(){},
});
var loggerA = {
logInfo:function(){},
logWarning:function(){},
logError:function(){},
};
// loggerB does not implement all methods
var loggerB = {
logInfo:function(){},
logWarning:function(){},
};
console.log(Interface.ensureImplement( loggerA, ILog));
console.log(Interface.ensureImplement( loggerB, ILog));
/*
output:
true
Uncaught Error: errorDetect: Class does not
implement interface member ILog.logError()
...
*/
// Sample 2 with 2 interfaces
var Submarine = Interface('Submarine', {
operateUnderwater:function(){}
});
var Car = Interface('Car', {
operateOnRoad:function(){}
});
var SubmarineCar = {
operateUnderwater:function(){},
operateOnRoad:function(){},
};
console.log(Interface.ensureImplement( SubmarineCar, Submarine, Car ));
/**
output:
true
*/
```
<file_sep>+++
title="Good practice - 1"
description="Good practice advice - Part 1"
weight=20
+++
### Strings
#### Change a character
* How to change a character in a string
```go
str := "hello"
c := []byte(str)
c[0] = 'c'
s2 := string(c) // s2 == "cello"
```
#### Substring
* How to take a part(substring) of a string str
```go
substr := str[n:m]
```
#### for-loop
* How to loop over a string str with for or for-range:
```go
// gives only the bytes:
for i:=0; i < len(str); i++ {
… = str[i]
}
// gives the Unicode characters:
for ix, ch := range str {
// …
}
```
#### bytes of str
* Number of bytes in a string str
```go
len(str)
```
* Number of characters in a string str
```go
utf8.RuneCountInString(str) // FASTEST
len([]rune(str))
```
#### Concat strings
* Best performance with byte buffer
```go
var buffer bytes.Buffer
for {
if s, ok := getNextString(); ok { //method getNextString() not shown here
buffer.WriteString(s)
} else {
break
}
}
```
* Simple way
```go
strings.Join()
```
#### Command-line args
* Use the flag and os packages, as in the sketch below
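A minimal runnable sketch (the flag name `name` is just an example):
```go
package main

import (
	"flag"
	"fmt"
	"os"
)

func main() {
	// raw arguments via the os package
	fmt.Println("program:", os.Args[0], "args:", os.Args[1:])

	// typed, named flags via the flag package
	name := flag.String("name", "world", "who to greet")
	flag.Parse()
	fmt.Println("hello,", *name)
}
```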
### Array
#### Cut the last element
* How to cut the last element of an array or slice line
```go
line = line[:len(line)-1]
```
#### Loop over the array
* for-loop
```go
for i:=0; i < len(arr); i++ {
… = arr[i]
}
```
* for range
```go
for ix, value := range arr {
…
}
```
#### Search value in 2D array
```go
found := false
Found: for row := range arr2Dim {
for column := range arr2Dim[row] {
if arr2Dim[row][column] == V {
found = true
break Found
}
}
}
```
#### Reverse
```go
func ReverseInts(s []int) {
first := 0
last := len(s) - 1
for first < last {
s[first], s[last] = s[last], s[first]
first++
last--
}
}
```
### Maps
#### loop with range
```go
for key, value := range map1 {
…
}
```
#### Test if key exists
```go
val1, isPresent = map1[key1]
// which gives: val or zero-value, true or false
```
#### Deleting a key in a map
```go
delete(map1, key1)
```
### Interface
#### Test if a value implements Stringer
```go
if v, ok := v.(Stringer); ok {
fmt.Printf("implements String(): %s\n", v.String());
}
```
#### A type classifier:
```go
func classifier(items ...interface{}) {
for i, x := range items {
switch x.(type) {
case bool: fmt.Printf("param #%d is a bool\n", i)
case float64: fmt.Printf("param #%d is a float64\n", i)
case int, int64: fmt.Printf("param #%d is an int\n", i)
case nil: fmt.Printf("param #%d is nil\n", i)
case string: fmt.Printf("param #%d is a string\n", i)
default: fmt.Printf("param #%d’s type is unknown\n", i)
}
}
}
```
<file_sep>+++
title="Good practice - 2"
description="Good practice advice - Part 2"
weight=21
+++
### Goroutines and channels
* Performance advice:
> A rule of thumb if you use parallelism to gain efficiency over serial computation: the amount of work done inside a goroutine has to be much higher than the cost of creating goroutines and sending data back and forth between them.
#### Using buffered channels
* Using buffered channels for performance:
A buffered channel can easily double the throughput; depending on the context the performance gain can be 10x or more. You can try to optimize further by adjusting the capacity of the channel, as in the sketch below.
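A minimal runnable sketch of the idea (the capacity of 128 is an arbitrary choice):
```go
package main

import "fmt"

func main() {
	// a buffered channel lets the producer run ahead of the consumer
	ch := make(chan int, 128) // an unbuffered channel would be make(chan int)
	go func() {
		for i := 0; i < 1000; i++ {
			ch <- i // only blocks when the buffer is full
		}
		close(ch)
	}()
	sum := 0
	for v := range ch {
		sum += v
	}
	fmt.Println("sum:", sum)
}
```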
#### Limiting the number of items in a channel
* Limiting the number of items in a channel and packing them in arrays:
Channels become a bottleneck if you pass a lot of individual items through them. You can work around this by packing chunks of data into slices (arrays) and then unpacking them on the other end; this can give a speed gain of a factor of 10x. A sketch of the idea follows.
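A minimal runnable sketch, sending slices of 100 ints instead of single ints (the chunk size is arbitrary):
```go
package main

import "fmt"

func main() {
	const chunkSize = 100
	ch := make(chan []int, 8) // pass chunks, not individual items

	go func() {
		chunk := make([]int, 0, chunkSize)
		for i := 0; i < 1000; i++ {
			chunk = append(chunk, i)
			if len(chunk) == chunkSize {
				ch <- chunk // hand over a full chunk
				chunk = make([]int, 0, chunkSize)
			}
		}
		if len(chunk) > 0 {
			ch <- chunk // flush the remainder
		}
		close(ch)
	}()

	total := 0
	for chunk := range ch {
		for _, v := range chunk { // unpack on the receiving side
			total += v
		}
	}
	fmt.Println("total:", total)
}
```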
#### loop over a channel
* How to loop over a channel ch with a for-range:
```go
for v := range ch {
// do something with v
}
```
#### Test a channel is closed
* How to test if a channel ch is closed:
```go
//read channel until it closes or error-condition
for {
	input, open := <-ch
	if !open {
		break
	}
	fmt.Printf("%s ", input)
}
```
#### Semaphore pattern
* How to use a channel to let the main program wait until the goroutine completes?
(Semaphore pattern)
```go
ch := make(chan int) // Allocate a channel.
// Start something in a goroutine; when it completes, signal on the channel.
go func() {
// doSomething
ch <- 1 // Send a signal; value does not matter.
}()
doSomethingElseForAWhile()
<-ch // Wait for goroutine to finish; discard sent value.
// If the routine must block forever, omit ch <- 1 from the lambda function.
```
#### Channel Factory pattern
* Channel Factory pattern: the function is a channel factory and starts a lambda
function as goroutine populating the channel
```go
func pump() chan int {
ch := make(chan int)
go func() {
for i := 0; ; i++ {
ch <- i
}
}()
return ch
}
```
#### Channel Iterator pattern
* Channel Iterator pattern: implement the Iter() method of a container to return a channel for the calling for-loop to read from.
```go
func (c *container) Iter() <-chan item {
ch := make(chan item)
go func() {
for i := 0; i < c.Len(); i++ {
// or use a for-range loop
ch <- c.items[i]
}
}()
return ch
}
// The code which calls this method can then iterate over the container
for x := range container.Iter() { ... }
```
#### Limiting the number of requests
* Limiting the number of requests processed concurrently
```go
const (
AvailableMemory = 10 << 20 // 10 MB, for example
AverageMemoryPerRequest = 10 << 10 // 10 KB
MAXREQS = AvailableMemory / AverageMemoryPerRequest // here amounts to 1000
)
var sem = make(chan int, MAXREQS)
type Request struct {
a, b int
replyc chan int
}
func process(r *Request) {
// Do something
// May take a long time and use a lot of memory or CPU
}
func handle(r *Request) {
process(r)
// signal done: enable next request to start
// by making 1 empty place in the buffer
<-sem
}
func Server(queue chan *Request) {
for {
sem <- 1
// blocks when channel is full (1000 requests are active)
// so wait here until there is capacity to process a request
// (doesn’t matter what we put in it)
request := <-queue
go handle(request)
}
}
func main() {
fmt.Println(" AvailableMemory ", AvailableMemory)
fmt.Println(" AverageMemoryPerRequest ", AverageMemoryPerRequest)
queue := make(chan *Request)
go Server(queue)
}
```
#### Parallelizing a computation over several CPU cores
* Parallelizing a computation over a number of CPU cores
```go
const NCPU = 4
func DoAll() {
sem := make(chan int, NCPU) // Buffering optional but sensible.
for i := 0; i < NCPU; i++ {
go DoPart(sem)
}
// Drain the channel sem, waiting for NCPU tasks to complete
for i := 0; i < NCPU; i++ {
<-sem // wait for one task to complete
}
// All done.
}
func DoPart(sem chan int) {
// do the part of the computation
sem <- 1 // signal that this piece is done
}
func main() {
runtime.GOMAXPROCS(NCPU)
DoAll()
}
```
#### Parallelizing a computation over a large amount of data
```go
func ParallelProcessData(in <-chan *Data, out chan<- *Data) {
// make channels:
preOut := make(chan *Data, 100)
stepAOut := make(chan *Data, 100)
stepBOut := make(chan *Data, 100)
stepCOut := make(chan *Data, 100)
// start parallel computations:
go PreprocessData(in, preOut)
go ProcessStepA(preOut, stepAOut)
go ProcessStepB(stepAOut, stepBOut)
go ProcessStepC(stepBOut, stepCOut)
go PostProcessData(stepCOut, out)
}
```
#### Simple timeout pattern
```go
timeout := make(chan bool, 1)
go func() {
time.Sleep(1e9) // one second
timeout <- true
}()
select {
case <-ch:
// a read from ch has occurred
case <-timeout:
// the read from ch has timed out
}
```
#### Use in- & out- channel instead of lock
```go
func Worker(in, out chan *Task) {
for {
t := <-in
process(t)
out <- t
}
}
```
#### Concurrent access to object
```go
type Person struct {
Name string
salary float64
chF chan func()
}
func NewPerson(name string, salary float64) *Person {
p := &Person{name, salary, make(chan func())}
go p.backend()
return p
}
func (p *Person) backend() {
for f := range p.chF {
f()
}
}
// Set salary.
func (p *Person) SetSalary(sal float64) {
p.chF <- func() { p.salary = sal }
}
// Retrieve salary.
func (p *Person) Salary() float64 {
fChan := make(chan float64)
p.chF <- func() { fChan <- p.salary }
return <-fChan
}
func (p *Person) String() string {
return "Person - name is: " + p.Name + " - salary is: " + strconv.
FormatFloat(p.Salary(), 'f', 2, 64)
}
func main() {
bs := NewPerson("<NAME>", 2500.5)
fmt.Println(bs)
bs.SetSalary(4000.25)
fmt.Println("Salary changed:")
fmt.Println(bs)
}
/* Output:
Person - name is: <NAME> - salary is: 2500.50
Salary changed:
Person - name is: <NAME> - salary is: 4000.25
*/
```
#### Abandon synchronous calls
* Abandon synchronous calls that run too long
```go
ch := make(chan error, 1)
go func() { ch <- client.Call("Service.Method", args, &reply) } ()
select {
case resp := <-ch:
// use resp and reply
case <-time.After(timeoutNs):
// call timed out
break
}
```
#### Benchmarking goroutines
```go
func main() {
fmt.Println("sync", testing.Benchmark(BenchmarkChannelSync).String())
fmt.Println("buffered", testing.Benchmark(BenchmarkChannelBuffered).String())
}
func BenchmarkChannelSync(b *testing.B) {
ch := make(chan int)
go func() {
for i := 0; i < b.N; i++ {
ch <- i
}
close(ch)
}()
for _ = range ch {
}
}
func BenchmarkChannelBuffered(b *testing.B) {
ch := make(chan int, 128)
go func() {
for i := 0; i < b.N; i++ {
ch <- i
}
close(ch)
}()
for _ = range ch {
}
}
/* Output:
sync 3000000 420 ns/op
buffered 10000000 103 ns/op
*/
```
#### Stopping a goroutine
```go
runtime.Goexit()
```
<file_sep>+++
title = "JS & ES snippet"
description="JavaScript, ECMAScript snippets"
+++
### Setup the global node modules
* Add an environment variable to tell node where the global node modules sit
```bash
# I use nvm to manage my node and node modules
export NODE_PATH=${HOME}/.nvm/versions/node/<node_version>/lib/node_modules
```
* If you use npm by default, you may run into permission problems accessing the global node modules. I suggest setting up a customized global node module folder under your home directory.
* Create a new folder under your home directory
* Install npm to new global node module
* update environment variables in the profile
```bash
mkdir $HOME/.node_modules
npm config set prefix $HOME/.node_modules
npm install -g npm
echo 'export NODE_MODULES=$HOME/.node_modules' >> $HOME/.profile
echo 'export PATH=$PATH:$NODE_MODULES/bin' >> $HOME/.profile
source $HOME/.profile
```
### Parsing CSV file
#### Parse csv file with fast-csv
* Install the package fast-csv globally: `npm install -g fast-csv`
* Assume there is a csv file named test.csv, which contains a few contacts
##### Sample 1
```js
const fs = require("fs")
const csv = require('fast-csv');
fs.createReadStream('test.csv')
.pipe(csv.parse({headers:true}))
.on('error', error => console.error(error))
.on('data', row => console.log(`ROW=${JSON.stringify(row)}`))
.on('end', rowCount => console.log(`Parsed ${rowCount} rows`));
```
##### Sample 2
* Validate empty field
```js
const csv = require('fast-csv')
const fs = require('fs')
const fileStream = fs.createReadStream('test.csv')
const parser = csv.parse({ ignoreEmpty: true, headers: true, trim: true })
fileStream
.pipe(parser)
.validate(data => data.last_name && data.mobile.startsWith('04'))
.on('error', error => console.error(error))
.on('data', row => console.log(`Valid [row=${JSON.stringify(row)}]`))
.on('data-invalid', (row, rowNumber) => console.log(`Invalid [rowNumber=${rowNumber}] [row=${JSON.stringify(row)}]`))
.on('end', rowCount => console.log(`Parsed ${rowCount} rows`))
```
<file_sep>+++
title = "AWS: S3 - 1"
description = "S3 Part 1 - Storage"
weight=7
+++
## S3 Part 1
Amazon Simple Storage Service (Amazon S3) is an object storage service that offers industry-leading scalability, data availability, security, and performance.
### Storage feature
Amazon S3 has various features you can use to organize and manage your data in ways that support specific use cases, enable cost efficiencies, enforce security, and meet compliance requirements. Data is stored as objects within resources called “buckets”, and a single object can be up to 5 terabytes in size. Amazon S3 offers a range of storage classes designed for different use cases.
### S3 Standard
S3 Standard offers high durability, availability, and performance object storage for frequently accessed data. Because it delivers low latency and high throughput, S3 Standard is appropriate for a wide variety of use cases, including cloud applications, dynamic websites, content distribution, mobile and gaming applications, and big data analytics.
### S3 Standard-IA (Infrequent Access)
S3 Standard-IA is for data that is accessed less frequently, but requires rapid access when needed. S3 Standard-IA offers the high durability, high throughput, and low latency of S3 Standard, with a low per GB storage price and per GB retrieval fee. This combination of low cost and high performance make S3 Standard-IA ideal for long-term storage, backups, and as a data store for disaster recovery files.
### S3 One Zone-IA (Infrequent Access)
S3 One Zone-IA is for data that is accessed less frequently, but requires rapid access when needed. Unlike other S3 Storage Classes which store data in a minimum of three Availability Zones (AZs), S3 One Zone-IA stores data in a single AZ and costs 20% less than S3 Standard-IA. S3 One Zone-IA is ideal for customers who want a lower-cost option for infrequently accessed data but do not require the availability and resilience of S3 Standard or S3 Standard-IA. It’s a good choice for storing secondary backup copies of on-premises data or easily re-creatable data.
### S3 Glacier
S3 Glacier is a secure, durable, and low-cost storage class for data archiving. You can reliably store any amount of data at costs that are competitive with or cheaper than on-premises solutions. To keep costs low yet suitable for varying needs, S3 Glacier provides three retrieval options that range from a few minutes to hours.
### S3 Glacier Deep Archive
S3 Glacier Deep Archive is Amazon S3’s lowest-cost storage class and supports long-term retention and digital preservation for data that may be accessed once or twice in a year. It is designed for customers — particularly those in highly-regulated industries, such as the Financial Services, Healthcare, and Public Sectors — that retain data sets for 7-10 years or longer to meet regulatory compliance requirements.
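To make these storage classes concrete, here is a small illustrative sketch using the AWS SDK for Python (boto3). It is not tied to any specific setup: the bucket name, keys and local file names are placeholders, and boto3 with valid AWS credentials is assumed.
```python
import boto3

s3 = boto3.client("s3")

# Upload an object directly into the Standard-IA storage class
s3.upload_file(
    Filename="backup.tar.gz",           # local file (placeholder)
    Bucket="my-example-bucket",         # placeholder bucket name
    Key="backups/backup.tar.gz",
    ExtraArgs={"StorageClass": "STANDARD_IA"},
)

# Archive an object to Glacier Deep Archive
with open("2016-records.csv", "rb") as body:
    s3.put_object(
        Bucket="my-example-bucket",
        Key="archive/2016-records.csv",
        Body=body,
        StorageClass="DEEP_ARCHIVE",
    )
```
If no `StorageClass` is given, objects land in S3 Standard; lifecycle rules can later transition them to the cheaper classes described above.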
| ae5a0adc6cb7d6c38eea83f8d2b58c3951f01f9c | [
"Markdown",
"JavaScript",
"Rust",
"Go",
"Dockerfile",
"Shell"
] | 219 | Markdown | harryho/blog-hugo | f50fe72a09b2b2e8b2e878cb79926d13e0f9ef6d | 5839bb29b589916d4ed2e1255534d3d035a7a62c | |
refs/heads/master | <file_sep># js1k 2016
[Fireworks!](http://js1k.com/2016-elemental/demo/2584)

<file_sep>#!/usr/bin/env node
'use strict'
const uglify = require('uglify-js')
const fs = require('fs')
const mkdirp = require('mkdirp')
const packer = require('regpack').packer
const path = require('path')
const srcPath = path.resolve(__dirname, '..', 'the.js')
const distDirPath = path.resolve(__dirname, '..', 'dist')
const distJsPath = path.resolve(distDirPath, 'submit.js')
const distShimPath = path.resolve(distDirPath, 'demo.html')
const shimTopPath = path.resolve(__dirname, 'shim-top.html')
const shimBottomPath = path.resolve(__dirname, 'shim-bottom.html')
function buildJs () {
const src = uglify.minify(srcPath).code
const packerResults = packer.runPacker(src, {
withMath: false,
hash2DContext: true,
varsNotReassigned: []
})
const allOutputs = [src].concat(packerResults.reduce((result, data) => {
return result.concat(data.result.map(r => r[1]))
}, []))
const shortestResult = allOutputs.reduce((result, output) => {
if (output.length < result.length) {
return output
} else {
return result
}
}, allOutputs[0])
return shortestResult
}
function buildHtml (js) {
return [
fs.readFileSync(shimTopPath, { encoding: 'utf8' }),
js,
fs.readFileSync(shimBottomPath, { encoding: 'utf8' })
].join('')
}
mkdirp.sync(distDirPath)
const js = buildJs()
fs.writeFileSync(distJsPath, js)
fs.writeFileSync(distShimPath, buildHtml(js))
console.log(`output is ${js.length} characters`)
<file_sep>/* eslint-disable no-undef */
// style the <body>
b.style.cssText = 'margin:0;padding:0;background:#262747;overflow:hidden;font-family:sans-serif;font-size:26pt;font-weight:100'
// add a div that tells you to click
k = b.appendChild(document.createElement('div'))
k.style.cssText = 'position:fixed;top:0;left:0;width:100%;padding:26px;color:#fff;text-align:center;opacity:1;transition:opacity 3s'
k.innerHTML = 'click for fireworks!'
// keep track of canvas size
w = innerWidth
h = innerHeight
// an array of all of the rockets and the particles
r = []
p = []
// shoots a rocket on click
b.onclick = function (clickEvent) {
k.style.opacity = 0
r.push({
a: clickEvent.clientX,
b: clickEvent.clientY,
y: h
})
};
// tick
(function T () {
// iterate through all the rockets, clearing the list if there aren't any active
U = 1
c.fillStyle = '#ffffff'
r.map(function (rocket) {
// skip if this rocket is already destroyed
if (rocket.d) { return }
// we've updated at least 1 rocket
U = 0
// shoot the rocket upward, destroying it if needed
if ((rocket.y -= 20) < rocket.b) {
// destroy the rocket
rocket.d = 1
// generate particles
for (R = (((Math.random() * 10) + 5) | 0); R; R--) {
// choose this ring's color, velocity
C = '#' + [
'ff0000',
'ff9900',
'ffff00',
'00ff00',
'00ffff',
'ff00ff'
][(Math.random() * 6) | 0]
V = (Math.random() * 7) + 5
// how many particles?
I = ((Math.random() * 20) + 5) | 10
// generate each particle
for (P = I; P; P--) {
p.push({
c: C,
x: rocket.a,
y: rocket.y,
v: V,
s: 10,
f: Math.cos(2 * Math.PI * P / I),
g: Math.sin(2 * Math.PI * P / I)
})
}
}
} else {
// draw the rocket
c.beginPath()
c.arc(rocket.a, rocket.y, 2, 0, 2 * Math.PI)
c.fill()
}
})
if (U) {
r = []
}
// iterate through all the particles, clearing them if you updated none
U = 1
p.map(function (particle) {
// stop if it's too slow
if ((particle.v -= 0.2) < 0) { return }
// we've updated at least one particle
U = 0
// slowly accelerate downward
particle.g += 0.03
// move the particle
particle.x += particle.f * particle.v
particle.y += particle.g * particle.v
// shrink
particle.s = Math.max(particle.s -= 0.5, 1)
// draw the particle
c.fillStyle = particle.c
c.beginPath()
c.arc(particle.x, particle.y, particle.s, 0, 2 * Math.PI)
c.fill()
})
if (U) {
p = []
}
// go again
requestAnimationFrame(T)
})()
| c612cc8475d48756f86af685d9f10d916bdbfc30 | [
"Markdown",
"JavaScript"
] | 3 | Markdown | EvanHahn/js1k-2016 | 0150ce517635f808f90ff836ccf1a149af0e057c | fd5a8f05e0554e3da45863e5fcb0c73105a0459e | |
refs/heads/master | <file_sep>CREATE USER 'bayulaxanauser'@'%' IDENTIFIED BY '<PASSWORD>';
GRANT ALL PRIVILEGES on databasename.* to 'bayulaxanauser'@'%';
FLUSH PRIVILEGES;<file_sep># Copy the hosts list
sudo cp /vagrant/sources/hosts /etc/hosts
# Copy the new source list
sudo cp '/vagrant/sources/sources.list' '/etc/apt/'
# Update package
sudo apt update -y
# Apache Installation
sudo apt install apache2 -y
sudo ufw allow in "Apache Full"<file_sep># MongoDB Cluster Implementation
**Name: <NAME>**
**NRP : 05111740000020**
- [Task Description](#task-description)
+ [Dataset](#dataset)
+ [Cluster Layout](#cluster-layout)
- [Cluster Details](#cluster-details)
- [Environment](#environment)
+ [Overview](#overview)
- [Implementation](#implementation)
+ [1. Vagrant Configuration](#1-vagrant-configuration)
+ [2. Provisioning Script](#2-provisioning-script)
+ [3. Building with Vagrant](#3-building-with-vagrant)
+ [4. Config Server Configuration](#4-config-server-configuration)
+ [5. Query Router Configuration](#5-query-router-configuration)
+ [6. Shard Server Configuration](#6-shard-server-configuration)
- [Importing the Dataset](#importing-the-dataset)
- [CRUD Application](#crud-application)
+ [Connect Database](#connect-database)
+ [READ Operation](#read-operation)
+ [CREATE Operation](#create-operation)
+ [UPDATE Operation](#update-operation)
+ [DELETE Operation](#delete-operation)
+ [AGGREGATION Operation 1](#aggregation-operation-1)
+ [AGGREGATION Operation 2](#aggregation-operation-2)
## Task Description
For the **MongoDB Cluster Implementation** assignment, the things that need to be done are:
- Implement a MongoDB cluster
- Choose the dataset to be used
- Implement a Create, Read, Update and Delete application on top of the MongoDB cluster
**Dataset constraints:**
- The data can be CSV or JSON.
- At least 1000 records
- The dataset will later be imported into the MongoDB server
### Dataset
The dataset used in this implementation is titled "**Population Health Measure: Age-Adjusted Mortality Rates**", taken from the [Data Gov](https://catalog.data.gov/dataset) site. The full dataset can be accessed through the following link.
[**Population Health Measure: Age-Adjusted Mortality Rates**](https://catalog.data.gov/dataset/population-health-measures-age-adjusted-mortality-rates-6a2e8)
The dataset contains 12150 records with 9 attributes. Below is a sample of the data.

### Cluster Layout
The implemented cluster consists of:
- 2 config servers
- 3 data/shard servers
- 1 query router
## Cluster Details
The cluster runs virtually on **Virtualbox**, provisioned with **Vagrant**. The details of each node are listed in the table below.
| Node | Node Name | IP Address | Operating System | RAM |
| --------------- | ------------------ | ------------- | -------------- | ----------- |
| Config Server 1 | mongo_config_1 | 192.168.33.11 | Ubuntu-18.04 | 512 MB |
| Config Server 2 | mongo_config_2 | 192.168.33.12 | Ubuntu-18.04 | 512 MB |
| Shard Server 2 | mongo_shard_1 | 192.168.33.13 | Ubuntu-18.04 | 512 MB |
| Shard Server 2 | mongo_shard_2 | 192.168.33.14 | Ubuntu-18.04 | 512 MB |
| Shard Server 2 | mongo_shard_3 | 192.168.33.15 | Ubuntu-18.04 | 512 MB |
| Query Router | mongo_query_router | 192.168.33.16 | Ubuntu-18.04 | 512 MB |
## Environment
### Overview
- Operating system: Ubuntu 18.04
- Virtual machine: Virtualbox
- Vagrant
- MongoDB
The implementation was done entirely on **Ubuntu 18.04**. The virtual machine used is **Virtualbox** 5.2 with the **_bento/ubuntu-18.04_** box, provisioned with **Vagrant** 2.2.5. The **MongoDB** version used is 4.2.
## Implementation
### 1. Vagrant Configuration
Before implementing MongoDB, the first thing to do is create the Vagrant configuration file that defines the nodes described above.
```
$ vagrant init
```
This command creates a `Vagrantfile`, which is then filled with the cluster configuration. The Vagrantfile configuration is as follows.
File [**Vagrantfile**](Vagrantfile)
```ruby
# -*- mode: ruby -*-
# vi: set ft=ruby :
Vagrant.configure("2") do |config|
config.vm.define "mongo_config_1" do |mongo_config_1|
mongo_config_1.vm.hostname = "mongo-config-1"
mongo_config_1.vm.box = "bento/ubuntu-18.04"
mongo_config_1.vm.network "private_network", ip: "192.168.33.11"
mongo_config_1.vm.provider "virtualbox" do |vb|
vb.name = "mongo-config-1"
vb.gui = false
vb.memory = "512"
end
mongo_config_1.vm.provision "shell", path: "provision/allhosts.sh", privileged: false
end
config.vm.define "mongo_config_2" do |mongo_config_2|
mongo_config_2.vm.hostname = "mongo-config-2"
mongo_config_2.vm.box = "bento/ubuntu-18.04"
mongo_config_2.vm.network "private_network", ip: "192.168.33.12"
mongo_config_2.vm.provider "virtualbox" do |vb|
vb.name = "mongo-config-2"
vb.gui = false
vb.memory = "512"
end
mongo_config_2.vm.provision "shell", path: "provision/allhosts.sh", privileged: false
end
config.vm.define "mongo_shard_1" do |mongo_shard_1|
mongo_shard_1.vm.hostname = "mongo-shard-1"
mongo_shard_1.vm.box = "bento/ubuntu-18.04"
mongo_shard_1.vm.network "private_network", ip: "192.168.33.13"
mongo_shard_1.vm.provider "virtualbox" do |vb|
vb.name = "mongo-shard-1"
vb.gui = false
vb.memory = "512"
end
mongo_shard_1.vm.provision "shell", path: "provision/allhosts.sh", privileged: false
end
config.vm.define "mongo_shard_2" do |mongo_shard_2|
mongo_shard_2.vm.hostname = "mongo-shard-2"
mongo_shard_2.vm.box = "bento/ubuntu-18.04"
mongo_shard_2.vm.network "private_network", ip: "192.168.33.14"
mongo_shard_2.vm.provider "virtualbox" do |vb|
vb.name = "mongo-shard-2"
vb.gui = false
vb.memory = "512"
end
mongo_shard_2.vm.provision "shell", path: "provision/allhosts.sh", privileged: false
end
config.vm.define "mongo_shard_3" do |mongo_shard_3|
mongo_shard_3.vm.hostname = "mongo-shard-3"
mongo_shard_3.vm.box = "bento/ubuntu-18.04"
mongo_shard_3.vm.network "private_network", ip: "192.168.33.15"
mongo_shard_3.vm.provider "virtualbox" do |vb|
vb.name = "mongo-shard-3"
vb.gui = false
vb.memory = "512"
end
mongo_shard_3.vm.provision "shell", path: "provision/allhosts.sh", privileged: false
end
config.vm.define "mongo_query_router" do |mongo_query_router|
mongo_query_router.vm.hostname = "mongo-query-router"
mongo_query_router.vm.box = "bento/ubuntu-18.04"
mongo_query_router.vm.network "private_network", ip: "192.168.33.16"
mongo_query_router.vm.provider "virtualbox" do |vb|
vb.name = "mongo-query-router"
vb.gui = false
vb.memory = "512"
end
mongo_query_router.vm.provision "shell", path: "provision/allhosts.sh", privileged: false
end
end
```
### 2. Provisioning Script
The Vagrant configuration above uses a provisioning script for every host. The provisioning script used for each host is **`allhosts.sh`**.
File [**allhosts.sh**](provision/allhosts.sh)
```bash
# Add hostname
sudo bash -c "echo '192.168.33.11 mongo-config-1' >> /etc/hosts"
sudo bash -c "echo '192.168.33.12 mongo-config-2' >> /etc/hosts"
sudo bash -c "echo '192.168.33.13 mongo-shard-1' >> /etc/hosts"
sudo bash -c "echo '192.168.33.14 mongo-shard-2' >> /etc/hosts"
sudo bash -c "echo '192.168.33.15 mongo-shard-3' >> /etc/hosts"
sudo bash -c "echo '192.168.33.16 mongo-query-router' >> /etc/hosts"
# Copy APT sources list
sudo cp /vagrant/sources/sources.list /etc/apt/
sudo cp /vagrant/sources/mongodb-org-4.2.list /etc/apt/sources.list.d/
# Add MongoDB repo key
sudo apt-get install gnupg
wget -qO - https://www.mongodb.org/static/pgp/server-4.2.asc | sudo apt-key add -
# Update Repository
sudo apt-get update
# sudo apt-get upgrade -y
# Install MongoDB
sudo apt-get install -y mongodb-org
# Start MongoDB
sudo service mongod start
```
This provisioning script installs **MongoDB** on each host during the build. Each host gets its own IP address as described earlier.
### 3. Building with Vagrant
Once the provisioning script is ready, Vagrant can build the machines. Build with:
```
$ vagrant up --provider virtualbox
```

The `vagrant up` command initiates the virtual machine build and runs the provisioning script using the `bento/ubuntu-18.04` box. If the box is not present yet, Vagrant downloads it automatically.
### 4. Config Server Configuration
After the build finishes, the hosts are ready to use. SSH into each _**config server**_ host with:
```
$ vagrant ssh mongo_config_1
$ vagrant ssh mongo_config_2
```
**Create an Administrative User**
Enter the mongo shell on each config server.
```
(mongo-config-#) $ mongo
```
Create a user in MongoDB.
```
mongo> use admin
mongo> db.createUser({user: "username", pwd: "<PASSWORD>", roles: [ "root"] } )
```
**Config Server Configuration**
Create a configuration file for each config server. The configuration files can be found at:
- [**mongo-config-1.conf**](config/mongo-config-1.conf)
- [**mongo-config-2.conf**](config/mongo-config-2.conf)
The important points are that the IP address must match each server and that the following settings are added.
```conf
.....
net:
port: 27019
bindIp: <sesuai IP server>
....
....
replication:
replSetName: configReplSet
sharding:
clusterRole: "configsvr"
....
```
Copy the configuration file over the MongoDB configuration file on each config server.
```
(mongo-config-1) $ sudo cp /vagrant/config/mongo-config-1.conf /etc/mongod.conf
```
```
(mongo-config-2) $ sudo cp /vagrant/config/mongo-config-2.conf /etc/mongod.conf
```
Once configured, restart MongoDB on each server.
```
(mongo-config-#) $ sudo systemctl restart mongod
```
On one of the config servers, connect to mongod again.
```
(mongo-config-1) $ mongo 192.168.33.11:27019 -u mongo-admin -p --authenticationDatabase admin
```
Initialize the replica set with the following command, adding the other config server as a member.
```json
rs.initiate(
{ _id: "configReplSet", configsvr: true, members: [
{ _id: 0, host: "192.168.33.11:27019" },
{ _id: 1, host: "192.168.33.12:27019" }
]
}
)
```
```
mongo> rs.initiate( { _id: "configReplSet", configsvr: true, members: [ { _id: 0, host: "192.168.33.11:27019" }, { _id: 1, host: "192.168.33.12:27019" } ] } )
```
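As an optional sanity check (not part of the original steps), the replica set state can also be read from Python with pymongo, assuming pymongo is installed; the password below is a placeholder.
```py
from pymongo import MongoClient

# Connect directly to one config server member (placeholder credentials)
client = MongoClient(
    "192.168.33.11", 27019,
    username="mongo-admin", password="<PASSWORD>",
    authSource="admin",
)

status = client.admin.command("replSetGetStatus")
for member in status["members"]:
    print(member["name"], member["stateStr"])  # e.g. PRIMARY / SECONDARY
```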
### 5. Query Router Configuration
First, SSH into the query router host.
```
$ vagrant ssh mongo_query_router
```
**Query Router Configuration**
The query router uses a different configuration file. Create the configuration file.
- File [**mongo-query-router.conf**](config/mongo-query-router.conf)
```conf
# where to write logging data.
systemLog:
destination: file
logAppend: true
path: /var/log/mongodb/mongos.log
# network interfaces
net:
port: 27017
bindIp: 192.168.33.16
sharding:
configDB: configReplSet/192.168.33.11:27019,192.168.33.12:27019
```
Copy the created configuration file into place on the query router.
```
(mongo-query-router) $ sudo cp /vagrant/config/mongo-query-router.conf /etc/mongos.conf
```
**Query Router Service**
After the configuration file, the next step is to create a service for mongos. Create the `mongos.service` file.
- File [**mongos.service**](config/mongos.service)
```service
[Unit]
Description=Mongo Cluster Router
After=network.target
[Service]
User=mongodb
Group=mongodb
ExecStart=/usr/bin/mongos --config /etc/mongos.conf
# file size
LimitFSIZE=infinity
# cpu time
LimitCPU=infinity
# virtual memory size
LimitAS=infinity
# open files
LimitNOFILE=64000
# processes/threads
LimitNPROC=64000
# total threads (user+kernel)
TasksMax=infinity
TasksAccounting=false
[Install]
WantedBy=multi-user.target
```
### 6. Shard Server Configuration
SSH into each shard server to start the configuration.
```
$ vagrant ssh mongo_shard_1
$ vagrant ssh mongo_shard_2
$ vagrant ssh mongo_shard_3
```
**Create an Administrative User**
Enter the mongo shell on each shard server.
```
(mongo-shard-#) $ mongo
```
Create a user in MongoDB.
```
mongo> use admin
mongo> db.createUser({user: "username", pwd: "<PASSWORD>", roles: [ "root"] } )
```
**Shard Server Configuration**
To configure the shard servers: edit each shard server's configuration file, add the shards to the MongoDB cluster, and then enable sharding at the database and collection level.
1. Edit the shard server configuration files
Create a configuration file for each shard server. The important parts of each configuration file are:
```conf
bindIp: <IP masing-masing server>
....
....
sharding:
clusterRole: "shardsvr"
```
+ File [**mongo-shard-1.conf**](config/mongo-shard-1.conf)
+ File [**mongo-shard-2.conf**](config/mongo-shard-2.conf)
+ File [**mongo-shard-3.conf**](config/mongo-shard-3.conf)
Copy the configuration file into place on each shard server.
```
(mongo-shard-1) $ sudo cp /vagrant/conf/mongo-shard-1.conf /etc/mongod.conf
```
```
(mongo-shard-2) $ sudo cp /vagrant/conf/mongo-shard-2.conf /etc/mongod.conf
```
```
(mongo-shard-3) $ sudo cp /vagrant/conf/mongo-shard-3.conf /etc/mongod.conf
```
2. Add the shards to the MongoDB cluster.
On one of the shard servers, connect to the query router.
```
(mongo-shard-1) $ mongo 192.168.33.16:27017 -u mongo-admin -p --authenticationDatabase admin
```
To add the shards, run the following in the mongo shell.
```
sh.addShard("192.168.33.13:27017")
sh.addShard("192.168.33.14:27017")
sh.addShard("192.168.33.15:27017")
```
3. Enable sharding at the database and collection level
First connect to the query router (from one of the shard servers).
```
(mongo-shard-1) $ mongo 192.168.33.16:27017 -u mongo-admin -p --authenticationDatabase admin
```
To enable sharding at the database level, switch to the database that should be sharded.
```
mongos> use populationData
mongos> sh.enableSharding("populationData")
mongos> db.populationCollection.ensureIndex( { _id: "hashed" } )
mongos> sh.shardCollection( "populationData.populationCollection", { "_id" : "hashed" } )
```
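As an extra check that is not in the original write-up, the registered shards and chunk distribution can be inspected through the query router from Python (pymongo assumed installed, placeholder password):
```py
from pymongo import MongoClient

# Connect through the query router (mongos)
client = MongoClient(
    "192.168.33.16", 27017,
    username="mongo-admin", password="<PASSWORD>",
    authSource="admin",
)

# List the shards registered in the cluster
print(client.admin.command("listShards"))

# Rough view of how chunks are spread across shards
for doc in client["config"]["chunks"].aggregate(
    [{"$group": {"_id": "$shard", "chunks": {"$sum": 1}}}]
):
    print(doc)
```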
## Importing the Dataset
To import the dataset from a CSV file, use the command-line tool that ships with MongoDB.
```
$ mongoimport -h [host:port] -u [username] -d [database_name] -c [collection_name] --type CSV --file [path/to/file.csv] --headerline --authenticationDatabase admin
```
Log in to the query router server to run the command.
```
$ mongoimport -h 192.168.33.16:27017 -u mongo-admin -d populationData -c populationCollection --type CSV --file /vagrant/dataset/data_real.csv --headerline --authenticationDatabase admin
```

The data is imported into the target database.
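To double-check the import (an extra step, not in the original instructions), the document count can be read back through the query router with pymongo:
```py
from pymongo import MongoClient

client = MongoClient(
    "192.168.33.16", 27017,
    username="mongo-admin", password="<PASSWORD>",
    authSource="admin",
)

collection = client["populationData"]["populationCollection"]
print(collection.count_documents({}))  # expected: 12150 documents
print(collection.find_one())           # peek at one imported document
```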
## CRUD Application


The CRUD application implemented on top of the MongoDB cluster is an API (Application Programming Interface) written in Python.
Flask is a web framework for Python that provides the functionality to build web applications, including handling HTTP requests and rendering templates. This implementation uses Flask to build the API in Python.
The API provides the basic operations:
- Create
- Read
- Update
- Delete
plus aggregation operations that use **count** and **sum**.
The API is tested with POSTMAN.

[**API implementation (main.py)**](API/main.py)
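Besides Postman, the endpoints can be exercised with a short Python script using the `requests` library. This is only an illustrative extra: it assumes the Flask app is running locally on its default port 5000 (adjust `BASE` as needed), and the field values are dummy data.
```py
import requests

BASE = "http://127.0.0.1:5000/populationHealthMeasure"  # adjust host/port as needed

# READ: fetch all documents
print(requests.get(BASE).status_code)

# CREATE: insert one dummy record (field names follow main.py)
payload = {
    "unique_id": 99999,
    "3_year_period": "2016-2018",
    "cause_of_death": "Diabetes",
    "race": "All",
    "hispanic_or_latino": "All",
    "gender": "Both",
    "age-adjusted_rate": 12.3,
    "lower_confidence_interval": 10.1,
    "upper_confidence_interval": 14.5,
}
print(requests.post(BASE, json=payload).json())
```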
### Connect Database
The first thing to do is connect to the MongoDB database.
```py
app = Flask(__name__)
app.secret_key = "SECRETKEY"
app.config["MONGO_URI"] = "mongodb://mongo-admin:[email protected]:27017/healthMeasure?retryWrites=false&authSource=admin"
```
### READ Operation
```py
@app.route('/populationHealthMeasure', methods=['GET'])
def get_data():
datas = mongo.db.populationCollection.find()
response = dumps(datas)
return response
```
The read operation fetches all documents. The method used is `GET` with the endpoint `/populationHealthMeasure`.

### CREATE Operation
```py
@app.route('/populationHealthMeasure', methods=['POST'])
def add_data():
request_json = request.json
data_uniqueid = request_json["unique_id"]
data_3Year = request_json["3_year_period"]
data_deathCause = request_json["cause_of_death"]
data_Race = request_json["race"]
data_HispanicLatino = request_json["hispanic_or_latino"]
data_gender = request_json["gender"]
data_ageAdjusted = request_json["age-adjusted_rate"]
data_lowerConfidenceInterval = request_json["lower_confidence_interval"]
data_upperConfidenceInterval = request_json["upper_confidence_interval"]
data_id = mongo.db.populationCollection.insert({
'unique_id' : data_uniqueid,
'3_year_period' : data_3Year,
'cause_of_death' : data_deathCause,
'race' : data_Race,
'hispanic_or_latino' : data_HispanicLatino,
'gender' : data_gender,
'age-adjusted_rate' : data_ageAdjusted,
'lower_confidence_interval' : data_lowerConfidenceInterval,
'upper_confidence_interval' : data_upperConfidenceInterval
})
response = jsonify('Data has added successfully. The id is {}'.format(data_id))
response.status_code = 200
return response
```
The create operation inserts one document into the database. The endpoint is `/populationHealthMeasure` and the method is `POST`. The data sent is a JSON document with the corresponding attributes and their values.

### UPDATE Operation
```py
@app.route('/populationHealthMeasure/<id>', methods=['PUT'])
def update_data(id):
request_json = request.json
data_id = request_json["_id"]
data_uniqueid = request_json["unique_id"]
data_3Year = request_json["3_year_period"]
data_deathCause = request_json["cause_of_death"]
data_Race = request_json["race"]
data_HispanicLatino = request_json["hispanic_or_latino"]
data_gender = request_json["gender"]
data_ageAdjusted = request_json["age-adjusted_rate"]
data_lowerConfidenceInterval = request_json["lower_confidence_interval"]
data_upperConfidenceInterval = request_json["upper_confidence_interval"]
mongo.db.populationCollection.update_one(
{'_id': ObjectId(data_id['$oid']) if '$oid' in data_id else ObjectId(data_id)},
{
'$set' :
{
'unique_id' : data_uniqueid,
'3_year_period' : data_3Year,
'cause_of_death' : data_deathCause,
'race' : data_Race,
'hispanic_or_latino' : data_HispanicLatino,
'gender' : data_gender,
'age-adjusted_rate' : data_ageAdjusted,
'lower_confidence_interval' : data_lowerConfidenceInterval,
'upper_confidence_interval' : data_upperConfidenceInterval
}
}
)
response = jsonify('Data has updated successfully. The id is {}'.format(data_id))
response.status_code = 200
return response
```
The update operation modifies one document identified by its ObjectId. The request body is a JSON document with the attributes and new values to apply; the ObjectId of the document to update must also be included. The endpoint is `/populationHealthMeasure/<id>`, with the ObjectId as the parameter, and the method is `PUT`.

### DELETE Operation
```py
@app.route('/populationHealthMeasure/<id>', methods=['DELETE'])
def delete_data(id):
mongo.db.populationCollection.delete_one({
'_id': ObjectId(id)
})
response = jsonify('Data has deleted successfully. The id is {}'.format(id))
response.status_code = 200
return response
```
The delete operation removes a document by ObjectId. The endpoint is `/populationHealthMeasure/<id>`, with the ObjectId as the parameter, and the method is `DELETE`.

### AGGREGATION Operation 1
```py
@app.route('/populationHealthMeasure/cause_of_death', methods=['GET'])
def get_salmonela():
result = mongo.db.populationCollection.aggregate([
{
"$group": {
"_id": "$cause_of_death",
"count": {"$sum": 1}
}
}
])
response = dumps(result)
return response
```
The first aggregation involves two stages, **group** and **sum**. Its goal is to count how many documents there are per **_cause_of_death_**.
The group stage groups the documents by _cause_of_death_, and the count is then computed with _sum_.

### AGGREGATION Operation 2
```py
# Aggregation Operation
@app.route('/populationHealthMeasure/max_upper_confidence', methods=['GET'])
def get_max_confidence():
result = mongo.db.populationCollection.aggregate([
{
"$group": {
"_id": "$cause_of_death",
"minimum": {"$min": "$upper_confidence_interval"}
}
}
])
response = dumps(result)
return response
```
The second aggregation involves two stages, **group** and **min**. Its goal is to find the minimum upper_confidence_interval per **_cause_of_death_**.
<file_sep># Distributed Database
## Environment
This distributed database implementation uses MySQL Group Replication, with Virtualbox as the virtualization tool and Vagrant for provisioning. All of the work was done on Ubuntu 18.04.
### Installing Vagrant and Virtualbox
```
$ sudo apt-get update
$ sudo apt-get install vagrant
$ sudo apt-get install virtualbox
```
Cek apakah vagrant dan virtualbox sudah terinstall.
```
$ vagrant --version
$ vboxmanage --version
```
Output
```
user@user:
Vagrant 2.0.2
user@user
5.2.32_Ubuntur132056
```
## Database Infrastructure Design
Below is the database infrastructure design to be implemented:
**!REPLACE IMAGE!**

Notes:
### Web Server
The web server in this implementation is the host (local) machine, so its IP follows the host machine. Check the host IP with:
```
$ ip addr
```
Hasil output :

### Proxy Server
The proxy server runs on Virtualbox, with IP 177.5.**17**.**20** and 512 MB of memory. The tool used is ProxySQL, which acts as the _load balancer_.
### Database Server
There are 3 database servers, named db1, db2 and db3, each with 512 MB of memory and the following IPs:
- db1 192.168.127.12
- db2 172.16.58.3
- db3 192.168.127.12
## Implementation
The general implementation steps are as follows.
1. Build each server with Vagrant.
2. Configure the database servers.
3. Configure the proxy server.
4. Configure the web server.
5. Connect the application to the proxy server.
-----------------------
## Application Description
Link: [application](github.com/bayulaxana/project_bdt1)
The application used to exercise the Group Replication setup is a simple web application built with Laravel. Its purpose is to store personal blog posts or notes for personal use only; it is simple enough to illustrate how the database works behind a web app.
The application covers the basic database operations: Create, Read, Update, and Delete.
Below are a few screenshots of the web application.





## Vagrant
Sebelum melakukan build virtual machine, buat terlebih dahulu file konfigurasi **Vagrantfile**.
Link [Vagrantfile](Vagrantfile)
Build menggunakan command :
```
$ vagrant up
```
Perintah `vagrant up` akan menginisiasi building virtual machine menggunakan box `bento/ubuntu-16.04`. Apabila belum terdapat box `bento/ubuntu-16.04`, maka vagrant akan otomatis mengunduhnya.
Proses ini akan membuat virtual machine dengan nama masing-masing "db1", "db2", "db3", dan "proxy", sekaligus melakukan provisioning terhadap masing-masing server untuk memperoleh library-library yang diperlukan.
File provision :
- [db1](DB_server/deployMySQL1.sh)
- [db2](DB_server/deployMySQL2.sh)
- [db3](DB_server/deployMySQL3.sh)
- [proxy](Proxy_server/deployProxySQL.sh)
Untuk memastikan apakah masing-masing server telah sukses dibuat, periksa pada direktori `.vagrant/machines`.
```
$ ls -1 .vagrant/machines
```
Akan terdapat empat direktori dengan nama masing-masing server.

Sampai tahap ini, masing-masing server telah berhasil dibuat.
## Database Server
With all the servers built, it is time to configure each database server.
Before configuring, first generate a UUID to be used by the replication group.
```
(db1)$ uuidgen
```
Each database server has a nearly identical configuration file.
- [db1.cnf](DB_server/db1.cnf)
- [db2.cnf](DB_server/db2.cnf)
- [db3.cnf](DB_server/db3.cnf)
### Group Replication Configuration
In every configuration file, set `loose-group_replication_start_on_boot = ON` so group replication starts as soon as MySQL starts.
```
...
!includedir /etc/mysql/conf.d/
!includedir /etc/mysql/mysql.conf.d/
[mysqld]
# General replication settings
gtid_mode = ON
enforce_gtid_consistency = ON
master_info_repository = TABLE
relay_log_info_repository = TABLE
binlog_checksum = NONE
log_slave_updates = ON
log_bin = binlog
binlog_format = ROW
transaction_write_set_extraction = XXHASH64
loose-group_replication_bootstrap_group = OFF
loose-group_replication_start_on_boot = ON
loose-group_replication_ssl_mode = REQUIRED
loose-group_replication_recovery_use_ssl = 1
...
```
### Shared group configuration
This is where the members of the replication group (the other database servers) are configured. Fill `loose-group_replication_group_name` with the generated UUID, `loose-group_replication_ip_whitelist` with the IPs of the database servers, and `loose-group_replication_group_seeds` with those IPs followed by port 33061.
```
# Shared replication group configuration
loose-group_replication_group_name = "f02df904-502b-4d4a-a170-2f07faf816a8"
loose-group_replication_ip_whitelist = "192.168.127.12, 172.16.58.3, 192.168.127.12"
loose-group_replication_group_seeds = "192.168.127.12:33061, 172.16.58.3:33061, 192.168.127.12:33061"
```
### Multi-primary mode
In multi-primary mode, every database server is allowed to write to the database.
```
# Single or Multi-primary mode? Uncomment these two lines
# for multi-primary mode, where any host can accept writes
loose-group_replication_single_primary_mode = OFF
loose-group_replication_enforce_update_everywhere_checks = ON
```
### Host-specific configuration
Each server has its own host-specific settings.
```
# Host specific replication configuration
server_id = 1
bind-address = "192.168.127.12"
report_host = "192.168.127.12"
loose-group_replication_local_address = "192.168.127.12:33061"
```
The things to pay attention to are:
- `server_id` is set to each server's own **id**.
- `bind-address` is set to each server's own IP.
- `report_host` is set to each server's own IP.
- `loose-group_replication_local_address` is set to each server's own IP followed by port 33061.
### Update the MySQL configuration file
On each server:
```
(db1)$ sudo cp /vagrant/DB_server/db1.cnf /etc/mysql/my.cnf
```
```
(db2)$ sudo cp /vagrant/DB_server/db2.cnf /etc/mysql/my.cnf
```
```
(db3)$ sudo cp /vagrant/DB_server/db3.cnf /etc/mysql/my.cnf
```
Restart the MySQL service on all servers.
### Activating the Group Replication Plugin
On every server, log in to MySQL as `root` with password `<PASSWORD>`, and run `group_rep_member.sql` to create the replication user.
```
$ mysql -u root -padmin < DB_server/group_rep_member.sql
```
File [group_rep_member.sql](DB_server/group_rep_member.sql)
Before starting Group Replication, go to **one of the DB servers** (e.g. db1) and bootstrap the group so the servers become aware of each other.
```
(db1)$ mysql -u root -padmin < DB_server/start_bootstrap.sql
```
File [start_bootstrap.sql](DB_server/start_bootstrap.sql)
Then, on the other servers, start group replication.
```
mysql> START GROUP_REPLICATION;
```
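To confirm from outside the mysql shell that all three members reached the ONLINE state, here is a small illustrative Python check using the PyMySQL package (assumed installed) and the root credentials used in this guide; the host is one of the db addresses listed above.
```python
import pymysql

# Connect to one of the group members (db1's address as listed above)
conn = pymysql.connect(host="192.168.127.12", user="root", password="admin")

with conn.cursor() as cur:
    cur.execute(
        "SELECT MEMBER_HOST, MEMBER_PORT, MEMBER_STATE "
        "FROM performance_schema.replication_group_members"
    )
    for host, port, state in cur.fetchall():
        print(host, port, state)   # every member should report ONLINE

conn.close()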
## Proxy Server
The proxy server acts as the load balancer, using ProxySQL. ProxySQL was already installed by the provisioning script during `vagrant up`.
File [ProxySQL provisioning]()
Start the ProxySQL service first.
```
(proxy)$ sudo systemctl start proxysql
```
### Konfigurasi ProxySQL Sebagai Admin
Pada server proxy, masuk ke dalam mysql dengan akun `admin` menggunakan password `<PASSWORD>` dan jalankan file `proxysql.sql`.
```
(proxy)$ mysql -u admin -padmin -h 127.0.0.1 -P 6032 < Proxy_server/proxysql.sql
```
File [proxysql.sql](Proxy_server/deployProxySQL.sh)
That .sql file sets up monitoring of all available database servers (all nodes), so ProxySQL knows how to distribute traffic across them.
```sql
INSERT INTO mysql_group_replication_hostgroups (writer_hostgroup, backup_writer_hostgroup, reader_hostgroup, offline_hostgroup, active, max_writers, writer_is_also_reader, max_transactions_behind) VALUES (2, 4, 3, 1, 1, 3, 1, 100);
```
Menambahkan database server agar diketahui oleh ProxySQL.
```sql
INSERT INTO mysql_servers(hostgroup_id, hostname, port) VALUES (2, '192.168.127.12', 3306);
INSERT INTO mysql_servers(hostgroup_id, hostname, port) VALUES (2, '172.16.58.3', 3306);
INSERT INTO mysql_servers(hostgroup_id, hostname, port) VALUES (2, '192.168.127.12', 3306);
```
To allow ProxySQL to monitor the group, an additional configuration step is needed on one of the database servers.
```
(db1)$ mysql -u root -p < Proxy_server/addition_to_sys.sql
```
Then add a new monitoring user.
```
(db1) mysql> CREATE USER 'monitor'@'%' IDENTIFIED BY 'monitorpassword';
(db1) mysql> GRANT SELECT on sys.* to 'monitor'@'%';
(db1) mysql> FLUSH PRIVILEGES;
```
Setelah itu dapat menambahkan user baru pada ProxySQL.
```sql
INSERT INTO mysql_users(username, password, default_hostgroup) VALUES ('bayulaxanauser', '<PASSWORD>', 2);
```
> User yang ditambahkan adalah `bayulaxanauser` dengan password `<PASSWORD>`
### Database Server Configuration
To allow ProxySQL to connect to the database servers, each database server needs the same user that was created in ProxySQL.
In this case the username is `bayulaxanauser`. On one of the database servers, log in to MySQL as root.
```
(db1)$ mysql -u root -p
```
Kemudian eksekusi perintah dibawah.
```sql
CREATE USER 'bayulaxanauser'@'%' IDENTIFIED BY '<PASSWORD>';
GRANT ALL PRIVILEGES on databasename.* to 'bayulaxanauser'@'%';
FLUSH PRIVILEGES;
```
The ProxySQL configuration is now done. To confirm, list the connected servers by running the query below on the ProxySQL admin interface.
```
mysqlAdmin> SELECT hostgroup_id, hostname, status FROM runtime_mysql_servers;
```
Output
```
+--------------+-------------+---------+
| hostgroup_id | hostname | status |
+--------------+-------------+---------+
| 2 | 17192.168.3.11 | ONLINE |
| 2 | 172.16.58.3 | ONLINE |
| 2 | 192.168.127.12 | ONLINE |
+--------------+-------------+---------+
```
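As an extra check that is not part of the original steps, you can verify that ProxySQL actually spreads queries across the group by connecting to its traffic port (6033) from Python. This is an illustrative sketch: PyMySQL is assumed installed, the host is the ProxySQL address used elsewhere in this guide, and the password is a placeholder.
```python
import pymysql

# Connect to the ProxySQL traffic port (6033), not the admin port (6032)
conn = pymysql.connect(host="172.16.31.10", port=6033,
                       user="bayulaxanauser", password="<PASSWORD>")

with conn.cursor() as cur:
    for _ in range(5):
        cur.execute("SELECT @@hostname")
        print(cur.fetchone()[0])   # prints which backend served each query

conn.close()
```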
## Web Server
The web server in this implementation is the host machine itself; it points its MySQL connection at ProxySQL. For the web server to work properly, the tools to install are:
- Apache
- PHP
### Installing Apache
Install the Apache web server with the following commands.
```
$ sudo apt update
$ sudo apt install apache2 -y
```
Ijinkan firewall untuk menggunakan apache.
```
$ sudo ufw app info "Apache Full"
$ sudo ufw allow in "Apache Full"
```
Periksa dengan mengakses IP 127.0.0.1 pada browser. Seharusnya muncul halaman seperti gambar di bawah yang menandakan instalasi Apache sukses.

### Instalasi PHP
Selanjutnya adalah proses instalasi PHP, yang nantinya akan digunakan sebagai backend server dan integrasi laravel.
Untuk menginstal php berserta modul-modul yang dibutuhkan oleh laravel, gunakan perintah :
```
$ sudo apt install php7.2-common php7.2-cli php7.2-gd php7.2-mysql php7.2-curl php7.2-intl php7.2-mbstring php7.2-bcmath php7.2-imap php7.2-xml php7.2-zip
```
### Instalasi Composer
Composer dibutuhkan untuk menginisiasi project laravel.
```
$ curl -sS https://getcomposer.org/installer | sudo php -- --install-dir=/usr/local/bin --filename=composer
```
### Connecting the Laravel database to ProxySQL
For the Laravel project's database to go through ProxySQL, the DB_HOST in the Laravel environment file must be adjusted. Edit the .env file, in particular the DB section, to match the ProxySQL IP.
```env
DB_CONNECTION=mysql
DB_HOST=172.16.31.10
DB_PORT=6033
DB_DATABASE=playground
DB_USERNAME=bayulaxanauser
DB_PASSWORD=<PASSWORD>
```
The ProxySQL host has IP 172.16.31.10 and is reached on port 6033, with database name `playground` and username `bayulaxanauser`.
To start the web app, use the command
```
$ php artisan serve
```
and you will be directed to `127.0.0.1:8000` to access the web content.
<file_sep># Redis Cluster Implementation
**Name: <NAME>**
**NRP : 05111740000020**
# A. Task Description
## General Description
- In this assignment, we are asked to implement Redis servers and use Sentinel for high availability.
- The Redis server is used to cache the content of a WordPress website so it can be served faster.
- We also compare the performance of a WordPress server with Redis against one without Redis.
## Details
1. Build a Redis cluster with 3 nodes. Write down the configuration files used.
2. Build a web server, then:
- Install Wordpress
- Configure Wordpress to use the Redis cache
3. Build another web server with the same hardware configuration and install Wordpress. Do not enable the Redis cache on this server.
4. Run tests with JMeter to compare the load times of the two web servers. Draw conclusions from the tests.
5. Also run a test to show that failover with Redis Sentinel works, by taking down one of the Redis servers and checking which new master node gets elected.
## Cluster Layout
The servers use the following IP addressing scheme:
- For an IP A.B.C.D
- A and B are free to choose
- C is the cohort year
- D is the last 3 digits of the student ID (NRP)
### Cluster Architecture

### Cluster Details
| Node | Node Name | IP Address | Operating System | RAM |
| --------------- | ------------ | ------------| -------------- | ----------- |
| Wordpress Non Redis | wordpress_1 | 172.16.58.3 | Ubuntu-18.04 | 512 MB |
| Wordpress Redis | wordpress_2 | 192.168.3.11 | Ubuntu-18.04 | 512 MB |
| Redis 1 (Master) | redis_1 | 172.16.17.32 | Ubuntu-18.04 | 512 MB |
| Redis 2 (Slave) | redis_2 | 172.16.58.3 | Ubuntu-18.04 | 512 MB |
| Redis 3 (Slave) | redis_3 | 172.16.31.10 | Ubuntu-18.04 | 512 MB |
# B. Environment
- Operating system: Ubuntu 18.04
- Virtual machine: Virtualbox
- Vagrant 2.0.2
- Redis 5.0.7
The implementation was done entirely on **Ubuntu 18.04**. The virtual machine used is **Virtualbox** 5.2 with the **_bento/ubuntu-18.04_** box, provisioned with **Vagrant** 2.0.2.
# C. Implementation
## Vagrant Configuration
Before implementing Redis and the web servers, the first thing to do is create the Vagrant configuration file that defines the nodes described above.
```
$ vagrant init
```
This command creates a `Vagrantfile`, which is then filled with the cluster configuration. The Vagrantfile configuration is as follows.
- [**File Vagrantfile**](Vagrantfile)
### Provisioning Script
The Vagrant configuration uses provisioning scripts. The web servers use:
- [**File wordpress_provision.sh**](provision/wordpress_provision.sh)
while the Redis servers use:
- [**File redis_provision.sh**](provision/redis_provision.sh)
## Building with Vagrant
Once the provisioning scripts are ready, Vagrant can build the machines. Build with:
```
$ vagrant up --provider virtualbox
```

_Proses provisioning_

The `vagrant up` command initiates the virtual machine build and runs the provisioning scripts using the `bento/ubuntu-18.04` box. If the box is not present yet, Vagrant downloads it automatically.
## Redis Cluster Configuration Files
The first step is to configure the Redis servers. The configuration procedure is the same for every Redis server.
SSH into each Redis server
```
$ vagrant ssh redis_1
```
_Tampilan awal_

> log in to every Redis server
Before starting, however, some configuration files need to be created first: the configuration files for Redis and for Redis Sentinel.
**Konfigurasi Server Redis**
- [**File konfigurasi redis1**](config/redis-1.conf)
```conf
bind 172.16.17.32
port 6379
dir "/etc/redis"
```
- [**File konfigurasi redis2**](config/redis-2.conf)
```conf
bind 172.16.58.3
port 6379
dir "/etc/redis"
slaveof 172.16.17.32 6379
```
- [**File konfigurasi redis3**](config/redis-3.conf)
```conf
bind 172.16.31.10
port 6379
dir "/etc/redis"
slaveof 172.16.17.32 6379
```
**File Konfigurasi Redis Sentinel**
- [**File konfigurasi redis sentinel 1**](config/sentinel-1.conf)
```conf
# Host and port we will listen for requests on
bind 172.16.17.32
port 26379
sentinel monitor redis-cluster 172.16.17.32 6379 2
sentinel down-after-milliseconds redis-cluster 5000
sentinel parallel-syncs redis-cluster 1
sentinel failover-timeout redis-cluster 10000
```
- [**File konfigurasi redis sentinel 2**](config/sentinel-2.conf)
```conf
# Host and port we will listen for requests on
bind 172.16.58.3
port 26379
sentinel monitor redis-cluster 172.16.17.32 6379 2
sentinel down-after-milliseconds redis-cluster 5000
sentinel parallel-syncs redis-cluster 1
sentinel failover-timeout redis-cluster 10000
```
- [**File konfigurasi redis sentinel 3**](config/sentinel-3.conf)
```conf
# Host and port we will listen for requests on
bind 172.16.31.10
port 26379
sentinel monitor redis-cluster 172.16.17.32 6379 2
sentinel down-after-milliseconds redis-cluster 5000
sentinel parallel-syncs redis-cluster 1
sentinel failover-timeout redis-cluster 10000
```
**File Service Redis**
- [**File service redis**](service/redis.service)
```
[Unit]
Description=Redis In-Memory Data Store
After=network.target
[Service]
User=redis
Group=redis
ExecStart=/usr/local/bin/redis-server /etc/redis/redis.conf
ExecStop=/usr/local/bin/redis-cli shutdown
Restart=always
[Install]
WantedBy=multi-user.target
```
**File Service Redis Sentinel**
- [**File service redis sentinel**](service/redis-sentinel.service)
```
[Unit]
Description=Redis Sentinel
After=network.target
[Service]
User=redis
Group=redis
ExecStart=/usr/local/bin/redis-server /etc/redis-sentinel.conf --sentinel
ExecStop=/usr/local/bin/redis-cli shutdown
Restart=always
[Install]
WantedBy=multi-user.target
```
Next, create a script to be run on each Redis server. The purpose of this script is to install the configuration and enable the redis and redis-sentinel services on each Redis server.
- [**File script redis 1**](bash/redis1.sh)
- [**File script redis 2**](bash/redis2.sh)
- [**File script redis 3**](bash/redis3.sh)
Contoh file script :
```bash
sudo cp /vagrant/config/redis-1.conf /etc/redis/redis.conf
sudo cp /vagrant/config/sentinel-1.conf /etc/redis-sentinel.conf
sudo cp /vagrant/service/redis.service /etc/systemd/system/redis.service
sudo cp /vagrant/service/redis-sentinel.service /etc/systemd/system/redisentinel.service
sudo adduser --system --group --no-create-home redis
sudo mkdir /var/lib/redis
sudo chown redis:redis /var/lib/redis
sudo chmod 770 /var/lib/redis
sudo systemctl start redis
sudo systemctl status redis
sudo chmod 777 /etc/redis-sentinel.conf
sudo systemctl start redisentinel
sudo systemctl status redisentinel
sudo chmod -R 777 /etc/redis
sudo systemctl restart redis
sudo systemctl status redis
```
Untuk menjalankan file script, lakukan perintah berikut pada server yang bersesuaian.
```
(redis-1) $ sudo bash /vagrant/bash/redis1.sh
```

Jika semua script telah dijalankan pada masing-masing server redis, maka sudah terinstall redis dan redis-sentinel.

Check on each server whether the redis and redis sentinel services are running.
```
(redis-1) $ sudo systemctl status redis
```

```
(redis-1) $ sudo systemctl status redisentinel
```
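Besides `systemctl`, the replication roles can also be checked from Python with the redis-py client. This is an optional extra (`pip install redis` assumed); the addresses come from the cluster table above.
```python
import redis

nodes = {
    "redis_1": "172.16.17.32",
    "redis_2": "172.16.58.3",
    "redis_3": "172.16.31.10",
}

for name, host in nodes.items():
    info = redis.Redis(host=host, port=6379).info("replication")
    # redis_1 should report role master, the others slave
    print(name, info["role"])
```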

## Web Server Configuration
With all the Redis servers configured, it is time to configure the web servers. The web servers will run WordPress, so they need the rest of the app stack:
- web server
- backend (PHP)
- database
The web server used is apache2, the backend is PHP, and the database is MySQL.
On every web server, the steps are:
### Installing Apache2
The apache2 installation was already done during provisioning.
```bash
# Apache Installation
$ sudo apt install apache2 -y
$ sudo ufw allow in "Apache Full"
```

Cek dengan mengetikkan alamat IP web server pada browser. Jika berhasil maka akan muncul halaman awal dari apache.

### Installasi PHP dan PHP Redis Extension
```
$ sudo apt install php libapache2-mod-php php-mysql php-pear php-dev -y
$ sudo a2enmod mpm_prefork && sudo a2enmod php7.2
$ sudo pecl install redis
```


Setelah PHP Redis Extension terinstall, tambahkan perintah **`extension=redis.so`** pada file **`/etc/php/7.2/apache2/php.ini`**.

### Installing MySQL
MySQL is used as the WordPress database. To install MySQL, the commands are as follows.
```
$ sudo debconf-set-selections <<< 'mysql-server mysql-server/root_password password <PASSWORD>'
$ sudo debconf-set-selections <<< 'mysql-server mysql-server/root_password_again password <PASSWORD>'
$ sudo apt install mysql-server -y
```

Next, configure MySQL and create the database for WordPress. First log in to MySQL:
```
$ mysql -u root -padmin
```
The configuration statements are as follows.
```sql
CREATE DATABASE wordpress DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci;
CREATE USER 'wordpressuser'@'%' IDENTIFIED BY 'admin';
GRANT ALL PRIVILEGES on wordpress.* to 'wordpressuser'@'%';
FLUSH PRIVILEGES;
```
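To make sure the database and user work before installing WordPress, here is a quick, illustrative connection test from Python run on the web server itself (PyMySQL assumed installed; credentials as created above):
```python
import pymysql

# Credentials as created in the SQL statements above
conn = pymysql.connect(host="127.0.0.1", user="wordpressuser",
                       password="admin", db="wordpress")

with conn.cursor() as cur:
    cur.execute("SHOW TABLES")
    print(cur.fetchall())   # empty for now; WordPress will create its tables

conn.close()
```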

### Installing WordPress
With all prerequisites installed, it is time to install WordPress. To do so, run the following.
```
$ cd /tmp
$ wget -c http://wordpress.org/latest.tar.gz
$ tar -xzvf latest.tar.gz
```

Kemudian langkah selanjutnya adalah mengubah file permission dari file/folder wordpress dan menyalin file konfigurasi wordpress.
```
$ sudo mkdir -p /var/www/wordpress
$ sudo mv wordpress/* /var/www/wordpress
```
Berikut adalah file konfigurasi untuk masing-masing web server:
- [**File config wordpress1**](wordpress_config/wp-config.php)
- [**File config wordpress2**](wordpress_config/wp-config2.php)
Pada file konfigurasi tersebut, diatur nama database sesuai setting mysql sebelumnya.
Contoh pada file [file config wordpress1](wordpress_config/wp-config.php) :

```
$ sudo cp /vagrant/wp-config.php /var/www/wordpress/
$ sudo chown -R www-data:www-data /var/www/wordpress/
$ sudo chmod -R 755 /var/www/wordpress/
$ sudo systemctl restart apache2
```
Instalasi wordpress sudah selesai. Sekarang coba masuk pada wordpress untuk mendaftarkan akun. Pada browser, pergi ke alamat `<IP_web_server>/index.php`.
Ikuti langkah-langkah yang tertera.



Akan muncul halaman awal wordpress

## Enabling the Redis Cache on One of the Web Servers
The web server that uses the Redis cache is the one with IP address 192.168.3.11 (wordpress_2). To enable the Redis cache in WordPress, install the **Redis Object Cache** plugin.
Open the WordPress dashboard at that IP address.

On the plugins page, install the **Redis Object Cache** plugin.

Add the following lines to **`/var/www/wordpress/wp-config.php`**. They connect WordPress to Redis.

> These lines must be placed directly below **`define ('DB_COLLATE', '')`**
Check the **Redis Object Cache** Diagnostics status to see whether it is connected. If it is, it looks like this.

# <NAME>
## Fail-Over Testing
Fail-over is tested by shutting down the Redis master service or by suspending it temporarily. This time, the Redis master service will be suspended temporarily.
On the Redis master server, type the following command.
```
$ redis-cli -h 172.16.17.32 -p 6379 DEBUG sleep 60
```

This puts the Redis service on the master server to sleep for 60 seconds.
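The other trigger mentioned above, stopping the master's Redis service outright, can be done through systemd, since the provision scripts in this repository install Redis as a `redis` systemd service:
```bash
# Permanent variant of the fail-over trigger (run on the master)
sudo systemctl stop redis
```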
### Verifying the Fail-Over
To check whether the fail-over succeeded, log in to the Redis slave servers (redis_2 and redis_3), open **`redis-cli`**, and run `info replication`.
_Result on server redis_2_

_Result on server redis_3_

Notice that it is redis_2 that has become the master, as shown by **`role:master`** on redis_2.
## Load Testing with JMeter
### 50-Connection Test

### 100 + 20 Connection Test

### 200 + 20 Connection Test

### Test Conclusions
Based on the data collected, the WordPress instance that uses the Redis Object Cache tends to be slower than the one without it. This can be seen in the `Average` results:
- WordPress without Redis: 20, 19, 20
- WordPress with Redis: 27, 26, 26
The averages for every test run are better without Redis. This probably happens because the number of requests to the Redis server tends to be higher than the number of requests that would go to MySQL itself, so the growing volume of Redis requests overloads the server. Another possibility is that the RAM allocated to the server (512 MB) is too small for a Redis server.
A suggestion for future work is to increase the RAM and storage allocated to Redis, since Redis is an in-memory database (it uses memory as its storage).
<file_sep>INSERT INTO mysql_servers(hostgroup_id, hostname, port) VALUES (2, '172.16.31.10', 3306);
INSERT INTO mysql_servers(hostgroup_id, hostname, port) VALUES (2, '172.16.31.10', 3306);
INSERT INTO mysql_servers(hostgroup_id, hostname, port) VALUES (2, '192.168.127.12', 3306);
<file_sep>### Distributed Databases Final Exam (EAS)
# TiDB Cluster Implementation
**Name: <NAME>**
**NRP : 05111740000020**
## Table of Contents
- [A. Task Description](#a-deskripsi-tugas)
+ [General Description](#deskripsi-umum)
+ [IP Allocation Rules](#ketentuan-pembagian-ip)
- [B. Node/Server Allocation](#b-pembagian-nodeserver)
- [C. Architecture Design](#c-desain-arsitektur)
- [D. Implementation](#d-implementasi)
+ [Vagrant](#vagrant)
+ [Provision Script](#script-provision)
+ [Vagrant Build](#build-vagrant)
+ [Node/Server Configuration](#konfigurasi-nodeserver)
- [E. Application Use](#e-pemanfaatan-aplikasi)
+ [Application Description](#deskripsi-aplikasi)
+ [Create Operation](#operasi-create)
+ [Read Operation](#operasi-read)
+ [Update Operation](#operasi-update)
+ [Delete Operation](#operasi-delete)
- [F. Performance Testing](#f-uji-performa)
+ [Database Performance Test (Sysbench)](#uji-performa-database-sysbench)
+ [Application Performance Test (JMeter)](#uji-performa-aplikasi-jmeter)
- [G. Monitoring with Grafana and Prometheus](#g-monitoring-grafana-dan-prometheus)
+ [Installing Node Exporter](#instalasi-node-exporter)
+ [Installing Prometheus](#instalasi-prometheus)
+ [Installing Grafana](#instalasi-grafana)
+ [Configuring Grafana](#konfigurasi-grafana)
+ [Importing Grafana Dashboards](#import-dashboard-grafana)
+ [Dashboard Results](#hasil-dashboard)
- [H. Failover Test](#h-uji-failover)
+ [Checking the Leader Node](#cek-node-leader)
+ [Stopping the Leader's Service](#mematikan-service-leader)
+ [Checking the Result](#cek-hasil)
## A. Task Description
### General Description
- Implement a distributed database system architecture using TiDB (with the requirement that there are 3 PD servers).
- Use the distributed database in an application. The application can be one of:
+ A CMS application (install and configure it)
+ A self-built application implementing CRUD (web-based or an API)
- Test the performance of the application and the database (with at least 3 variations):
+ Application: using JMeter
+ Database: using Sysbench
- Add a monitoring dashboard using Grafana
- Write documentation containing:
+ The architecture design (including IP allocation)
+ How to use the application
+ Performance tests
+ A fail-over test
### IP Allocation Rules
The cluster's first IP uses the format A.B.**C**.**D**, where A and B are free to choose, **C** is the **enrollment year**, and **D** comes from the **NRP**.
In this implementation, the IP scheme used is as follows:
- A = 172
- B = 5
- C = 17 (enrollment year)
- D = 20 (NRP)
IP: **172.16.17.32**
## B. Node/Server Allocation
Six nodes/servers are used in this TiDB cluster implementation. The details of each are presented in the table below.
| Node | Operating System | Memory | IP Address | Services |
| ------ | -------------- | ------ | ----------- | ----------------- |
| Node 2 | CentOS 7 | 512 MB | 172.16.17.32 | Node Exporter, TiDB, PD, Grafana, and Prometheus |
| Node 1 | CentOS 7 | 512 MB | 172.16.17.32 | Node Exporter, PD |
| Node 3 | CentOS 7 | 512 MB | 172.16.58.3 | Node Exporter, PD |
| Node 4 | CentOS 7 | 512 MB | 192.168.127.12 | Node Exporter, TiKV |
| Node 5 | CentOS 7 | 512 MB | 172.16.58.3 | Node Exporter, TiKV |
| Node 6 | CentOS 7 | 512 MB | 192.168.3.11 | Node Exporter, TiKV |
## C. Architecture Design
The architecture used in this implementation is shown below.

The services running on each node are shown in the following figure.

## D. Implementation
### Vagrant
Before implementing the TiDB cluster, the first thing to do is create the Vagrant configuration describing the machines to be used, following the description above.
```
$ vagrant init
```
This command generates a Vagrantfile, which is then filled in with the node/server configuration. The Vagrantfile configuration is as follows.
- [**Vagrantfile**](VagrantFile)
### Provision Script
The Vagrantfile uses provisioning to install the services required on each node/server. The provision script can be seen in the following file.
- [**Provision script**](provision/bootstrap.sh)
### Vagrant Build
Once the provision script has been created, Vagrant is ready to build. Build with:
```
$ vagrant up --provider virtualbox
```
This is what the Vagrant build looks like using the provision script created above.
_Downloading the centos7 box_
The vagrant up command starts building the virtual machines and runs the provision script using the geerlingguy/centos7 box. If the geerlingguy/centos7 box is not present yet, Vagrant downloads it automatically.

_Powering on the VMs_

_Provisioning_

### Node/Server Configuration
Once all nodes/servers have been built, it is time to configure them. Enter each node with `vagrant ssh <node_name>`. Example:
```
$ vagrant ssh node1
```
Nodes 1, 2, and 3 act as Placement Drivers. To start the PD service on these nodes, enter the TiDB installation directory:
```
$ cd tidb-v3.0-linux-amd64
```
then run the following commands:
On **Node 1**:
```bash
./bin/pd-server --name=pd1 \
--data-dir=pd \
--client-urls="http://172.16.17.32:2379" \
--peer-urls="http://172.16.17.32:2380" \
--initial-cluster="pd1=http://172.16.17.32:2380,pd2=http://172.16.17.32:2380,pd3=http://172.16.58.3:2380" \
--log-file=pd.log &
```
On **Node 2**:
```bash
./bin/pd-server --name=pd2 \
--data-dir=pd \
--client-urls="http://172.16.17.32:2379" \
--peer-urls="http://172.16.17.32:2380" \
--initial-cluster="pd1=http://172.16.17.32:2380,pd2=http://172.16.17.32:2380,pd3=http://172.16.58.3:2380" \
--log-file=pd.log &
```
On **Node 3**:
```bash
./bin/pd-server --name=pd3 \
--data-dir=pd \
--client-urls="http://172.16.58.3:2379" \
--peer-urls="http://172.16.58.3:2380" \
--initial-cluster="pd1=http://172.16.17.32:2380,pd2=http://172.16.17.32:2380,pd3=http://172.16.58.3:2380" \
--log-file=pd.log &
```
Nodes 4, 5, and 6 act as TiKV servers. To start the TiKV service on these nodes, enter the TiDB installation directory:
```
[node4]$ cd tidb-v3.0-linux-amd64
```
then run the following commands:
On **Node 4**:
```bash
./bin/tikv-server --pd="172.16.17.32:2379,172.16.17.32:2379,172.16.58.3:2379" \
--addr="192.168.127.12:20160" \
--data-dir=tikv \
--log-file=tikv.log &
```
On **Node 5**:
```bash
./bin/tikv-server --pd="172.16.17.32:2379,172.16.17.32:2379,172.16.58.3:2379" \
--addr="172.16.58.3:20160" \
--data-dir=tikv \
--log-file=tikv.log &
```
On **Node 6**:
```bash
./bin/tikv-server --pd="172.16.17.32:2379,172.16.17.32:2379,172.16.58.3:2379" \
--addr="192.168.3.11:20160" \
--data-dir=tikv \
--log-file=tikv.log &
```
The TiDB server itself is placed on node 1. To start it, run the following on node 1 (in the same directory).
**Node 1**:
```bash
./bin/tidb-server --store=tikv \
--path="172.16.17.32:2379" \
--log-file=tidb.log &
```
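To confirm the cluster is up, the TiDB server can be queried with any MySQL client; a minimal sketch, assuming the default TiDB port 4000 on node 1:
```bash
# Should print the TiDB version banner if TiDB, PD, and TiKV are all connected
mysql -h 172.16.17.32 -P 4000 -u root -e "SELECT tidb_version();"
```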
## E. Application Use
### Application Description
The application used to exercise the TiDB cluster is a simple web application built with the Laravel framework. Its purpose is to store personal blog posts or notes for personal use only, which is simple enough to show how the database is used by a web application.
The application covers the basic database operations: Create, Read, Update, and Delete. Some screenshots of the web application are shown below.


### Laravel Configuration
What needs configuring in Laravel is the database connection. In the **`.env`** file, point the database connection at node1's IP on port 4000, and adjust the database name.

Then run the database migration.
```php
php artisan migrate
```
### Create Operation
The **CREATE** operation shown here adds a new post. The form looks like the image below.

When it is saved successfully, a notification like the one below appears.

### Read Operation
For the **READ** operation, the posts created earlier are displayed. It looks like this.

### Update Operation
Next, the **UPDATE** operation edits the contents of a previously created post; here, the post body is changed. The form looks like the image below.

When it is saved successfully, a notification like the one below appears.

### Delete Operation
The **DELETE** operation deletes a previously created post. When deleting, a warning like the one below is shown.

When it is deleted successfully, a notification like the one below appears.

## F. Performance Testing
### Database Performance Test (Sysbench)
Database performance is tested with sysbench. The variation tested is the number of PD servers in the cluster (1, 2, and 3 PD servers).
**Installing Sysbench**
- Install sysbench on any PD cluster node by running the following commands.
```
$ curl -s https://packagecloud.io/install/repositories/akopytov/sysbench/script.rpm.sh | sudo bash
$ sudo yum -y install sysbench
```
- Next, download the configuration file and the TiDB test suite by cloning the GitHub repo:
```
$ git clone https://github.com/pingcap/tidb-bench.git
```
**Running the Test**
- First, edit the configuration file so that it matches the TiDB connection on node1. Edit the `config` file.
```
$ cd tidb-bench/sysbench
$ nano config
```
Make it look like the image below:

- Next, create the `sbtest` database on TiDB (**Node1**). Connect with the MySQL client to create the `sbtest` database.
```
$ mysql -h 172.16.17.32 -P 4000 -u root
```
```
mysql[node1]> CREATE DATABASE sbtest;
```
- Prepare the test by running:
```
$ ./run.sh point_select prepare 100
```
- Then run the test and wait for the process to finish.
```
$ ./run.sh point_select run 100
```
- The test results can be read from the log file produced by the `run` command.
**Test Results**
The sysbench tests over the different PD cluster sizes gave the following results.
| Number of PD Servers | Average latency (ms) | Total queries (per second) |
| ----------------- | ---------------------- | ----------------------- |
| 1 PD Cluster | 19.31 ms | 5172.43 query |
| 2 PD Cluster | 14.80 ms | 6756.76 query |
| 3 PD Cluster | 10.67 ms | 9369.73 query |
The full test output can be seen in the following files.
- [1 PD cluster result](sysbench_res/1_PD.log)
- [2 PD cluster result](sysbench_res/2_PD.log)
- [3 PD cluster result](sysbench_res/3_PD.log)
**Conclusion**
Judging only by average latency and total queries per second, the results show that the more PD servers the cluster has, the faster the database performs, although the improvement is not dramatic.
### Application Performance Test (JMeter)
Application performance is tested with JMeter. The connection counts used are 100, 500, and 1000 connections. The results for each are shown below.
**100-Connection Test**

**500-Connection Test**

**1000-Connection Test**

## G. Monitoring with Grafana and Prometheus
### Installing Node Exporter
- The first thing to do is start node exporter. On every node (node1 - node6), enter the node exporter directory (`cd node_exporter-0.18.1.linux-amd64`) and run the following.
```bash
./node_exporter --web.listen-address=":9100" \
--log.level="info" &
```

### Installing Prometheus
- Download the Prometheus binary on **Node1** and extract it.
```bash
$ wget https://github.com/prometheus/prometheus/releases/download/v2.2.1/prometheus-2.2.1.linux-amd64.tar.gz
$ tar -xzf prometheus-2.2.1.linux-amd64.tar.gz
```
- Edit the **prometheus.yml** configuration file so that its contents match the following.
```yml
global:
scrape_interval: 15s # By default, scrape targets every 15 seconds.
evaluation_interval: 15s # By default, scrape targets every 15 seconds.
# scrape_timeout is set to the global default value (10s).
external_labels:
cluster: 'test-cluster'
monitor: "prometheus"
scrape_configs:
- job_name: 'overwritten-nodes'
honor_labels: true # Do not overwrite job & instance labels.
static_configs:
- targets:
- '172.16.17.32:9100'
- '172.16.17.32:9100'
- '172.16.58.3:9100'
- '192.168.127.12:9100'
- '172.16.58.3:9100'
- '192.168.3.11:9100'
- job_name: 'tidb'
honor_labels: true # Do not overwrite job & instance labels.
static_configs:
- targets:
- '172.16.17.32:10080'
- job_name: 'pd'
honor_labels: true # Do not overwrite job & instance labels.
static_configs:
- targets:
- '172.16.17.32:2379'
- '172.16.17.32:2379'
- '172.16.58.3:2379'
- job_name: 'tikv'
honor_labels: true # Do not overwrite job & instance labels.
static_configs:
- targets:
- '192.168.127.12:20180'
- '172.16.58.3:20180'
- '192.168.3.11:20180'
```
+ [**prometheus.yml file**](prometheus/prometheus.yml)
- Start the Prometheus service with the command below.
```bash
cd prometheus-2.2.1.linux-amd64
./prometheus \
--config.file="./prometheus.yml" \
--web.listen-address=":9090" \
--web.external-url="http://172.16.17.32:9090/" \
--web.enable-admin-api \
--log.level="info" \
--storage.tsdb.path="./data.metrics" \
--storage.tsdb.retention="15d" &
```
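Once Prometheus is running, its standard HTTP API can be used to confirm that the scrape targets are being picked up; a minimal sketch:
```bash
# Lists every configured target and its health
curl -s http://172.16.17.32:9090/api/v1/targets | head
```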
### Installing Grafana
- To install Grafana, download the binary and extract it (on **Node1**).
```bash
$ wget https://dl.grafana.com/oss/release/grafana-6.5.1.linux-amd64.tar.gz
$ tar -zxf grafana-6.5.1.linux-amd64.tar.gz
```
- Configure Grafana in the `grafana.ini` file
```
$ cd grafana-6.5.1
$ nano conf/grafana.ini
```
Contents of **`grafana.ini`**
```ini
[paths]
data = ./data
logs = ./data/log
plugins = ./data/plugins
[server]
http_port = 3000
domain = 172.16.17.32
[database]
[session]
[analytics]
check_for_updates = true
[security]
admin_user = admin
admin_password = <PASSWORD>
[snapshots]
[users]
[auth.anonymous]
[auth.basic]
[auth.ldap]
[smtp]
[emails]
[log]
mode = file
[log.console]
[log.file]
level = info
format = text
[log.syslog]
[event_publisher]
[dashboards.json]
enabled = false
path = ./data/dashboards
[metrics]
[grafana_net]
url = https://grafana.net
```
- Start the Grafana service with the following command.
```bash
./bin/grafana-server \
--config="./conf/grafana.ini" &
```
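A quick liveness check against Grafana's health endpoint before opening the browser:
```bash
# Expect a small JSON payload containing "database": "ok"
curl -s http://172.16.17.32:3000/api/health
```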
### Configuring Grafana
- The next step is to configure Grafana. Open **`http://[node1-IP]:3000`** in a browser, for example **`http://172.16.17.32:3000`**. A login page like the one below appears.

- Log in with username **`admin`** and password **`<PASSWORD>`**. After logging in, a dashboard page for managing Grafana appears. Add a new dashboard (see the image below).

- Choose **Prometheus** as the service.

- Then set it to connect to node1's IP on port 9090. Save and test the connection.

### Importing Grafana Dashboards
- Next, import the Grafana dashboards used for monitoring. From the home page, go to **Manage Dashboard**

- A page like the one below appears. Press import to start importing.

- The dashboards are imported from JSON files that can be obtained from [this link](https://github.com/pingcap/tidb-ansible/tree/master/scripts). Five dashboards are used in this implementation:
+ [pd.json](prometheus/pd.json)
+ [tidb_summary.json](prometheus/tidb_summary.json)
+ [tidb.json](prometheus/tidb.json)
+ [tikv_details.json](prometheus/tikv_details.json)
+ [tikv_summary.json](prometheus/tikv_summary.json)
### Dashboard Results




## H. Failover Test
### Checking the Leader Node
To run the failover test, we first need to know which node is currently the leader. This can be found by running the following command on any Placement Driver node (Node1, Node2, Node3).
For example, on **node1**:
```
$ curl http://172.16.17.32:2379/pd/api/v1/members
```
This returns information with the cluster member details and the node that is currently the leader. The first image shows the PD cluster member information.

The second image shows the current leader node. It can be seen that **the current leader is node 2**.

### Stopping the Leader's Service
The next step is to stop the service on the leader. Since the current leader is node 2, the PD service is stopped on node 2. The service is stopped by _killing_ the PD process.
- Find the PID of the PD service process.
```
[node2]$ ps -aux | grep pd
```

- Kill that process
```
[node2]$ sudo kill <PID>
```
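The same two steps can be collapsed into one command; a sketch that assumes only the PD server matches the pattern:
```bash
# Kill the pd-server process on the current leader in one go
[node2]$ sudo pkill -f pd-server
```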
### Checking the Result
After the PD service process on node 2 has been stopped, the leader should now have changed. To check, log in to another PD cluster node and run the same command.
```
$ curl http://172.16.17.32:2379/pd/api/v1/members
```
The result is shown below. The PD cluster leader is now held by node 3, so the failover test has been carried out successfully.
<file_sep># Copy the hosts list
sudo cp /vagrant/sources/hosts /etc/hosts
# Copy the new source list
sudo cp '/vagrant/sources/sources.list' '/etc/apt/'
# Update package
sudo apt update -y
# Install Redis and JeMalloc
sudo apt-get install build-essential tcl -y
sudo apt-get install libjemalloc-dev -y
curl -O http://download.redis.io/redis-stable.tar.gz
tar xzvf redis-stable.tar.gz
cd redis-stable
make
sudo make install
#Allow Port
sudo ufw allow 6379
sudo ufw allow 26379
# Create redis directory
sudo mkdir /etc/redis
<file_sep>SET GLOBAL group_replication_bootstrap_group=ON;
START GROUP_REPLICATION;
SET GLOBAL group_replication_bootstrap_group=OFF;
<file_sep># -*- mode: ruby -*-
# vi: set ft=ruby :
# All Vagrant configuration is done below. The "2" in Vagrant.configure
# configures the configuration version (we support older styles for
# backwards compatibility). Please don't change it unless you know what
# you're doing.
Vagrant.configure("2") do |config|
# Configuration for Wordpress Server
(1..2).each do |i|
config.vm.define "wordpress_#{i}" do |node|
node.vm.hostname = "wordpress-#{i}"
node.vm.box = "bento/ubuntu-18.04"
node.vm.network "private_network", ip: "172.5.17.#{19+i}"
node.vm.provider "virtualbox" do |vb|
vb.name = "wordpress-#{i}"
vb.gui = false
vb.memory = "512"
end
node.vm.provision "shell", path: "provision/wordpress_provision.sh", privileged: false
end
end
# Configuration for Redis Server
(1..3).each do |i|
config.vm.define "redis_#{i}" do |node|
node.vm.hostname = "redis-#{i}"
node.vm.box = "bento/ubuntu-18.04"
node.vm.network "private_network", ip: "172.5.17.#{21+i}"
node.vm.provider "virtualbox" do |vb|
vb.name = "redis-#{i}"
vb.gui = false
vb.memory = "512"
end
node.vm.provision "shell", path: "provision/redis_provision.sh", privileged: false
end
end
end<file_sep>sudo cp /vagrant/config/redis-1.conf /etc/redis/redis.conf
sudo cp /vagrant/config/sentinel-1.conf /etc/redis-sentinel.conf
sudo cp /vagrant/service/redis.service /etc/systemd/system/redis.service
sudo cp /vagrant/service/redis-sentinel.service /etc/systemd/system/redisentinel.service
sudo adduser --system --group --no-create-home redis
sudo mkdir /var/lib/redis
sudo chown redis:redis /var/lib/redis
sudo chmod 770 /var/lib/redis
sudo systemctl start redis
sudo systemctl status redis
sudo chmod 777 /etc/redis-sentinel.conf
sudo systemctl start redisentinel
sudo systemctl status redisentinel
sudo chmod -R 777 /etc/redis
sudo systemctl restart redis
sudo systemctl status redis<file_sep># Script API Python using Flask
# and flask_pymongo
from bson.json_util import dumps
from bson.objectid import ObjectId
from flask import Flask
from flask_pymongo import PyMongo
from flask import jsonify, request
app = Flask(__name__)
app.secret_key = "SECRETKEY"
app.config["MONGO_URI"] = "mongodb://mongo-admin:<PASSWORD>@192.168.33.16:27017/populationData?retryWrites=false&authSource=admin"
mongo = PyMongo(app)
# Read Operation
@app.route('/populationHealthMeasure', methods=['GET'])
def get_data():
datas = mongo.db.populationCollection.find()
response = dumps(datas)
return response
# Insert Operation
@app.route('/populationHealthMeasure', methods=['POST'])
def add_data():
request_json = request.json
data_uniqueid = request_json["unique_id"]
data_3Year = request_json["3_year_period"]
data_deathCause = request_json["cause_of_death"]
data_Race = request_json["race"]
data_HispanicLatino = request_json["hispanic_or_latino"]
data_gender = request_json["gender"]
data_ageAdjusted = request_json["age-adjusted_rate"]
data_lowerConfidenceInterval = request_json["lower_confidence_interval"]
data_upperConfidenceInterval = request_json["upper_confidence_interval"]
data_id = mongo.db.populationCollection.insert({
'unique_id' : data_uniqueid,
'3_year_period' : data_3Year,
'cause_of_death' : data_deathCause,
'race' : data_Race,
'hispanic_or_latino' : data_HispanicLatino,
'gender' : data_gender,
'age-adjusted_rate' : data_ageAdjusted,
'lower_confidence_interval' : data_lowerConfidenceInterval,
'upper_confidence_interval' : data_upperConfidenceInterval
})
response = jsonify('Data has added successfully. The id is {}'.format(data_id))
response.status_code = 200
return response
# Update Operation
@app.route('/populationHealthMeasure/<id>', methods=['PUT'])
def update_data(id):
request_json = request.json
data_id = request_json["_id"]
data_uniqueid = request_json["unique_id"]
data_3Year = request_json["3_year_period"]
data_deathCause = request_json["cause_of_death"]
data_Race = request_json["race"]
data_HispanicLatino = request_json["hispanic_or_latino"]
data_gender = request_json["gender"]
data_ageAdjusted = request_json["age-adjusted_rate"]
data_lowerConfidenceInterval = request_json["lower_confidence_interval"]
data_upperConfidenceInterval = request_json["upper_confidence_interval"]
mongo.db.populationCollection.update_one(
{'_id': ObjectId(data_id['$oid']) if '$oid' in data_id else ObjectId(data_id)},
{
'$set' :
{
'unique_id' : data_uniqueid,
'3_year_period' : data_3Year,
'cause_of_death' : data_deathCause,
'race' : data_Race,
'hispanic_or_latino' : data_HispanicLatino,
'gender' : data_gender,
'age-adjusted_rate' : data_ageAdjusted,
'lower_confidence_interval' : data_lowerConfidenceInterval,
'upper_confidence_interval' : data_upperConfidenceInterval
}
}
)
response = jsonify('Data has updated successfully. The id is {}'.format(data_id))
response.status_code = 200
return response
# Delete Operation
@app.route('/populationHealthMeasure/<id>', methods=['DELETE'])
def delete_data(id):
mongo.db.populationCollection.delete_one({
'_id': ObjectId(id)
})
response = jsonify('Data has deleted successfully. The id is {}'.format(id))
response.status_code = 200
return response
# Aggregation Operation
@app.route('/populationHealthMeasure/cause_of_death', methods=['GET'])
def get_cause_of_death():
result = mongo.db.populationCollection.aggregate([
{
"$group": {
"_id": "$cause_of_death",
"count": {"$sum": 1}
}
}
])
response = dumps(result)
return response
# Aggregation Operation
@app.route('/populationHealthMeasure/max_upper_confidence', methods=['GET'])
def get_max_confidence():
result = mongo.db.populationCollection.aggregate([
{
"$group": {
"_id": "$cause_of_death",
"minimum": {"$min": "$upper_confidence_interval"}
}
}
])
response = dumps(result)
return response
if __name__ == "__main__":
app.run() | e0bad88100d50fa399443fbab17e538dc748b4bc | [
"SQL",
"Ruby",
"Markdown",
"Python",
"Shell"
] | 12 | SQL | bayulaxana/Basis_Data_Terdistribusi | 42510222834f68b6ea68ce0e3df1b8800643d342 | 72d3fac6b05f311b53b8b9772495497598a99b51 | |
refs/heads/master | <repo_name>shiwangi20/Milkman-Assistant<file_sep>/src/connection/MySqlConnection.java
package connection;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
public class MySqlConnection {
public static Connection doConnect()
{
Connection con=null;
try {
Class.forName("com.mysql.jdbc.Driver");
System.out.println("Loaded");
con=DriverManager.getConnection("jdbc:mysql://localhost/javaproject","root","<PASSWORD>");
System.out.println("Connected");
} catch (ClassNotFoundException | SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return con;
}
public static void main(String args[])
{
doConnect();
}
}
<file_sep>/src/billCollection/BillCollectionViewController.java
package billCollection;
import sms.SendBill;
import java.net.URL;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.ResourceBundle;
import connection.MySqlConnection;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.control.ComboBox;
import javafx.scene.control.TextField;
public class BillCollectionViewController {
@FXML
private ResourceBundle resources;
@FXML
private URL location;
@FXML
private ComboBox<Integer> month;
@FXML
private ComboBox<Integer> year;
@FXML
private TextField txtmobile;
@FXML
private TextField txtBill;
@FXML
void doGetStatus(ActionEvent event) {
Alert alert =new Alert(AlertType.INFORMATION);
alert.setTitle("Bill Status");
if(month.getSelectionModel().isEmpty() || year.getSelectionModel().isEmpty() || txtmobile.getText().equals(""))
{
alert.setContentText("Please select year, month and mobile number");
alert.show();
}
else{
try {
pst=con.prepareStatement("select * from billhistory where mobile=? and month=? and year=?");
pst.setString(1, txtmobile.getText());
pst.setInt(2, month.getValue());
pst.setInt(3, year.getValue());
ResultSet rs = pst.executeQuery();
// Guard against an empty result set before reading columns
if(rs.next() && rs.getInt("Status")==0)
{
txtBill.setText(String.valueOf(rs.getFloat("Total")));
}
else
{
alert.setContentText("No pending Bill");
alert.show();
}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
@FXML
void doUpdateStatus(ActionEvent event) {
Alert alert = new Alert(AlertType.INFORMATION);
alert.setTitle("Updating Customer Status");
if(month.getSelectionModel().isEmpty() || year.getSelectionModel().isEmpty() || txtmobile.getText().equals(""))
{
alert.setContentText("Please select year, month and mobile number");
alert.show();
}
else{
try {
pst=con.prepareStatement("update billhistory set Status=? where mobile=? and month=? and year=?");
pst.setInt(1, 1);
pst.setString(2, txtmobile.getText());
pst.setInt(3, month.getValue());
pst.setInt(4, year.getValue());
pst.executeUpdate();
alert.setContentText("Status Updated");
SendBill.sendMessage(txtmobile.getText(), "Payment of month :"+month.getValue()+" is received");
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
alert.setContentText("Error Occured in Updating Status");
}
alert.show();
}
}
Connection con;
PreparedStatement pst;
@FXML
void initialize() {
con=MySqlConnection.doConnect();
ArrayList<Integer> months = new ArrayList<Integer>(Arrays.asList(1,2,3,4,5,6,7,8,9,10,11,12));
ArrayList<Integer> years = new ArrayList<Integer>(Arrays.asList(2017,2018,2019,2020,2021,2022));
month.getItems().addAll(months);
year.getItems().addAll(years);
}
}
<file_sep>/README.md
# Milkman-Assistant
This will help a milkman keep records of his customers and send their monthly bills via SMS.
| 5c4b60ff5fa043a7e48ae154edd849ec564a0ea4 | [
"Markdown",
"Java"
] | 3 | Java | shiwangi20/Milkman-Assistant | f7b5bece0718f654ac11c33513acf7bfd3701989 | 768c846e4e6890d7c39f0fdbc85b86dac950cd76 | |
refs/heads/master | <file_sep>FROM centos:7.6.1810
ARG OS_VERSION
ARG REPO
ARG DBEAVER_VERSION
ARG GIT_VERSION
ARG JAVA_VERSION
ARG MAVEN_VERSION
ARG PYTHON_VERSION
ENV LANG=ja_JP.UTF-8
ENV DISPLAY=:0.0
RUN yum install -y git
RUN cd /usr/local/src && git clone https://github.com/ukijumotahaneniarukenia/$REPO.git
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-docker-host-user.sh | bash
RUN cd /usr/local/src/$REPO && echo './$OS_VERSION-install-default-user.sh 1000 oracle 1000 oracle' | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-repository-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-dev-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-tool-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-network-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim-system.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim-user.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim_plug.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-python-$PYTHON_VERSION.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-git-$GIT_VERSION.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-dotfile.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-locale.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-env.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-ld.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-font-RictyDiminished.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-ibus_mozc.sh | bash
#RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-fcitx_anthy.sh | bash
COPY subrun.sh /root/subrun.sh
#RUN bash /root/subrun.sh
ENV ORACLE_DOCKER_INSTALL=true
ENV ORACLE_SID=ORCLCDB
ENV ORACLE_HOME=/opt/oracle/product/19c/dbhome_1
ENV PATH=$PATH:$ORACLE_HOME/bin
RUN cd /usr/local/src && curl -sSLO https://yum.oracle.com/repo/OracleLinux/OL7/latest/x86_64/getPackage/oracle-database-preinstall-19c-1.0-1.el7.x86_64.rpm
RUN cd /usr/local/src && yum -y localinstall oracle-database-preinstall-19c-1.0-1.el7.x86_64.rpm && rm oracle-database-preinstall-19c-1.0-1.el7.x86_64.rpm
COPY oracle-database-ee-19c-1.0-1.x86_64.rpm /root/oracle-database-ee-19c-1.0-1.x86_64.rpm
RUN yum -y localinstall /root/oracle-database-ee-19c-1.0-1.x86_64.rpm
# Minor fixes to the startup script
RUN sed -i '34i export PASSWORD=<PASSWORD>' /etc/init.d/oracledb_ORCLCDB-19c
RUN sed -i '165s;^;#;' /etc/init.d/oracledb_ORCLCDB-19c
RUN sed -i '166i $SU -s /bin/bash $ORACLE_OWNER -c "$DBCA -silent -createDatabase -gdbName $ORACLE_SID -templateName $TEMPLATE_NAME -characterSet $CHARSET -createAsContainerDatabase $CREATE_AS_CDB -createListener $LISTENER_NAME:$LISTENER_PORT -datafileDestination $ORACLE_DATA_LOCATION -sid $ORACLE_SID -sysPassword $PASSWORD -systemPassword $PASSWORD -emConfiguration DBEXPRESS -emExpressPort $EM_EXPRESS_PORT"' /etc/init.d/oracledb_ORCLCDB-19c
# Change the password
RUN echo 'oracle:oracle_pwd' | chpasswd
# Quite a lot of preparation needed here
COPY create_user.sql /home/oracle/create_user.sql
COPY glogin.sql /opt/oracle/product/19c/dbhome_1/sqlplus/admin/glogin.sql
COPY init_db.sh /home/oracle/init_db.sh
COPY launch_and_open_db.sql /home/oracle/launch_and_open_db.sql
RUN chown oracle:oinstall /home/oracle/* && chmod +x /home/oracle/*
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-dbeaver-$DBEAVER_VERSION.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-jdk-$JAVA_VERSION.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-maven-$MAVEN_VERSION.sh | bash
USER oracle
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim-user.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim_plug.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-dotfile.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-env.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-font-RictyDiminished.sh | bash
WORKDIR /home/oracle
EXPOSE 1521
COPY run.sh /etc/init/run.sh
ENTRYPOINT ["/etc/init/run.sh","centos"]
<file_sep>|User ID|User name|Group ID|Group name|Password|
|:-:|:-:|:-:|:-:|:-:|
<file_sep>- https://developer.android.com/studio/workflow
- https://developer.android.com/studio/run/managing-avds#createavd
- https://developer.android.com/training/basics/firstapp/running-app
<file_sep>#!/bin/bash
usage(){
cat <<EOS
Usage:
$0 script-env
EOS
exit 0
}
ENV_REPO=$1;shift
[ -z $ENV_REPO ] && usage
while read tgt;do
while read nnn;do
echo $tgt/$nnn
done < <(find $HOME/$ENV_REPO/ubuntu-16-04-vim -name "env-*" | perl -pe 's;.*/;;g') # pick one representative from the environment directories
done < <(find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log') | perl -pe 's;.*/;;g' | sort | uniq -c
<file_sep># 参考文献
https://www.itmedia.co.jp/enterprise/articles/1604/27/news001.html
# keyboard
https://wiki.archlinux.jp/index.php/%E3%82%B3%E3%83%B3%E3%82%BD%E3%83%BC%E3%83%AB%E3%81%A7%E3%81%AE%E3%82%AD%E3%83%BC%E3%83%9C%E3%83%BC%E3%83%89%E8%A8%AD%E5%AE%9A
```
[aine💜centos (土 10月 05 10:58:34) ~]$cat /etc/vconsole.conf
KEYMAP=/usr/share/ibus/keymaps/jp
FONT=latarcyrheb-sun16
[aine💜centos (土 10月 05 10:36:30) ~]$cat /usr/share/ibus/keymaps/jp
include common
shift keycode 2 = exclam
shift keycode 3 = quotedbl
shift keycode 4 = numbersign
shift keycode 5 = dollar
shift keycode 6 = percent
shift keycode 7 = ampersand
shift keycode 8 = apostrophe
shift keycode 9 = parenleft
shift keycode 10 = parenright
shift keycode 11 = asciitilde
keycode 12 = minus
shift keycode 12 = equal
keycode 13 = asciicircum
shift keycode 13 = asciitilde
keycode 26 = at
shift keycode 26 = grave
keycode 27 = bracketleft
shift keycode 27 = braceleft
keycode 39 = semicolon
shift keycode 39 = plus
keycode 40 = colon
shift keycode 40 = asterisk
keycode 41 = Zenkaku_Hankaku
keycode 43 = bracketright
shift keycode 43 = braceright
keycode 51 = comma
shift keycode 51 = less
keycode 52 = period
shift keycode 52 = greater
keycode 53 = slash
shift keycode 53 = question
shift keycode 58 = Eisu_toggle
keycode 89 = backslash
shift keycode 89 = underscore
keycode 92 = Henkan_Mode
keycode 93 = Hiragana_Katakana
keycode 94 = Muhenkan
keycode 124 = yen
shift keycode 124 = bar
keycode 122 = Hangul
keycode 123 = Hangul_Hanja
shift keycode 84 = Execute
keycode 112 = Katakana
```
# The startx command
Startup procedure is documented here
https://www.ibm.com/support/knowledgecenter/ja/ssw_aix_72/s_commands/startx.html
http://x68000.q-e-d.net/~68user/unix/pickup?startx
Files it reads
The xinit command run by startx tries to load the .Xdefaults file.
# Hint for getting mahjong-tile (Unicode) glyphs to display under X
https://unix.stackexchange.com/questions/196152/xterm-not-displaying-unicode
# xterm build procedure
http://lfsbookja.osdn.jp/BLFS/svn-ja/x/xterm.html
Finally managed to check which font xterm starts with
The Ricty font renders full-width spaces visibly
https://github.com/metalefty/Ricty/blob/master/README.md

```
[root♥12f7ecdc2a7e (火 10月 01 09:43:30) /]$fc-list | grep Ri
/usr/share/fonts/RictyDiminished/RictyDiminished-Oblique.ttf: Ricty Diminished:style=Oblique
/usr/share/fonts/RictyDiminished/RictyDiminished-Regular.ttf: Ricty Diminished:style=Regular
/usr/share/fonts/RictyDiminished/RictyDiminished-Bold.ttf: Ricty Diminished:style=Bold
/usr/share/fonts/RictyDiminished/RictyDiminishedDiscord-Bold.ttf: Ricty Diminished Discord:style=Bold
/usr/share/fonts/RictyDiminished/RictyDiminishedDiscord-BoldOblique.ttf: Ricty Diminished Discord:style=Bold Oblique
/usr/share/fonts/RictyDiminished/RictyDiminishedDiscord-Regular.ttf: Ricty Diminished Discord:style=Regular
/usr/share/fonts/RictyDiminished/RictyDiminishedDiscord-Oblique.ttf: Ricty Diminished Discord:style=Oblique
/usr/share/fonts/RictyDiminished/RictyDiminished-BoldOblique.ttf: Ricty Diminished:style=Bold Oblique
[root♥12f7ecdc2a7e (火 10月 01 09:44:38) /]$xterm -fa 'Ricty Diminished:style=Regular'
```
Building xterm from source
https://gist.github.com/dichotomies/6b41496e23bbb89d461e23f8b9b0c262
Unicode and Xterm
https://linuxjf.osdn.jp/JFdocs/Unicode-HOWTO-3.html
X font-related commands
http://www.yam-web.net/c-programing/font/index.html
https://orebibou.com/2017/01/centosubuntu%E3%81%A7%E3%83%95%E3%82%A9%E3%83%B3%E3%83%88%E3%82%92%E8%BF%BD%E5%8A%A0%E3%81%99%E3%82%8B%E6%96%B9%E6%B3%95/
Font settings for X applications
http://www.momonga-linux.org/docs/TTF-HOWTO/ja/fontpath.html
A valuable Xterm reference
https://invisible-island.net/xterm/xterm.html
X's default configuration files
http://watermans-linuxtips.blogspot.com/2009/02/centosx.html?m=1
X fonts
Arch Linux is great
https://bbs.archlinux.org/viewtopic.php?id=167204
https://l-w-i.net/t/x/font_101.txt
```
[rstudio💓2de5d80db52a (月 9月 30 09:40:49) /]$cd /usr/share/fonts/vlgothic/
[rstudio💓2de5d80db52a (月 9月 30 09:42:04) /usr/share/fonts/vlgothic]$ll
total 8092
-rw-r--r--. 1 root root 4111416 6月 7 2013 VL-Gothic-Regular.ttf
-rw-r--r--. 1 root root 4171796 6月 7 2013 VL-PGothic-Regular.ttf
[rstudio💓2de5d80db52a (月 9月 30 09:42:06) /usr/share/fonts/vlgothic]$which mkfontscale
/usr/bin/mkfontscale
[rstudio💓2de5d80db52a (月 9月 30 09:42:43) /usr/share/fonts/vlgothic]$which mkfontdir
/usr/bin/mkfontdir
[rstudio💓2de5d80db52a (月 9月 30 09:42:52) /usr/share/fonts/vlgothic]$sudo mkfontscale
[rstudio💓2de5d80db52a (月 9月 30 09:42:59) /usr/share/fonts/vlgothic]$ll
total 8096
-rw-r--r--. 1 root root 4111416 6月 7 2013 VL-Gothic-Regular.ttf
-rw-r--r--. 1 root root 4171796 6月 7 2013 VL-PGothic-Regular.ttf
-rw-r--r--. 1 root root 2404 9月 30 09:42 fonts.scale
[rstudio💓2de5d80db52a (月 9月 30 09:43:02) /usr/share/fonts/vlgothic]$sudo mkfontdir
[rstudio💓2de5d80db52a (月 9月 30 09:43:08) /usr/share/fonts/vlgothic]$ll
total 8100
-rw-r--r--. 1 root root 4111416 6月 7 2013 VL-Gothic-Regular.ttf
-rw-r--r--. 1 root root 4171796 6月 7 2013 VL-PGothic-Regular.ttf
-rw-r--r--. 1 root root 2404 9月 30 09:43 fonts.dir
-rw-r--r--. 1 root root 2404 9月 30 09:42 fonts.scale
```
Fonts can be played with using this command
```
[root💛2de5d80db52a (月 9月 30 09:12:56) /]$/usr/bin/xfontsel
```
Fonts to try out
https://github.com/tomokuni/Myrica
A git repo to mine for Xterm config files
https://github.com/jfoscarini/.Xresources/blob/master/.Xresources
Emoji display width
https://github.com/hamano/locale-eaw
ibus issue threads
https://github.com/ibus/ibus/issues/2005
https://github.com/ibus/ibus/issues/385
A helpful guide for reading core dumps
http://rabbitfoot141.hatenablog.com/entry/2016/11/14/153101
Latest iBus
https://www.clear-code.com/blog/2018/7/25.html
iBus
https://wiki.archlinux.jp/index.php/IBus#.E5.88.9D.E6.9C.9F.E8.A8.AD.E5.AE.9A
Japanese input
https://sites.google.com/site/voidlinuxmemo/ri-ben-yu-huan-jing-1#TOC-mozc-
Finding the folders consulted when building the font cache
```
[rstudio💗7c2d3e78e156 (土 9月 28 17:54:36) ~/IdeaProjects/untitled]$fc-cache -f -v
/usr/share/fonts: caching, new cache contents: 0 fonts, 3 dirs
/usr/share/fonts/dejavu: caching, new cache contents: 9 fonts, 0 dirs
/usr/share/fonts/liberation: caching, new cache contents: 4 fonts, 0 dirs
/usr/share/fonts/vlgothic: caching, new cache contents: 2 fonts, 0 dirs
/usr/share/X11/fonts/Type1: caching, new cache contents: 13 fonts, 0 dirs
/usr/share/X11/fonts/TTF: skipping, no such directory
/usr/local/share/fonts: skipping, no such directory
/home/rstudio/.local/share/fonts: skipping, no such directory
/home/rstudio/.fonts: skipping, no such directory
/usr/share/fonts/dejavu: skipping, looped directory detected
/usr/share/fonts/liberation: skipping, looped directory detected
/usr/share/fonts/vlgothic: skipping, looped directory detected
/usr/lib/fontconfig/cache: not cleaning unwritable cache directory
/home/rstudio/.cache/fontconfig: cleaning cache directory
/home/rstudio/.fontconfig: not cleaning non-existent cache directory
/usr/bin/fc-cache-64: succeeded
[rstudio💗7c2d3e78e156 (土 9月 28 17:54:41) ~/IdeaProjects/untitled]$fc-list>post
[rstudio💗7c2d3e78e156 (土 9月 28 17:54:47) ~/IdeaProjects/untitled]$ll
total 12
-rwxrwxr-x. 1 rstudio rstudio 35 9月 28 17:50 a.sh
-rw-rw-r--. 1 rstudio rstudio 2243 9月 28 17:54 post
-rw-rw-r--. 1 rstudio rstudio 2243 9月 28 17:54 pre
[rstudio💗7c2d3e78e156 (土 9月 28 17:54:49) ~/IdeaProjects/untitled]$diff post pre
```
mv the extracted font files into one of the referenced folders.
```
[rstudio💗7c2d3e78e156 (土 9月 28 17:54:58) ~/IdeaProjects/untitled]$ll /usr/share/X11/fonts/TTF
ls: cannot access /usr/share/X11/fonts/TTF: No such file or directory
```
Getting past PAM
https://blfs-support.linuxfromscratch.narkive.com/22HPbg3d/normal-user-cannot-start-x
```
[rstudio💞4931eb893dee (土 9月 28 17:25:28) /root]$find / -name "*pam_*so" 2>/dev/null
/usr/lib64/security/pam_namespace.so
/usr/lib64/security/pam_filter.so
/usr/lib64/security/pam_motd.so
/usr/lib64/security/pam_tty_audit.so
/usr/lib64/security/pam_limits.so
/usr/lib64/security/pam_succeed_if.so
/usr/lib64/security/pam_echo.so
/usr/lib64/security/pam_exec.so
/usr/lib64/security/pam_lastlog.so
/usr/lib64/security/pam_unix.so
/usr/lib64/security/pam_faildelay.so
/usr/lib64/security/pam_permit.so
/usr/lib64/security/pam_unix_acct.so
/usr/lib64/security/pam_selinux_permit.so
/usr/lib64/security/pam_group.so
/usr/lib64/security/pam_timestamp.so
/usr/lib64/security/pam_access.so
/usr/lib64/security/pam_umask.so
/usr/lib64/security/pam_warn.so
/usr/lib64/security/pam_debug.so
/usr/lib64/security/pam_issue.so
/usr/lib64/security/pam_sepermit.so
/usr/lib64/security/pam_rootok.so
/usr/lib64/security/pam_xauth.so
/usr/lib64/security/pam_chroot.so
/usr/lib64/security/pam_pwhistory.so
/usr/lib64/security/pam_systemd.so
/usr/lib64/security/pam_postgresok.so
/usr/lib64/security/pam_time.so
/usr/lib64/security/pam_tally2.so
/usr/lib64/security/pam_selinux.so
/usr/lib64/security/pam_loginuid.so
/usr/lib64/security/pam_shells.so
/usr/lib64/security/pam_console.so
/usr/lib64/security/pam_pwquality.so
/usr/lib64/security/pam_env.so
/usr/lib64/security/pam_unix_auth.so
/usr/lib64/security/pam_securetty.so
/usr/lib64/security/pam_faillock.so
/usr/lib64/security/pam_nologin.so
/usr/lib64/security/pam_listfile.so
/usr/lib64/security/pam_keyinit.so
/usr/lib64/security/pam_cap.so
/usr/lib64/security/pam_unix_passwd.so
/usr/lib64/security/pam_rhosts.so
/usr/lib64/security/pam_ftp.so
/usr/lib64/security/pam_localuser.so
/usr/lib64/security/pam_cracklib.so
/usr/lib64/security/pam_mkhomedir.so
/usr/lib64/security/pam_deny.so
/usr/lib64/security/pam_userdb.so
/usr/lib64/security/pam_unix_session.so
/usr/lib64/security/pam_stress.so
/usr/lib64/security/pam_mail.so
/usr/lib64/security/pam_wheel.so
/usr/lib64/security/pam_fprintd.so
/usr/lib64/security/pam_sss.so
/usr/lib64/security/pam_oddjob_mkhomedir.so
```
Can X apps be started by a non-root user?
http://www.turbolinux.com/support/document/knowledge/98.html
http://www.ep.sci.hokudai.ac.jp/~inex/y2006/1222/Debian_install/after_install.html
The X.org wiki
https://www.x.org/wiki/
Fix for the Fatal server error shown when a regular user runs startx
PAM authentication failed, cannot start X server
http://blogcdn.rutake.com/blog/techmemo/2007/08/startxfatal_server_error.html
Edit /etc/pam.d/xserver to work around it
Before the fix
```
[root💟4931eb893dee (土 9月 28 17:02:42) /]$cat /etc/pam.d/xserver
#%PAM-1.0
auth sufficient pam_rootok.so
auth required pam_console.so
account required pam_permit.so
session optional pam_keyinit.so force revoke
```
After the fix
Change pam_console.so to pam_permit.so
```
#%PAM-1.0
auth sufficient pam_rootok.so
auth required pam_permit.so
account required pam_permit.so
session optional pam_keyinit.so force revoke
```
File location
/etc/pam.d/xserver
```
[root❤4931eb893dee (土 9月 28 17:01:01) /]$ll /etc/pam.d/*
-rw-r--r--. 1 root root 272 10月 31 2018 /etc/pam.d/atd
-rw-r--r--. 1 root root 192 8月 9 12:09 /etc/pam.d/chfn
-rw-r--r--. 1 root root 192 8月 9 12:09 /etc/pam.d/chsh
-rw-r--r--. 1 root root 232 4月 11 2018 /etc/pam.d/config-util
-rw-r--r--. 1 root root 287 8月 9 08:07 /etc/pam.d/crond
-rw-r--r--. 1 root root 701 4月 11 2018 /etc/pam.d/fingerprint-auth
-rw-r--r--. 1 root root 97 8月 9 23:07 /etc/pam.d/liveinst
-rw-r--r--. 1 root root 796 8月 9 12:09 /etc/pam.d/login
-rw-r--r--. 1 root root 154 4月 11 2018 /etc/pam.d/other
-rw-r--r--. 1 root root 188 6月 10 2014 /etc/pam.d/passwd
-rw-r--r--. 1 root root 760 4月 11 2018 /etc/pam.d/password-auth
-rw-r--r--. 1 root root 155 9月 14 03:09 /etc/pam.d/polkit-1
-rw-r--r--. 1 root root 329 4月 11 2018 /etc/pam.d/postlogin
-rw-r--r--. 1 root root 681 8月 9 12:09 /etc/pam.d/remote
-rw-r--r--. 1 root root 143 8月 9 12:09 /etc/pam.d/runuser
-rw-r--r--. 1 root root 138 8月 9 12:09 /etc/pam.d/runuser-l
-rw-r--r--. 1 root root 145 6月 10 2014 /etc/pam.d/setup
-rw-r--r--. 1 root root 743 4月 11 2018 /etc/pam.d/smartcard-auth
lrwxrwxrwx. 1 root root 25 9月 28 13:15 /etc/pam.d/smtp -> /etc/alternatives/mta-pam
-rw-r--r--. 1 root root 76 10月 31 2018 /etc/pam.d/smtp.postfix
-rw-r--r--. 1 root root 904 8月 9 10:40 /etc/pam.d/sshd
-rw-r--r--. 1 root root 540 8月 9 12:09 /etc/pam.d/su
-rw-r--r--. 1 root root 137 8月 9 12:09 /etc/pam.d/su-l
-rw-r--r--. 1 root root 238 8月 9 11:57 /etc/pam.d/sudo
-rw-r--r--. 1 root root 216 8月 9 11:57 /etc/pam.d/sudo-i
-rw-r--r--. 1 root root 760 4月 11 2018 /etc/pam.d/system-auth
-rw-r--r--. 1 root root 129 9月 14 03:19 /etc/pam.d/systemd-user
-rw-r--r--. 1 root root 84 10月 31 2018 /etc/pam.d/vlock
-rw-r--r--. 1 root root 163 8月 9 12:28 /etc/pam.d/xserver
```
Japanese input in the browser
http://www.ep.sci.hokudai.ac.jp/~inex/y2006/1222/Debian_install/after_install.html
Add the following to ~/.xinitrc
```
export LC_ALL=ja_JP.utf8
export LANGUAGE=ja_JP.utf8
export LANG=ja_JP.utf8
export GTK_IM_MODULE=ibus
export XMODIFIERS=@im=ibus
export QT_IM_MODULE=ibus
export DefalutIMModule=ibus
export NO_AT_BRIDGE=1
```
Linux directory layout
https://oxynotes.com/?p=5987
https://satoru739.hatenadiary.com/entry/20111007/1318086532
Specifying xterm colors
https://heruwakame.hatenablog.com/entry/2017/10/21/232112
http://xjman.dsl.gr.jp/man/man1/xterm.1x.html
Making X handle Japanese
http://www.rcc.ritsumei.ac.jp/?p=6403
Japanese input support in X
https://qiita.com/ai56go/items/63abe54f2504ecc940cd
https://tkng.org/unixuser/200405/part1.html
https://tkng.org/unixuser/200405/part2.html
https://tkng.org/unixuser/200405/part3.html
http://note.kurodigi.com/xterm-customize/#id304
https://solist.work/blog/posts/mozc/
Japanese locale issues
https://www.linux.ambitious-engineer.com/?p=984
http://slavartemp.blogspot.com/2013/06/xming-teraterm-ssh-lubuntu-x.html
Roughly, what X11 is
http://nmaruichi.la.coocan.jp/XawDoc/Xaw02.html
Japanese font files in X11
http://vega.pgw.jp/~kabe/vsd/k14/
On displaying Japanese in Xterm
http://vega.pgw.jp/~kabe/vsd/k14/xterm-fontsel.html
http://bogytech.blogspot.com/2019/
How X works
http://luozengbin.github.io/blog/2014-06-21-%5B%E3%83%A1%E3%83%A2%5D%E3%83%AA%E3%83%A2%E3%83%BC%E3%83%88x%E3%81%AE%E6%8E%A5%E7%B6%9A%E6%96%B9%E6%B3%95.html
How iBus works
https://ja.opensuse.org/IBus
X fonts
https://medium.com/source-words/how-to-manually-install-update-and-uninstall-fonts-on-linux-a8d09a3853b0
https://running-dog.net/2011/10/post_303.html
https://incompleteness-theorems.at.webry.info/201009/article_6.html
https://linuxjf.osdn.jp/JFdocs/XWindow-User-HOWTO-7.html
http://hajimete-program.com/blog/2017/04/15/debian8ubuntu%E3%81%A7xterm%E3%81%AE%E3%83%95%E3%82%A9%E3%83%B3%E3%83%88%E3%82%92%E5%A4%A7%E3%81%8D%E3%81%8F%E3%81%97%E3%81%A6%E3%80%81molokai%E3%83%86%E3%83%BC%E3%83%9E%E3%81%AB%E3%81%99%E3%82%8B/
Impressive
https://running-dog.net/category/cat_kdeandqt
Isn't this the one???
https://blog.goo.ne.jp/tabitom2002/e/a18ccc75ec1ff1bbc5909ee95e5f9408
Browsers
https://www.hiroom2.com/centos/centos-7-ja/
Installing man pages in docker
https://okisanjp.hatenablog.jp/entry/2017/01/06/214353
Japanese input is a struggle
http://pc.casey.jp/archives/153902376
Key input in X apps
https://wiki.archlinux.jp/index.php/Xorg_%E3%81%A7%E3%81%AE%E3%82%AD%E3%83%BC%E3%83%9C%E3%83%BC%E3%83%89%E8%A8%AD%E5%AE%9A?rdfrom=https%3A%2F%2Fwiki.archlinux.org%2Findex.php%3Ftitle%3DKeyboard_Configuration_in_Xorg_%28%25E6%2597%25A5%25E6%259C%25AC%25E8%25AA%259E%29%26redirect%3Dno
Super is the Windows key
https://linux.keicode.com/linux/japanese.php
Xterm commands
https://www.ibm.com/support/knowledgecenter/ja/ssw_aix_72/x_commands/xterm.html
http://x68000.q-e-d.net/~68user/unix/pickup?xterm
http://x68000.q-e-d.net/~68user/unix/pickup?kterm
https://www.ibm.com/support/knowledgecenter/ja/ssw_aix_72/a_commands/aixterm.html
https://www.ibm.com/support/knowledgecenter/ja/ssw_aix_71/d_commands/dtterm.html
Keyboard input
Configuring the Zenkaku/Hankaku key
https://github.com/uim/uim-doc-ja/wiki/CustomizeUim
Probably this one, second find
https://slackware.jp/configuration/x_window_configuration.html
Uncomment line 149 and comment out line 150
```
[root💝0f0a00fd19e2 (木 9月 26 20:23:49) /etc/X11]$vi /usr/share/X11/xkb/keycodes/evdev
[root💙0f0a00fd19e2 (木 9月 26 20:31:03) /]$grep -A15 -n Japan /usr/share/X11/xkb/keycodes/evdev
147: // Keys that are generated on Japanese keyboards
148-
149- //<HZTG> = 93; // Hankaku/Zenkakau toggle - not actually used
150- alias <HZTG> = <TLDE>;
151- <HKTG> = 101; // Hiragana/Katakana toggle
152- <AB11> = 97; // backslash/underscore
153- <HENK> = 100; // Henkan
154- <MUHE> = 102; // Muhenkan
155- <AE13> = 132; // Yen
156- <KATA> = 98; // Katakana
157- <HIRA> = 99; // Hiragana
158- <JPCM> = 103; // KPJPComma
159- //<RO> = 97; // Romaji
160-
161- // Keys that are generated on Korean keyboards
162-
```
Keyboard input settings on the docker side
Default keyboard.conf
```
[root💜0f0a00fd19e2 (木 9月 26 21:51:43) /]$cat /etc/X11/xorg.conf.d/00-keyboard.conf
# Read and parsed by systemd-localed. It's probably wise not to edit this file
# manually too freely.
Section "InputClass"
Identifier "system-keyboard"
MatchIsKeyboard "on"
Option "XkbLayout" "us"
EndSection
```
Keyboard input settings inside the docker container
```
[rstudio❤centos (木 9月 26 21:53:49) ~/unko/script_scratch/x]$find / -name "*keyboard*" 2>/dev/null |& grep X11
/etc/X11/xorg.conf.d/00-keyboard.conf
/usr/include/X11/bitmaps/keyboard16
[rstudio❤centos (木 9月 26 21:54:05) ~/unko/script_scratch/x]$vi /etc/X11/xorg.conf.d/00-keyboard.conf
[rstudio❤centos (木 9月 26 21:54:20) ~/unko/script_scratch/x]$cat /etc/X11/xorg.conf.d/00-keyboard.conf
# Read and parsed by systemd-localed. It's probably wise not to edit this file
# manually too freely.
Section "InputClass"
Identifier "system-keyboard"
MatchIsKeyboard "on"
Option "XkbLayout" "jp,jp"
Option "XkbVariant" "kana,"
EndSection
```
==================================================================================
# X11 configuration
Searching the filesystem for X11
```
[root💘25d0f3ff0af4 (土 9月 28 14:01:36) /]$find / -name "*X11*" 2>/dev/null
/etc/X11
/usr/include/X11
/usr/lib64/X11
/usr/lib64/girepository-1.0/GdkX11-2.0.typelib
/usr/lib64/girepository-1.0/GdkX11-3.0.typelib
/usr/lib64/libX11.so
/usr/lib64/libX11-xcb.so
/usr/lib64/libX11-xcb.so.1.0.0
/usr/lib64/libX11.so.6.3.0
/usr/lib64/libX11-xcb.so.1
/usr/lib64/libX11.so.6
/usr/share/X11
/usr/share/gir-1.0/GdkX11-2.0.gir
/var/lib/yum/yumdb/l/4972906b4016cfe97055d73c789653847a916870-libX11-devel-1.6.7-2.el7-x86_64
/var/lib/yum/yumdb/l/db7e8655fc08c81313cd45ca17452adfafc45a60-libX11-1.6.7-2.el7-x86_64
/var/lib/yum/yumdb/l/fe8f43ef082c3f982a0f4bd7025416ae627fdeac-libX11-common-1.6.7-2.el7-noarch
/tmp/.X11-unix
```
# Location of /etc/X11
```
[root💘25d0f3ff0af4 (土 9月 28 14:02:26) /]$ll /etc/X11
total 24
-rw-r--r--. 1 root root 547 4月 11 2018 Xmodmap
-rw-r--r--. 1 root root 493 4月 11 2018 Xresources
drwxr-xr-x. 2 root root 4096 4月 11 2018 applnk
drwxr-xr-x. 1 root root 4096 9月 28 13:09 fontpath.d
drwxr-xr-x. 1 root root 4096 9月 28 13:26 xinit
drwxr-xr-x. 1 root root 4096 9月 14 03:19 xorg.conf.d
```
# Keyboard configuration file under /etc/X11/xorg.conf.d
```
[root💘25d0f3ff0af4 (土 9月 28 14:35:30) /]$ll /etc/X11/xorg.conf.d/
total 4
-rw-r--r--. 1 root root 232 12月 4 2018 00-keyboard.conf
```
```
[root💘25d0f3ff0af4 (土 9月 28 14:36:50) /]$ll /etc/X11/xorg.conf.d/*
-rw-r--r--. 1 root root 232 12月 4 2018 /etc/X11/xorg.conf.d/00-keyboard.conf
[root💘25d0f3ff0af4 (土 9月 28 14:36:51) /]$cat /etc/X11/xorg.conf.d/00-keyboard.conf
# Read and parsed by systemd-localed. It's probably wise not to edit this file
# manually too freely.
Section "InputClass"
Identifier "system-keyboard"
MatchIsKeyboard "on"
Option "XkbLayout" "us"
EndSection
```
# Input method
50-xinput.sh
```
[root💘25d0f3ff0af4 (土 9月 28 14:06:42) /]$ll /etc/X11/xinit/xinitrc.d/*
-rwxr-xr-x. 1 root root 621 3月 14 2019 /etc/X11/xinit/xinitrc.d/00-start-message-bus.sh
-rwxr-xr-x. 1 root root 4228 8月 9 08:53 /etc/X11/xinit/xinitrc.d/50-xinput.sh
-rwxr-xr-x. 1 root root 543 4月 11 2018 /etc/X11/xinit/xinitrc.d/localuser.sh
-rwxr-xr-x. 1 root root 842 8月 9 23:07 /etc/X11/xinit/xinitrc.d/zz-liveinst.sh
```
# Location of /usr/share/X11
This is the one!
```
[root💘25d0f3ff0af4 (土 9月 28 14:15:38) /]$ll /usr/share/X11
total 112
-rw-r--r--. 1 root root 42077 10月 9 2018 XErrorDB
drwxr-xr-x. 2 root root 4096 9月 28 13:07 app-defaults
drwxr-xr-x. 5 root root 4096 9月 28 13:09 fonts
drwxr-xr-x. 64 root root 4096 9月 28 13:07 locale
-rw-r--r--. 1 root root 17975 8月 12 2017 rgb.txt
drwxr-xr-x. 2 root root 4096 9月 28 13:07 x11perfcomp
drwxr-xr-x. 8 root root 4096 9月 28 13:09 xkb
drwxr-xr-x. 2 root root 4096 9月 28 13:18 xorg.conf.d
```
# /usr/share/X11/app-defaults/
Default configuration files for X apps live here, including Xterm's
# X11 locale settings
```
[root💘25d0f3ff0af4 (土 9月 28 14:21:58) /]$ll /usr/share/X11/locale/ja_JP.UTF-8/*
-rw-r--r--. 1 root root 77 8月 9 09:39 /usr/share/X11/locale/ja_JP.UTF-8/Compose
-rw-r--r--. 1 root root 347 10月 9 2018 /usr/share/X11/locale/ja_JP.UTF-8/XI18N_OBJS
-rw-r--r--. 1 root root 2205 8月 9 09:39 /usr/share/X11/locale/ja_JP.UTF-8/XLC_LOCALE
```
# Not sure what these do, but good shell-script study material
awk is used a lot here
```
[root💘25d0f3ff0af4 (土 9月 28 14:23:44) /]$ll /usr/share/X11/x11perfcomp/*
-rwxr-xr-x. 1 root root 27271 8月 12 2017 /usr/share/X11/x11perfcomp/Xmark
-rwxr-xr-x. 1 root root 424 8月 12 2017 /usr/share/X11/x11perfcomp/fillblnk
-rwxr-xr-x. 1 root root 669 8月 12 2017 /usr/share/X11/x11perfcomp/perfboth
-rwxr-xr-x. 1 root root 624 8月 12 2017 /usr/share/X11/x11perfcomp/perfratio
```
# Configuration files defining the key layouts used by X apps
```
[root💘25d0f3ff0af4 (土 9月 28 14:27:04) /]$ll /usr/share/X11/xkb/compat/*
-rw-r--r--. 1 root root 1712 6月 5 2018 /usr/share/X11/xkb/compat/README
-rw-r--r--. 1 root root 1121 6月 5 2018 /usr/share/X11/xkb/compat/accessx
-rw-r--r--. 1 root root 1054 6月 5 2018 /usr/share/X11/xkb/compat/basic
-rw-r--r--. 1 root root 507 6月 5 2018 /usr/share/X11/xkb/compat/caps
-rw-r--r--. 1 root root 228 6月 5 2018 /usr/share/X11/xkb/compat/complete
-rw-r--r--. 1 root root 1644 6月 5 2018 /usr/share/X11/xkb/compat/iso9995
-rw-r--r--. 1 root root 986 6月 5 2018 /usr/share/X11/xkb/compat/japan
-rw-r--r--. 1 root root 469 6月 5 2018 /usr/share/X11/xkb/compat/ledcaps
-rw-r--r--. 1 root root 466 6月 5 2018 /usr/share/X11/xkb/compat/lednum
-rw-r--r--. 1 root root 486 6月 5 2018 /usr/share/X11/xkb/compat/ledscroll
-rw-r--r--. 1 root root 1396 6月 5 2018 /usr/share/X11/xkb/compat/level5
-rw-r--r--. 1 root root 2724 6月 5 2018 /usr/share/X11/xkb/compat/misc
-rw-r--r--. 1 root root 4604 6月 5 2018 /usr/share/X11/xkb/compat/mousekeys
-rw-r--r--. 1 root root 1133 6月 5 2018 /usr/share/X11/xkb/compat/olpc
-rw-r--r--. 1 root root 340 6月 5 2018 /usr/share/X11/xkb/compat/pc
-rw-r--r--. 1 root root 1226 6月 5 2018 /usr/share/X11/xkb/compat/pc98
-rw-r--r--. 1 root root 1842 6月 5 2018 /usr/share/X11/xkb/compat/xfree86
-rw-r--r--. 1 root root 1457 6月 5 2018 /usr/share/X11/xkb/compat/xtest
```
# Configuration files defining user input events
```
[root💘25d0f3ff0af4 (土 9月 28 14:31:09) /]$ll /usr/share/X11/xorg.conf.d/*
-rw-r--r--. 1 root root 1099 11月 9 2018 /usr/share/X11/xorg.conf.d/10-evdev.conf
-rw-r--r--. 1 root root 1867 8月 9 12:28 /usr/share/X11/xorg.conf.d/10-quirks.conf
-rw-r--r--. 1 root root 92 8月 9 12:23 /usr/share/X11/xorg.conf.d/10-radeon.conf
-rw-r--r--. 1 root root 789 11月 9 2018 /usr/share/X11/xorg.conf.d/40-libinput.conf
-rw-r--r--. 1 root root 1706 11月 9 2018 /usr/share/X11/xorg.conf.d/50-synaptics.conf
-rw-r--r--. 1 root root 115 11月 9 2018 /usr/share/X11/xorg.conf.d/50-vmmouse.conf
-rw-r--r--. 1 root root 3025 8月 9 12:23 /usr/share/X11/xorg.conf.d/50-wacom.conf
```
# Displaying emoji in Xterm
https://tmtms.hatenablog.com/entry/201811/windows-terminal
# Removing docker containers
```
docker ps -qa | xargs -I@ bash -c 'docker stop @ && docker rm @'
```
# Building a docker image
```
time docker build -t centos_xxx . | tee log
```
# Creating a docker container
DISPLAY has the form DISPLAY=<IP or hostname>:<display number>.<screen number>
```
docker run --privileged --shm-size=8gb --name xxx -itd -v /run/udev:/run/udev -v /run/systemd:/run/systemd -v /tmp/.X11-unix:/tmp/.X11-unix -v /var/lib/dbus:/var/lib/dbus -v /var/run/dbus:/var/run/dbus -v /etc/machine-id:/etc/machine-id -p 28787:8787 centos_xxx
```
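A hedged illustration of the DISPLAY format above (the address and numbers here are made up):
```
# host by IP, display 0, screen 0
export DISPLAY=192.168.11.2:0.0
# or by hostname
export DISPLAY=dockerhost:0.0
```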
# Entering the docker container
```
docker exec -it xxx /bin/bash
```
<file_sep>GIT_VERSION=2-24-1
PYTHON_VERSION=3-7-4
RSTUDIO_VERSION=1-2-X
R_VERSION=X-X-X
SHINY_VERSION=1-5-9
<file_sep>GIT_VERSION=2-24-1
JAVA_VERSION=11
MYSQL_VERSION=8-X-X
PYTHON_VERSION=3-7-4
<file_sep>GIT_VERSION=2-24-1
MYSQL_VERSION=8-X-X
PYTHON_VERSION=3-7-4
<file_sep>GIT_VERSION=2-24-1
PYTHON_VERSION=3-7-4
SQLITE3_VERSION=3-30-0
<file_sep># Adding to the docker auto-build target list
- Append the environment directory in question
```
pwd | sed 's;.*/;;' >HHH/ENV_REPO/docker-build-wanted-list
```
# Building docker images
- Background run
```
time docker build --no-cache -t XXX BUILD_ARG --build-arg REPO=INSTALLER_REPO --build-arg OS_VERSION=$(echo XXX | grep -Po '[a-z]{1,}(?:-[0-9]{1,}){1,}') . UNKO
```
- Foreground run (cache enabled)
```
time docker build -t XXX BUILD_ARG --build-arg REPO=INSTALLER_REPO --build-arg OS_VERSION=$(echo XXX | grep -Po '[a-z]{1,}(?:-[0-9]{1,}){1,}') . | tee log
```
- Cache disabled
```
time docker build --no-cache -t XXX BUILD_ARG --build-arg REPO=INSTALLER_REPO --build-arg OS_VERSION=$(echo XXX | grep -Po '[a-z]{1,}(?:-[0-9]{1,}){1,}') . | tee log
```
# Starting a docker container
```
docker run --privileged --shm-size=SHM_SIZE --hostname=doc-XXX -v HHH/ENV_REPO/XXX/mnt:HHH/mnt -v HHH/Downloads-for-docker-container/XXX:HHH/media -v /sys/fs/cgroup:/sys/fs/cgroup:ro -v /etc/localtime:/etc/localtime -v /run/udev:/run/udev -v /run/systemd:/run/systemd -v /tmp/.X11-unix:/tmp/.X11-unix -v /var/lib/dbus:/var/lib/dbus -v /var/run/dbus:/var/run/dbus -v /etc/machine-id:/etc/machine-id EXPOSE --name XXX -itd XXX
```
# Entering a docker container
```
docker exec -it XXX /bin/bash
```
# Removing docker containers
- Remove all
```
docker ps -qa | xargs -I@ bash -c 'docker stop @ && docker rm @'
```
- Remove exited containers
```
docker ps -a | grep Exit | awk '{print $1}' | xargs -I@ bash -c 'docker stop @ && docker rm @'
```
- Remove a single container (matching the current directory name)
```
docker ps -a | grep -P $(pwd | sed 's;.*/;;') | awk '{print $1}' | xargs -I@ bash -c 'docker stop @ && docker rm @'
```
- Remove everything except the current environment's container
```
docker ps -a | grep -vP $(pwd | sed 's;.*/;;') | awk '{print $1}' | grep -v CONTAINER | xargs -I@ bash -c 'docker stop @ && docker rm @'
```
# Removing docker images
- Remove <none> images
```
docker images | awk '$1=="<none>"{print $3}' | xargs -I@ docker rmi @
```
- Remove a single image (matching the current directory name)
```
docker images | grep -P $(pwd | sed 's;.*/;;') | awk '{print $3}' | xargs -I@ docker rmi @
```
<file_sep>|ユーザーID|ユーザー名|グループID|グループ名|パスワード|
|:-:|:-:|:-:|:-:|:-:|
|1001|nginx|1001|nginx|nginx_pwd|
|1002|jenkins|1002|jenkins|jenkins_pwd|
<file_sep>Launching the Code::Blocks editor
```
codeblocks 1>launch-codeblocks.log 2>&1 &
```
<file_sep># Troubleshooting
- Symptom
```
ERROR 1026 (HY000) at line 100 in file: 'centos-7-6-18-10-healthcheck-mysql-8-X-X-mroonga-X-X-X.sql': [plugin][register] cannot find plugin file: </usr/lib64/groonga/plugins/token_filters/stem.so>
```
- Cause
The stem.so shared-library file is missing; sure enough, it is not there:
```
$find /usr/lib64/groonga/plugins
/usr/lib64/groonga/plugins
/usr/lib64/groonga/plugins/sharding.rb
/usr/lib64/groonga/plugins/sharding
/usr/lib64/groonga/plugins/sharding/logical_enumerator.rb
/usr/lib64/groonga/plugins/sharding/logical_parameters.rb
/usr/lib64/groonga/plugins/sharding/keys_parsable.rb
/usr/lib64/groonga/plugins/sharding/range_expression_builder.rb
/usr/lib64/groonga/plugins/sharding/logical_shard_list.rb
/usr/lib64/groonga/plugins/sharding/logical_table_remove.rb
/usr/lib64/groonga/plugins/sharding/parameters.rb
/usr/lib64/groonga/plugins/sharding/dynamic_columns.rb
/usr/lib64/groonga/plugins/sharding/logical_count.rb
/usr/lib64/groonga/plugins/sharding/logical_select.rb
/usr/lib64/groonga/plugins/sharding/logical_range_filter.rb
/usr/lib64/groonga/plugins/tokenizers
/usr/lib64/groonga/plugins/normalizers
/usr/lib64/groonga/plugins/normalizers/mysql.so
/usr/lib64/groonga/plugins/token_filters
/usr/lib64/groonga/plugins/token_filters/stop_word.so
/usr/lib64/groonga/plugins/ruby
/usr/lib64/groonga/plugins/ruby/eval.rb
/usr/lib64/groonga/plugins/query_expanders
/usr/lib64/groonga/plugins/query_expanders/tsv.so
/usr/lib64/groonga/plugins/functions
/usr/lib64/groonga/plugins/functions/number.so
/usr/lib64/groonga/plugins/functions/string.so
/usr/lib64/groonga/plugins/functions/time.so
/usr/lib64/groonga/plugins/functions/math.so
/usr/lib64/groonga/plugins/functions/vector.so
/usr/lib64/groonga/plugins/functions/index_column.so
```
- Fix
- (untested; a possible sketch follows below)
- Prevention
-
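A possible fix sketch, assuming the stem token filter is shipped as a separate package in the Groonga yum repository (the package name is an assumption and was not verified against this image):
```
# assumed package that provides token_filters/stem.so
yum install -y groonga-token-filter-stem
# confirm the plugin file now exists
find /usr/lib64/groonga/plugins/token_filters -name 'stem.so'
```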
<file_sep>- Launch
```
$KDevelop.AppImage 1>launch-kdevelop.log 2>&1 & </dev/null
```
[KDevelop homepage](https://www.kdevelop.org/download)
[C++ tutorial](https://www.learncpp.com/cpp-tutorial/a3-using-libraries-with-codeblocks/)
[How to create the fuse group](https://github.com/AppImage/AppImageKit/wiki/FUSE)
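A minimal sketch of the fuse-group setup described in the last link, assuming the AppImage is launched by the main user of these environments (kuraine):
```
# create the fuse group and add the launching user to it
groupadd fuse
usermod -aG fuse kuraine
# log in again (or run `newgrp fuse`) for the membership to take effect
```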
<file_sep>DBEAVER_VERSION=X-X-X
GIT_VERSION=2-24-1
JAVA_VERSION=11
MYSQL_VERSION=8-X-X
MAVEN_VERSION=3-6-3
PYTHON_VERSION=3-7-4
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 script-env
EOS
exit 0
}
ENV_REPO=$1;shift
[ -z $ENV_REPO ] && usage
MD_FILE_NAME=.gitignore
while read tgt;do
echo cp $HOME/$ENV_REPO/$MD_FILE_NAME $HOME/$ENV_REPO/$tgt/$MD_FILE_NAME | sh
done < <(ls -l $HOME/$ENV_REPO | grep -P '^d' | awk '{print $9}' | grep -v docker-build-log)
<file_sep>#!/usr/bin/env bash
#Patch for the case where there are multiple users besides root
usage(){
cat <<EOS
Usage:
$0 script-env
or
$0 script-env --debug
EOS
exit 0
}
execute(){
OS_VERSION=$1;shift
while read tgt;do
#Delete every WORKDIR line except the last one found
cmd=$(grep -n -P 'WORKDIR' $tgt/Dockerfile.auto |cut -d' ' -f1|xargs|sed '/^$/d'|awk -v FS=' ' '{$NF="";print $0}' | xargs -I@ echo @ | perl -pe "s;:.*;;;s;^;sed -i ;;s;$;d $tgt/Dockerfile.auto;")
if [ "$SHELL" = 'bash' ];then
echo $cmd | $SHELL
else
echo $cmd
fi
done < <(find $HOME/$ENV_REPO -type d | grep -v docker-log | grep $OS_VERSION | grep -vP mnt)
}
main(){
ENV_REPO=$1;shift
DEBUG=$1;shift
if [ "$DEBUG" = '--debug' ];then
SHELL=: #do nothing (dry run)
else
SHELL=bash #actually execute
fi
[ -z $ENV_REPO ] && usage
export -f execute
find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt' | grep -Po '[a-z]+(-[0-9]{1,}){1,}' | sort | uniq | while read tgt;do execute $tgt ;done
}
main "$@"
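#Usage sketch (assumption: <this-script>.sh stands for whatever name this file is saved under):
#  bash ./<this-script>.sh script-env --debug   #preview the generated sed commands
#  bash ./<this-script>.sh script-env           #apply them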
<file_sep>- singularityコンテナ環境を使ったGPU込みの検証環境
- 自宅マシンはGPUないや。。
- https://qiita.com/mkt3/items/b9f86f5ddf9eb0f43608
- gpuをつかったハンディそうな検証環境構築
- https://qiita.com/exthnet/items/91e8a9f4e620de3c28de
- java
- https://speakerdeck.com/shintanimoto/introduction-to-reactive-programming-using-spring-webflux?slide=21
- lisp
- https://masatoi.hateblo.jp/entry/20151210/1449948614
- vpnの設定くちょー簡単
- https://gihyo.jp/admin/serial/01/ubuntu-recipe/0614
- 表記正規化が大変そう
- https://www.slideshare.net/mobile/WorksApplications/java-82794239
- Kuromoji
- http://www.mwsoft.jp/programming/lucene/kuromoji.html
- Wire shark ネットワーク監視ツール
- ruby ファイル書き込み 読み込みと同じで、引数のオプションを変える
- https://uxmilk.jp/22615
- ruby ファイル拡張子以外を切り出すときにslice使う。
- https://uxmilk.jp/24040
- pytorch bi gram
- https://buildersbox.corp-sansan.com/entry/2019/09/26/110000
- 文章ごとに処理する。文章idふる。
- 形態素解析するかしないか
- する
- mecabかそれ以外か
- しない
- 何文字単位で文章を区切るか
- 区切る際、各文書グラム単位に連番付与
- 連番付与後、グラムごとに出現位置をサマル
- 区切り方で特色が出で面白い!
- Rでバイグラム 分析関数leadなど使うとハンディにできるっぽい
- https://iwsktky.wixsite.com/home/post/r%E4%B8%8A%E3%81%A7-%E7%B0%A1%E5%8D%98%E3%81%AB-n-gram%E3%82%92%E4%BD%9C%E3%81%A3%E3%81%A6%EF%BC%8C%E5%85%B1%E8%B5%B7%E3%83%8D%E3%83%83%E3%83%88%E3%83%AF%E3%83%BC%E3%82%AF%E3%82%92%E4%BD%9C%E3%82%8B%E6%96%B9%E6%B3%95
- 学習済みってのは分散表現を終えた後のって意味やな。ハンディなモデルがWiki以外で欲しい。
- https://qiita.com/Hironsan/items/513b9f93752ecee9e670
- 分散表現はシェルゲイでできる。メカブかました後、unnestしてtranspose
- https://blog.aidemy.net/entry/2017/07/01/184421
- .net可変長戻り読みができる online で試してみる。
- https://oraclesqlpuzzle.ninja-web.net/regex/regex-9-1.html
- csharp開発環境
- https://www.casleyconsulting.co.jp/blog/engineer/181/
- ruby アスタ配列展開
- http://yamakichi.hatenablog.com/entry/2016/10/18/232253
- Jupiter lab notebookの進化系
- https://qiita.com/canonrock16/items/d166c93087a4aafd2db4
- wsl2インスコ
- https://qiita.com/TsuyoshiUshio@github/items/947301bd9317610572fc
- クロス集計
- https://kunst1080.hatenablog.com/entry/2013/06/02/234025
- jenkinsfileの書き方
- https://qiita.com/lufia/items/18cdb01f86a6d5040c60
- データセンターの光景
- https://jp.quora.com/YouTube%E3%81%AF%E4%BD%95%E6%95%85%E3%81%82%E3%82%93%E3%81%AA%E3%81%AB%E4%B8%96%E7%95%8C%E4%B8%AD%E3%81%AE%E5%A4%A7%E9%87%8F%E3%81%AE%E5%8B%95%E7%94%BB%E3%82%92%E4%BF%9D%E5%AD%98%E3%81%97%E3%81%A6%E3%81%84%E3%82%89
- kube概念
- https://qiita.com/Kta-M/items/ce475c0063d3d3f36d5d
- goでsqliteするやつ
- https://github.com/Johniel/go-quadtree/blob/master/src/main.go
- git操作-まちがいそうなやつ
- https://qiita.com/terry_6518/items/44ce2419a78d4121569a#comments
- python マルチスレッド
- https://qiita.com/kenmaro/items/69fdd84e18e793a21790
- マルチスレッドプログラミング
- http://yamatyuu.net/computer/program/vc2013/thread3/index.html
- このノートパソコンかっこいいな
- https://www.itmedia.co.jp/news/spv/2003/06/news003.html
- マルチスレッドのグルーピングに関して
- https://www.isus.jp/hpc/mtp-windows-64core/
- python 小技
- https://qiita.com/_akiyama_/items/b24acf53470a885c70db
- データ前処理フロー https://qiita.com/takashige/items/4882555f4a99a0d96220#%E6%A9%9F%E6%A2%B0%E5%AD%A6%E7%BF%92%E3%82%92%E6%B4%BB%E7%94%A8%E3%81%99%E3%82%8B%E3%82%B7%E3%82%B9%E3%83%86%E3%83%A0%E3%81%AE%E8%A8%AD%E8%A8%88%E3%83%97%E3%83%AD%E3%82%BB%E3%82%B9
- 正規表現のビューワー
- https://qiita.com/yucatio/items/e980550814f66b3450dd
- pythonスクレイピング説明丁寧
- https://qiita.com/HIRO960/items/467ce27bca7c353e3624
- GraphQLのクエリ使い方説明
- https://qiita.com/shunp/items/d85fc47b33e1b3a88167
- n-gramを利用したsqlの自動生成。調べたいことを言語された文章として受け取って、適切なsqlを生成する際の精度についての話
- https://qiita.com/Inowe2457/items/562ef5312cc598711a6c
- 1975好き
- https://note.com/2020willblessus/n/n593ea331e584
- 機械学習
- 概念からの説明 https://ledge.ai/reinforcement-learning/
- kube 運用知見
- https://tech.smartcamp.co.jp/entry/use-kubernetes-one-year?amp=1
- 機械学習 データセットやアプローチについて
- https://github.com/allenai/acl2018-semantic-parsing-tutorial
- csvハンドリング操作の3言語での比較
- https://employment.en-japan.com/engineerhub/entry/2020/02/27/103000?amp=1
- nim エントリポイントの実装例
- https://qiita.com/6in/items/c735cb2ffbe79f3f9d94
- nimのサンプルはこの方の記事あたりを参考に
- https://qiita.com/6in/items/cbb161c401afe054314f
- ruby の標準入力
- https://qiita.com/mogulla3/items/fbc2a46478872bebbc47
- ruby の標準入力
- https://docs.ruby-lang.org/ja/latest/class/IO.html
- api curl きれいにまとまっている。参考になる。
- https://qiita.com/gorilla0513/items/9306d4165637c29370eb
- go標準入力
- https://qiita.com/tnoda_/items/b503a72eac82862d30c6
- go エラーハンドリング
- https://qiita.com/nayuneko/items/3c0b3c0de9e8b27c9548
- uiの部品名とかは参考になるかなー
- https://nablarch.github.io/docs/LATEST/doc/index.html
- https://nablarch.github.io/docs/LATEST/doc/development_tools/ui_dev/doc/reference_jsp_widgets/index.html
- python便利ライブラリ
- IBM 機械学習力入れてんだよな
- https://qiita.com/ishida330/items/53f1b0df2247fab5c6dd
- kube環境構築
- https://qiita.com/ishida330/items/dfff18362ea16aa92f88
- was websphere IBM qiitaでぐぐる
- https://qiita.com/ishida330/items/78d7e19510c141201d69
- was実行環境とかdockerであるぽい
- https://qiita.com/triple4649/items/727eb3358a9fa5414bed
- gcとかの設定いじる手順
- https://qiita.com/ch7821/items/0cecc5909715dc4bf0b6
- jmeter多重度いじるサンプル手順
- https://qiita.com/ch7821/items/50ee9f667016ac9e45af
- cicleciデプロイ手順
- https://qiita.com/gakinchoy7/items/ae31107ef56efb16fe7e
- bash テンプーレト作成
- https://www.m3tech.blog/entry/2018/08/21/bash-scripting
- pytoch インデクシングにも関わってくるぽい cpuだけでなくgpuの力も合わせて計算高速化させようとしているのかな
- https://qiita.com/miyamotok0105/items/1fd1d5c3532b174720cd
- pythoch環境構築 gpuが重要そう
- https://qiita.com/riversun/items/9f8ecbae2645c22853b6
- httperfとabの使い方
- https://hostingstock.net/article/notes/benchmark-tools/
- ngunxとapacheの性能比較
- https://qiita.com/kamihork/items/49e2a363da7d840a4149
- Apache bench 一定時間内に起動する総プロセス数と同時起動プロセス数を指定。多重度がユーザー数で総起動プロセス数を割れば一ユーザーあたりのプロセス数。
- https://qiita.com/flexfirm/items/ac5a2f53cfa933a37192
- Apache jmeter負荷テスト多重度(プロセス同時起動数)をある時間内で制御できてテストするツール
- https://qiita.com/PlanetMeron/items/a604645d6f89b6ce3a14
- このイメージに近いマイリーダー
- https://yoshinorin.net/2019/07/20/nuxt-js-photo-gallery-by-vue-magic-grid/
- この方の環境づくりが参考になる
- https://github.com/YoshinoriN/docker-gitbucket-orchestration
- 検索エンジン用語まとめ
- https://qiita.com/mogulla3/items/23aaffbe29c4e600876
- nodejs groonga接続方式
- https://memo.appri.me/programming/groonga-nroonga-nodejs
- フォルダ構成関連
- https://upura.hatenablog.com/entry/2018/12/28/225234
- ソケットエラー関連
- https://rcmdnk.com/blog/2015/07/03/computer-linux/
- データベースクライアントツール
ベンダー問わないのが対応範囲が広いのがすごい。
- https://qiita.com/Ryooota/items/755e249f5241d2c964e6
- https://qiita.com/12345/items/48f6856e32fd618ea307
- 言語別トークナイズ管理マスタgroonga
- apache spark環境似てる。フローが参考になる。
- http://www.intellilink.co.jp/article/column/bigdata-kk04.html
- 分かち書きpythonによる使用例
- http://testpy.hatenablog.com/entry/2016/10/04/010000
- グリッドレイアウトこれがいいかなー。簡単そう。
- https://coliss.com/articles/build-websites/operation/javascript/magic-grid-for-dynamic-grid.html
- nuxt.jsでできるそう。グリッドレイアウト。
- https://yoshinorin.net/2019/07/20/nuxt-js-photo-gallery-by-vue-magic-grid/
- nuxt.jsインストール
- https://ja.nuxtjs.org/guide/installinglation
- magic-gridは単一ファイルに収まりそう
- https://on-ze.com/archives/7644
- ブログテンプーレトあった
- https://ulab.hatenablog.com/entry/20191006/1570336147
- axiosの使い方。mvc的な機能分割を提供してくれるプラグイン。
- https://public-constructor.com/nuxtjs-with-axios/
- nuxtフレームワークでのaxiosライブラリを使用したコンパクトなアプリ
- https://qiita.com/mgr/items/f2193fd21765be1d34c2
- nuxt.jsでのpost実装例 nuxt.js axios apiでぐぐる
- https://qiita.com/cortyuming/items/3257ccc7936acf1a7989
- nuxt.jsでのmvc感出てる
- https://noumenon-th.net/programming/2019/07/15/axios-module/
- pdfファイルからテキスト抽出できるjar
- https://qiita.com/arc279/items/5b6cf2d0b24ce5a50cae
- elasticsearchのテーブル定義変更に伴うview側のkibanaの横テン串チェックの事例
- https://qiita.com/NAO_MK2/items/2d03d9db1cd7b0ceae04
- elasticsearchのcrud処理の事例とともに概念の説明あり
- https://qiita.com/kiyokiyo_kzsby/items/344fb2e9aead158a5545
- pythonグルーピング事例 単一キーに対する複数バリュー listagg
- https://qiita.com/tag1216/items/b2765e9e87025c01e57f
- pythonのzip関数。egisonにもある。変数展開アスタの使いかた。しらかみゅさんのコメントいつも参考になる。
- https://qiita.com/shibasab/items/836ebfbf496a757c09e2
- perlのオプションまとめがいい。nleとanleとEオプションが使い勝手いいかな。
- https://qiita.com/ohtsuka1317/items/92c339a65533e1e6c6fc
- jqの日付型 環境変数の使い方のサンプル
- https://qiita.com/arc279/items/436d8e7b8653848406e3
- dockerコンテナを複数立ち上げたとき各コンテナで動いているプロセスのログを一元化する事例。
- https://qiita.com/skjune12/items/d88a8eb32794865afcd3
- jqで日付変換 読みやすい形式に変換
- https://qiita.com/sutoh/items/350bb96a06cb237f54af
- mroonga mysqlとのコラボいい。
- https://qiita.com/nayuneko/items/e1d4cad31b9ec23fd12c
- jqフラット化 一度退避してから、直積するイメージ
- https://qiita.com/arc279/items/00b353428120f48a9e06
- perlのexistsぽくてとても重宝するコマンド
- https://qiita.com/mumian1014/items/bb71b0520e457f3b2466
<file_sep>|ユーザーID|ユーザー名|グループID|グループ名|パスワード|
|:-:|:-:|:-:|:-:|:-:|
|1000|oracle|1000|oracle|oracle_pwd|
<file_sep>GIT_VERSION=2-24-1
JAVA_VERSION=11
MAVEN_VERSION=3-6-3
PYTHON_VERSION=3-7-4
<file_sep>GIT_VERSION=2-24-1
PYTHON_VERSION=3-7-4
RSTUDIO_VERSION=1-2-X
R_VERSION=X-X-X
<file_sep>MECAB_IPADIC_VERSION=2-7-0
MECAB_VERSION=0-9-9-6
GIT_VERSION=2-24-1
PYTHON_VERSION=3-7-4
<file_sep># Verifying it works
- https://github.com/ukijumotahaneniarukenia/Hatena-Textbook
- https://qiita.com/naokits/items/8f09ffc8bbc78ade366c
```
$cat a.swift
#!/usr/bin/env swift
let hello = "こんにちは"
print(hello)
// 遊び心で。。
let 挨拶 = "お世話になります"
print(挨拶)
$chmod 700 a.swift
$./a.swift
こんにちは
お世話になります
```
<file_sep># References
- https://qiita.com/taka4sato/items/2c3397ff34c440044978
- https://qiita.com/tand826/items/0c478bf63ead75427782
- https://code-graffiti.com/how-to-use-jupyter-notebook/#toc6
# Checking versions
```
$python3 --version
Python 3.7.4
$ pip3 -V
pip 19.3.1 from /usr/local/lib/python3.7/site-packages/pip (python 3.7)
```
# Installing libraries
Install each library via pip.
Run the library installation as the root user.
```
$pip3 install numpy pandas matplotlib seaborn scikit-learn plotly ipython[notebook]
```
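A quick sanity check that the installed libraries import cleanly (a minimal sketch; it just prints ok on success):
```
python3 -c 'import numpy, pandas, matplotlib, seaborn, sklearn, plotly; print("ok")'
```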
# Checking the jupyter command
```
$ jupyter --version
jupyter core : 4.6.1
jupyter-notebook : 6.0.2
qtconsole : not installed
ipython : 7.11.1
ipykernel : 5.1.3
jupyter client : 5.3.4
jupyter lab : not installed
nbconvert : 5.6.1
ipywidgets : 7.5.1
nbformat : 4.4.0
traitlets : 4.3.3
```
# Launching Jupyter Notebook
Run this as a non-root user.
```
$ jupyter notebook --port 8888 --ip=0.0.0.0 1>launch-jupyter.log 2>&1 &
[1] 278
```
That turned out a bit awkward to work with.
Launching it in the foreground like this seems better; input stays interactive.
```
$ jupyter notebook --port 8888 --ip=0.0.0.0
```
A browser window opens.
Part of the log:
```
[I 17:09:23.749 NotebookApp] Writing notebook server cookie secret to /home/kuraine/.local/share/jupyter/runtime/notebook_cookie_secret
[I 17:09:23.882 NotebookApp] Serving notebooks from local directory: /home/kuraine
[I 17:09:23.882 NotebookApp] The Jupyter Notebook is running at:
[I 17:09:23.882 NotebookApp] http://eb796c05a8c5:8888/?token=<PASSWORD>
[I 17:09:23.882 NotebookApp] or http://127.0.0.1:8888/?token=<PASSWORD>
[I 17:09:23.882 NotebookApp] Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).
[C 17:09:23.907 NotebookApp]
To access the notebook, open this file in a browser:
file:///home/kuraine/.local/share/jupyter/runtime/nbserver-278-open.html
Or copy and paste one of these URLs:
http://eb796c05a8c5:8888/?token=<PASSWORD>
or http://127.0.0.1:8888/?token=<PASSWORD>
START /usr/bin/firefox "/home/kuraine/.local/share/jupyter/runtime/nbserver-278-open.html"
Running without a11y support!
```
When working via the browser, use the token issued by the server.
# Saving the notebook
When you save your work from the browser, the file ends up on the local server like this.
If Untitled.ipynb is present, you can pick up where the previous session left off.
```
$ pwd
/home/kuraine
$ ll
total 8
-rw-rw-r--. 1 kuraine kuraine 848 1月 4 17:17 Untitled.ipynb
-rw-rw-r--. 1 kuraine kuraine 1443 1月 4 17:17 launch_jupyter.log
```
The log shows something like this:
```
[I 17:17:20.501 NotebookApp] Creating new notebook in
[I 17:17:20.535 NotebookApp] Writing notebook-signing key to /home/kuraine/.local/share/jupyter/notebook_secret
[I 17:17:21.102 NotebookApp] Kernel started: 0143cd50-aea8-43df-b248-1ff41696e018
[I 17:17:34.441 NotebookApp] Saving file at /Untitled.ipynb
```
When the browser is closed, the log shows something like this:
```
[Parent 402, Gecko_IOThread] WARNING: pipe error (77): Connection reset by peer: file /builddir/build/BUILD/firefox-68.3.0/ipc/chromium/src/chrome/common/ipc_channel_posix.cc, line 358
[Parent 402, Gecko_IOThread] WARNING: pipe error (108): Connection reset by peer: file /builddir/build/BUILD/firefox-68.3.0/ipc/chromium/src/chrome/common/ipc_channel_posix.cc, line 358
[Parent 402, Gecko_IOThread] WARNING: pipe error (95): Connection reset by peer: file /builddir/build/BUILD/firefox-68.3.0/ipc/chromium/src/chrome/common/ipc_channel_posix.cc, line 358
```
<file_sep>|ユーザーID|ユーザー名|グループID|グループ名|パスワード|
|:-:|:-:|:-:|:-:|:-:|
|1001|mysql|1001|mysql|mysql_pwd|
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 script-env
or
$0 script-env --debug
EOS
exit 0
}
execute(){
OS_VERSION=$1;shift
[ -z $OS_VERSION ] && usage
OS_NAME=$(echo $OS_VERSION | perl -pe 's/^([a-z]+)-(.*)$/\1/g')
TEMPLATE_FILE=$(find $HOME/$ENV_REPO -name "docker-template-Dockerfile-$OS_NAME")
while read tgt;do
#Replace MAIN_USER in the template file
{
echo $tgt
grep -c -vP 'ユーザーID|aine|kuraine|nahato|mujiku|:-:|root' $tgt/env-user.md
grep -vP 'ユーザーID|aine|kuraine|nahato|mujiku|:-:|root' $tgt/env-user.md | awk -v FS='|' -v ORS='' '{print ","$3}'
} | xargs -n3 | \
while read file cnt usr;do
if [ 0 -eq $cnt ];then
cmd=$(echo "sed -n '/^USER/,/^EXPOSE/p' $TEMPLATE_FILE | head -n-1 | perl -pe "s/MAIN_USER/kuraine/g" >>$file/Dockerfile.auto")
if [ "$SHELL" = 'bash' ];then
echo $cmd | $SHELL
else
echo $cmd
fi
fi
for (( i=0;i<$cnt;i++));do
cmd=$(echo "sed -n '/^USER/,/^EXPOSE/p' $TEMPLATE_FILE | head -n-1 | perl -pe "s/MAIN_USER/$(echo $usr | cut -d',' -f$(($i+2)))/g" >>$file/Dockerfile.auto")
if [ "$SHELL" = 'bash' ];then
echo $cmd | $SHELL
else
echo $cmd
fi
done
done
done < <(find $HOME/$ENV_REPO -type d | grep -v docker-log | grep $OS_VERSION | grep -vP mnt)
}
main(){
ENV_REPO=$1;shift
DEBUG=$1;shift
if [ "$DEBUG" = '--debug' ];then
SHELL=: #do nothing (dry run)
else
SHELL=bash #actually execute
fi
[ -z $ENV_REPO ] && usage
export -f execute
find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt' | grep -Po '[a-z]+(-[0-9]{1,}){1,}' | sort | uniq | while read tgt;do execute $tgt ;done
}
main "$@"
<file_sep>GIT_VERSION=2-24-1
PYTHON_VERSION=3-7-4
MONODEVELOP_VERSION=X-X-X
<file_sep>- 公式マニュアル
- http://archive.apache.org/dist/lucene/solr/ref-guide/apache-solr-ref-guide-8.1.pdf
- https://kshigeru.blogspot.com/2012/01/solr-wikipedia-data-import.html
- http://www.mwsoft.jp/programming/munou/wikipedia_solr.html
- https://gist.github.com/satom9to5/7902551
- Wiki data
- https://dumps.wikimedia.org/jawiki/latest/
- Data import
- http://lucene.apache.org/solr/guide/8_4/uploading-structured-data-store-data-with-the-data-import-handler.html#uploading-structured-data-store-data-with-the-data-import-handler
Practice with the demo first:
- https://lucene.apache.org/solr/guide/8_4/uploading-structured-data-store-data-with-the-data-import-handler.html#uploading-structured-data-store-data-with-the-data-import-handler
```
solr@9d28f1194bed ~$solr -e dih
Starting up Solr on port 8983 using command:
"/usr/local/src/solr-8.4.1/bin/solr" start -p 8983 -s "/usr/local/src/solr-8.4.1/example/example-DIH/solr"
NOTE: Please install lsof as this script needs it to determine if Solr is listening on port 8983.
Started Solr server on port 8983 (pid=163). Happy searching!
Solr dih example launched successfully. Direct your Web browser to http://localhost:8983/solr to visit the Solr Admin UI
```
- Starting the process
```
solr@95adbde9b31c /usr/local/src/script-repo$solr start -p 8983
Waiting up to 180 seconds to see Solr running on port 8983 [/]
Started Solr server on port 8983 (pid=828). Happy searching!
```
- Checking the listening port
```
solr@95adbde9b31c /usr/local/src/script-repo$lsof -P -i:8983
COMMAND PID USER FD TYPE DEVICE SIZE/OFF NODE NAME
java 828 solr 152u IPv4 1432559 0t0 TCP *:8983 (LISTEN)
```
- Checking the process
```
solr@95adbde9b31c /usr/local/src/script-repo$ps aux
USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND
solr 1 0.0 0.0 5372 2192 pts/0 Ss+ 19:27 0:00 /bin/bash
solr 7 0.0 0.0 5372 2416 pts/1 Ss 19:28 0:00 /bin/bash
solr 828 17.3 0.7 5423016 239112 pts/1 Sl 19:39 0:05 /usr/local/src/jdk-11/bin/java -server -Xms512m -Xmx512m -XX:+UseG1GC -XX:+PerfDisableSharedMem -XX:+ParallelRefProcEnabled -XX:MaxGCPauseMillis=2
solr 926 0.0 0.0 7008 1560 pts/1 R+ 19:39 0:00 ps aux
```
- Browser access
- http://localhost:8983/solr
```
solr@7b86fab03d5e ~$ls -lh
total 12G
drwxr-xr-x. 3 solr solr 4.0K 2月 11 13:56 dotfile
-rw-rw-r--. 1 solr solr 12G 2月 11 17:34 jawiki-latest-pages-articles.xml
```
- Creating a core
```
solr@95adbde9b31c /usr/local/src/script-repo$solr create -c wiki
WARNING: Using _default configset with data driven schema functionality. NOT RECOMMENDED for production use.
To turn off: bin/solr config -c wiki -p 8983 -action set-user-property -property update.autoCreateFields -value false
Created new core 'wiki'
```
- Editing the config files
Modify the following three files; copy them in from the repo.
- /usr/local/src/solr-8.4.1/server/solr/wiki/conf/managed-schema
- /usr/local/src/solr-8.4.1/server/solr/wiki/conf/solrconfig.xml
- /usr/local/src/solr-8.4.1/server/solr/wiki/jawiki-latest-pages-articles.xml
```
$grep -Po '<.*>' jawiki-latest-pages-articles.xml | head -n100 | awk '/<page>/,/<\/page>/{if($0=="<page>"){print 1,$0}else{print 0,$0}}' | nl
```
- After changing the config files, stop the instance on that port once and restart it
```
solr@95adbde9b31c /usr/local/src/solr-8.4.1/server/solr/wiki/conf$solr stop -p 8983
Sending stop command to Solr running on port 8983 ... waiting up to 180 seconds to allow Jetty process 828 to stop gracefully.
solr@95adbde9b31c /usr/local/src/solr-8.4.1/server/solr/wiki/conf$solr start -p 8983 -s "/usr/local/src/solr-8.4.1/server/solr"
Waiting up to 180 seconds to see Solr running on port 8983 [/]
Started Solr server on port 8983 (pid=1404). Happy searching!
```
- Fetching data from the command line
```
curl -s http://localhost:8983/solr/wiki/select?q=*%3A* | jq
curl -s "http://localhost:8983/solr/wiki/select?q=*%3A*&rows=100&wt=json" | jq
```
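A hedged example of extracting a single field with jq (the title field name is an assumption about the wiki core's schema):
```
curl -s "http://localhost:8983/solr/wiki/select?q=*%3A*&rows=10&fl=title&wt=json" | jq -r '.response.docs[].title'
```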
- Would like to derive the XPath expressions backwards from the jq output
<file_sep># Building the docker image
```
time docker build -t centos-7-6-18-10-racket . | tee log
```
# Creating the docker container
```
docker run --privileged --shm-size=4gb --name centos-7-6-18-10-racket -itd -v /sys/fs/cgroup:/sys/fs/cgroup:ro -v /run/udev:/run/udev -v /run/systemd:/run/systemd -v /tmp/.X11-unix:/tmp/.X11-unix -v /var/lib/dbus:/var/lib/dbus -v /var/run/dbus:/var/run/dbus -v /etc/machine-id:/etc/machine-id centos-7-6-18-10-racket
```
# Allowing X application forwarding
Allow X applications inside the docker container to be forwarded to the docker host. This permits X forwarding from every machine on the local network; it can also be restricted per machine.
```
xhost +local:
```
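A hedged example of the per-machine variant mentioned above (the address is made up):
```
# allow a single host instead of every local connection
xhost +192.168.11.3
# revoke it again later
xhost -192.168.11.3
```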
# Entering the docker container
```
docker exec --user kuraine -it centos-7-6-18-10-racket /bin/bash
```
# Launching DrRacket
```
$drracket 1>~/launch_drracket.log 2>&1 &
[1] 122
$ll
total 0
-rw-rw-r--. 1 kuraine kuraine 0 1月 5 09:17 launch_drracket.log
```
<file_sep>DBEAVER_VERSION=X-X-X
GIT_VERSION=2-24-1
JAVA_VERSION=11
MAVEN_VERSION=3-6-3
MYSQL_VERSION=8-X-X
PYTHON_VERSION=3-7-4
<file_sep>DBEAVER_VERSION=X-X-X
GIT_VERSION=2-24-1
JAVA_VERSION=11
MAVEN_VERSION=3-6-3
PYTHON_VERSION=3-7-4
SQLITE3_VERSION=3-30-0
<file_sep>Everything works except the dbeaver connection.
Probably a compatibility issue between the 16-04 package set and dbeaver.
<file_sep>DBEAVER_VERSION=X-X-X
GIT_VERSION=2-24-1
JAVA_VERSION=11
MARIADB_VERSION=X-X-X
MAVEN_VERSION=3-6-3
PYTHON_VERSION=3-7-4
<file_sep>DBEAVER_VERSION=X-X-X
GIT_VERSION=2-24-1
JAVA_VERSION=11
MAVEN_VERSION=3-6-3
POSTGRES_VERSION=12-X
PYTHON_VERSION=3-7-4
<file_sep>|ユーザーID|ユーザー名|グループID|グループ名|パスワード|
|:-:|:-:|:-:|:-:|:-:|
|1002|td|agent_pwd|1003|nginx|
<file_sep>|ユーザーID|ユーザー名|グループID|グループ名|パスワード|
|:-:|:-:|:-:|:-:|:-:|
|1001|db2fenc1|1001|db2fenc1|db2fenc1_pwd|
|1002|db2inst1|1002|db2inst1|db2inst1_pwd|
<file_sep>CHROMIUM_VERSION=X-X-X
GIT_VERSION=2-24-1
JAVA_VERSION=11
PYTHON_VERSION=3-7-4
<file_sep>Run the following script as the root user
```
$cd /usr/local/src/script-repo
$bash ubuntu-19-10-healthcheck-mysql-8-X-X.sh
```
If the following log output appears on standard output, it worked.
```
mysqld will log errors to /var/log/mysql/error.log
mysqld is running as pid 1411
USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND
mysql 1 0.0 0.0 5108 1440 pts/0 Ss 21:53 0:00 bash /etc/init/run.sh
mysql 8 0.0 0.1 96080 37864 ? S 21:53 0:00 fcitx
mysql 9 0.0 0.0 5372 2192 pts/0 S+ 21:53 0:00 bash
mysql 14 0.0 0.0 6964 796 ? S 21:53 0:00 dbus-launch --autolaunch bc74deaa9e044c079ed6fc963d084157 --binary-syntax --close-stderr
mysql 15 0.0 0.0 7064 1320 ? Ss 21:53 0:00 /usr/bin/dbus-daemon --syslog-only --fork --print-pid 5 --print-address 7 --session
mysql 21 0.0 0.0 7064 1700 ? Ss 21:53 0:00 /usr/bin/dbus-daemon --syslog --fork --print-pid 5 --print-address 7 --config-file /usr/share/fcitx/dbus/daemon.conf
mysql 25 0.0 0.0 5328 432 ? SN 21:53 0:00 /usr/bin/fcitx-dbus-watcher unix:abstract=/tmp/dbus-gmhUjTdojE,guid=d753ea1a2bc3e6e73226ea965ec28558 21
mysql 26 0.0 0.0 73632 14324 ? Sl 21:53 0:00 /usr/lib/mozc/mozc_server
mysql 43 0.0 0.0 5400 2364 pts/1 Ss 21:54 0:00 /bin/bash
root 1340 0.0 0.0 6456 2400 pts/1 S 22:15 0:00 su root
root 1341 0.0 0.0 5372 2368 pts/1 S 22:15 0:00 bash
root 1353 0.0 0.0 5160 1916 pts/1 S+ 22:17 0:00 bash ubuntu-19-10-healthcheck-mysql-8-X-X.sh
mysql 1411 0.0 1.0 1935484 335180 ? Sl 22:17 0:00 mysqld -D --user=mysql
root 1455 0.0 0.0 7008 1560 pts/1 R+ 22:17 0:00 ps aux
2020-05-18T13:17:29.643055Z 0 [System] [MY-013169] [Server] /usr/sbin/mysqld (mysqld 8.0.20-0ubuntu0.19.10.1) initializing of server in progress as process 1364
2020-05-18T13:17:29.650492Z 1 [System] [MY-013576] [InnoDB] InnoDB initialization has started.
2020-05-18T13:17:30.506229Z 1 [System] [MY-013577] [InnoDB] InnoDB initialization has ended.
2020-05-18T13:17:32.501866Z 6 [Note] [MY-010454] [Server] A temporary password is generated for root@localhost: <<PASSWORD>
2020-05-18T13:17:37.060858Z 0 [System] [MY-010116] [Server] /usr/sbin/mysqld (mysqld 8.0.20-0ubuntu0.19.10.1) starting as process 1409
2020-05-18T13:17:37.078212Z 1 [System] [MY-013576] [InnoDB] InnoDB initialization has started.
2020-05-18T13:17:37.355239Z 1 [System] [MY-013577] [InnoDB] InnoDB initialization has ended.
2020-05-18T13:17:37.455126Z 0 [System] [MY-011323] [Server] X Plugin ready for connections. Socket: '/var/run/mysqld/mysqlx.sock' bind-address: '::' port: 33060
2020-05-18T13:17:37.593732Z 0 [Warning] [MY-010068] [Server] CA certificate ca.pem is self signed.
2020-05-18T13:17:37.627311Z 0 [System] [MY-010931] [Server] /usr/sbin/mysqld: ready for connections. Version: '8.0.20-0ubuntu0.19.10.1' socket: '/var/run/mysqld/mysqld.sock' port: 3306 (Ubuntu).
mysql: [Warning] Using a password on the command line interface can be insecure.
mysql: [Warning] Using a password on the command line interface can be insecure.
+----------------+
| user() |
+----------------+
| root@localhost |
+----------------+
+-------------------------+
| version() |
+-------------------------+
| 8.0.20-0ubuntu0.19.10.1 |
+-------------------------+
+---------------------------------+------+
| rsv_args | ele |
+---------------------------------+------+
| [{"x":"8"},{"x":"3"},{"x":"4"}] | 8 |
| [{"x":"8"},{"x":"3"},{"x":"4"}] | 3 |
| [{"x":"8"},{"x":"3"},{"x":"4"}] | 4 |
+---------------------------------+------+
+------+--------------+
| id | item |
+------+--------------+
| 1 | うんこ |
| 2 | もりもり |
| 3 | 森鴎外 |
+------+--------------+
+----------+
| count(*) |
+----------+
| 3 |
+----------+
mysql: [Warning] Using a password on the command line interface can be insecure.
+------------------+
| user() |
+------------------+
| user01@localhost |
+------------------+
+-------------------------+
| version() |
+-------------------------+
| 8.0.20-0ubuntu0.19.10.1 |
+-------------------------+
+---------------------------------+------+
| rsv_args | ele |
+---------------------------------+------+
| [{"x":"8"},{"x":"3"},{"x":"4"}] | 8 |
| [{"x":"8"},{"x":"3"},{"x":"4"}] | 3 |
| [{"x":"8"},{"x":"3"},{"x":"4"}] | 4 |
+---------------------------------+------+
+------+--------------+
| id | item |
+------+--------------+
| 1 | うんこ |
| 2 | もりもり |
| 3 | 森鴎外 |
+------+--------------+
+----------+
| count(*) |
+----------+
| 3 |
+----------+
mysql: [Warning] Using a password on the command line interface can be insecure.
```
<file_sep>DBEAVER_VERSION=X-X-X
GIT_VERSION=2-24-1
JAVA_VERSION=11
MAVEN_VERSION=3-6-3
PERL_VERSION=5-30-0
POSTGRES_VERSION=12-0
PYTHON_VERSION=3-7-4
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 EXPOSE script-env
EOS
exit 0
}
CHK_WORD=$1;shift
ENV_REPO=$1;shift
[ -z $CHK_WORD ] && usage
[ -z $ENV_REPO ] && usage
while read tgt;do
if [ -f $tgt/Dockerfile.sub ];then
RT="$(grep $CHK_WORD $tgt/Dockerfile.sub | tr ' ' '=')"
if [ -z "$RT" ];then
:
else
while read n;do
echo "echo '-p $n' >>$tgt/env-expose.md" | bash
done < <(echo $RT | perl -pe 's/([0-9]+)/\1:\1/g;s/EXPOSE=//g'| tr ' ' '\n')
fi
else
:
fi
if [ -f $tgt/Dockerfile.sub ];then
echo "sed -i /^$/d $tgt/env-expose.md" | bash
else
:
fi
done < <(find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt')
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 script-env
EOS
exit 0
}
ENV_REPO=$1;shift
[ -z $ENV_REPO ] && usage
crontab < $HOME/$ENV_REPO/docker-build-wanted-list
<file_sep>|ユーザーID|ユーザー名|グループID|グループ名|パスワード|
|:-:|:-:|:-:|:-:|:-:|
|1001|rstudio|1001|rstudio|rstudio_pwd|
<file_sep>GROONGA_VERSION=9-1-2
GIT_VERSION=2-24-1
PYTHON_VERSION=3-7-4
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 script-env
EOS
exit 0
}
ENV_REPO=$1;shift
[ -z $ENV_REPO ] && usage
MD_FILE_NAME=env-image.md
while read tgt;do
if [ -f $HOME/$ENV_REPO/$tgt/$MD_FILE_NAME ];then
#echo 1 $tgt
:
else
#echo 0 $tgt
touch $HOME/$ENV_REPO/$tgt/$MD_FILE_NAME
fi
done < <(ls -l $HOME/$ENV_REPO | grep -P '^d' | awk '{print $9}' | grep -v docker-log)
<file_sep>Connections from dbeaver are stable.
The driver was the 12c one, but it handled 19c without any trouble.
Initial setup
Run as the **root** user
```
root docker-container-centos-7-6-18-10-oracle-dbeaver /home/oracle$./init_db.sh delete_cdb
root docker-container-centos-7-6-18-10-oracle-dbeaver /home/oracle$./init_db.sh create_cdb
```
Run as the **oracle** user
After creating the PDBs, waiting roughly a minute before registering the listener works well.
```
oracle docker-container-centos-7-6-18-10-oracle-dbeaver ~$./init_db.sh create_pdb 2
oracle docker-container-centos-7-6-18-10-oracle-dbeaver ~$./init_db.sh create_listener 2
oracle docker-container-centos-7-6-18-10-oracle-dbeaver ~$./init_db.sh create_user 1 2
```
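A quick, hedged connectivity check from inside the container before setting up dbeaver (untested here; the credentials and service names follow the tables below):
```
sqlplus -S sys/ORACLE_PWD@//localhost:1521/ORCLPDB01 as sysdba <<'EOS'
show con_name
select name, open_mode from v$pdbs;
exit
EOS
```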
- Administrator (as sysdba)
|key|value|
|:-:|:-:|
|Host|localhost|
|Port|1521|
|Database|ORCLCDB|
|ユーザー名|sys|
|パスワード|ORACLE_PWD|
|key|value|
|:-:|:-:|
|Host|localhost|
|Port|1521|
|Database|ORCLPDB01|
|ユーザー名|sys|
|パスワード|ORACLE_PWD|
|key|value|
|:-:|:-:|
|Host|localhost|
|Port|1521|
|Database|ORCLPDB02|
|ユーザー名|sys|
|パスワード|ORACLE_PWD|
- Non-administrator users
|key|value|
|:-:|:-:|
|Host|localhost|
|Port|1521|
|Database|ORCLPDB01|
|ユーザー名|user01|
|パスワード|ORACLE_PWD|
|key|value|
|:-:|:-:|
|Host|localhost|
|Port|1521|
|Database|ORCLPDB01|
|ユーザー名|user02|
|パスワード|ORACLE_PWD|
|key|value|
|:-:|:-:|
|Host|localhost|
|Port|1521|
|Database|ORCLPDB02|
|ユーザー名|user01|
|パスワード|<PASSWORD>|
|key|value|
|:-:|:-:|
|Host|localhost|
|Port|1521|
|Database|ORCLPDB02|
|ユーザー名|user02|
|パスワード|<PASSWORD>|
Select the driver's Properties tab and download the driver there.
<file_sep>GIT_VERSION=2-24-1
PYTHON_VERSION=3-7-4
WINE_VERSION=5-0
<file_sep>|ユーザーID|ユーザー名|グループID|グループ名|パスワード|
|:-:|:-:|:-:|:-:|:-:|
|1001|postgres|1001|postgres|postgres_pwd|
<file_sep>- It was a LANG problem
Run Workbench as the mysql user.
Tofu (missing glyphs):
```
$export LANG=ja_JP.utf8
$mysql-workbench
```
No tofu:
```
$export LANG=C
$mysql-workbench
```
```
$mysql-workbench 1>launch-mysql-workbench.log 2>&1 &
```
<file_sep>GIT_VERSION=2-24-1
GROONGA_VERSION=9-1-2
PYTHON_VERSION=3-7-4
<file_sep>A LANG problem
This works:
```
$export LANG=C
$myb
```
This does not:
```
$export LANG=ja_JP.utf8
$myb
```
Registering and querying Japanese data both worked.
Still have not figured out how to make MySQL Workbench print things like inserted-row counts to the console. Would like to.
<file_sep>[PostgreSQL official documentation](https://www.postgresql.jp/document/11/html/plperl-funcs.html)
[Using the packages distributed from the repo is the safe bet](https://qiita.com/tom-sato/items/e1903cb974fb6c6d5664)
<file_sep>|ユーザーID|ユーザー名|グループID|グループ名|パスワード|
|:-:|:-:|:-:|:-:|:-:|
|991|elasticsearch|994|elasticsearch|elasticsearch_pwd|
|990|kibana|993|kibana|kibana_pwd|
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 script-env docker-preprocess-stop-word-list
EOS
exit 0
}
ENV_REPO=$1;shift
STOP_WORD_LIST=$1;shift
[ -z $ENV_REPO ] && usage
[ -z $STOP_WORD_LIST ] && usage
while read tgt;do
if [ -f $tgt/Dockerfile.sub.done ];then
#If Dockerfile.sub.done exists, skip the cleansing
:
else
#If Dockerfile.sub.done does not exist, do the cleansing
while read stop_word;do
echo "sed -i /$stop_word/d $tgt/Dockerfile.sub" | bash
done < $STOP_WORD_LIST
fi
done < <(find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt')
<file_sep>#ENV ACCEPT_EULA=Y
#ENV SA_PASSWORD=<PASSWORD>
<file_sep>#!/bin/bash
TGT_BUILD_IMAGE_EXPECT_LIST="$(ls -l $HOME/script-env | grep -P '^d' | awk '{print $9}' | grep -v docker-log)"
TGT_BUILD_IMAGE_EXPECT_CNT=$(echo "$TGT_BUILD_IMAGE_EXPECT_LIST" | wc -l )
TGT_BUILD_IMAGE_ACTUAL_LIST="$(docker images | awk '{print $1}' | grep -P '(?:-[0-9]){1,}' | xargs -I@ bash -c "echo @ && docker history --human=false @ | sort -rk2 | sed -r 's;\s{1,}; ;g;' | cut -d' ' -f2 | sed -n '2p'" | xargs -n2)"
TGT_BUILD_IMAGE_ACTUAL_SUMMARY_LIST="$( echo "$TGT_BUILD_IMAGE_ACTUAL_LIST" | cut -d' ' -f1)"
TGT_BUILD_IMAGE_ACTUAL_DETAILE_LIST="$TGT_BUILD_IMAGE_ACTUAL_LIST"
TGT_BUILD_IMAGE_ACTUAL_CNT=$(echo "$TGT_BUILD_IMAGE_ACTUAL_DETAILE_LIST" | wc -l)
TGT_BUILD_IMAGE_ACTUAL_TODAY_DONE_LIST="$(echo "$TGT_BUILD_IMAGE_ACTUAL_DETAILE_LIST" | grep "$(date "+%Y-%m-%d")")"
TGT_BUILD_IMAGE_ACTUAL_SUMMARY_TODAY_DONE_LIST="$(echo "$TGT_BUILD_IMAGE_ACTUAL_TODAY_DONE_LIST" | cut -d' ' -f1)"
TGT_BUILD_IMAGE_ACTUAL_DETAILE_TODAY_DONE_LIST="$TGT_BUILD_IMAGE_ACTUAL_TODAY_DONE_LIST"
TGT_BUILD_IMAGE_ACTUAL_TODAY_DONE_CNT=$(echo "$TGT_BUILD_IMAGE_ACTUAL_DETAILE_TODAY_DONE_LIST" | wc -l)
TGT_BUILD_IMAGE_ACTUAL_NON_TODAY_DONE_LIST="$(echo "$TGT_BUILD_IMAGE_ACTUAL_DETAILE_LIST" | grep -v "$(date "+%Y-%m-%d")")"
TGT_BUILD_IMAGE_ACTUAL_SUMMARY_NON_TODAY_DONE_LIST="$(echo "$TGT_BUILD_IMAGE_ACTUAL_NON_TODAY_DONE_LIST" | cut -d' ' -f1)"
TGT_BUILD_IMAGE_ACTUAL_DETAILE_NON_TODAY_DONE_LIST="$TGT_BUILD_IMAGE_ACTUAL_NON_TODAY_DONE_LIST"
TGT_BUILD_IMAGE_ACTUAL_NON_TODAY_DONE_CNT=$(echo "$TGT_BUILD_IMAGE_ACTUAL_DETAILE_NON_TODAY_DONE_LIST" | wc -l)
TGT_BUILD_IMAGE_NON_DONE_LIST="$(echo "$TGT_BUILD_IMAGE_EXPECT_LIST" | grep -vP "$(echo "$TGT_BUILD_IMAGE_ACTUAL_LIST" | xargs | tr ' ' '|')")"
TGT_BUILD_IMAGE_NON_DONE_CNT=$(echo "$TGT_BUILD_IMAGE_NON_DONE_LIST" |wc -l)
echo "Number of images scheduled to build today: $TGT_BUILD_IMAGE_EXPECT_CNT"
echo "$TGT_BUILD_IMAGE_EXPECT_LIST"
echo "Number of images actually built today: $TGT_BUILD_IMAGE_ACTUAL_CNT"
echo "$TGT_BUILD_IMAGE_ACTUAL_LIST"
echo "Of those, images whose build date is not today: $TGT_BUILD_IMAGE_ACTUAL_NON_TODAY_DONE_CNT"
echo "$TGT_BUILD_IMAGE_ACTUAL_NON_TODAY_DONE_LIST"
echo "Of those, images whose build date is today: $TGT_BUILD_IMAGE_ACTUAL_TODAY_DONE_CNT"
echo "$TGT_BUILD_IMAGE_ACTUAL_TODAY_DONE_LIST"
echo "Images not yet built: $TGT_BUILD_IMAGE_NON_DONE_CNT"
echo "$TGT_BUILD_IMAGE_NON_DONE_LIST"
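#Usage sketch (assumption: the script name and log location below are placeholders, not confirmed paths):
#  bash ./<this-script>.sh | tee "$HOME/script-env/docker-build-log/$(date +%Y-%m-%d)-report.log"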
<file_sep># Reverse engineering with MySQL Workbench
- https://ti-tomo-knowledge.hatenablog.com/entry/2018/05/21/082211
# Building the docker image
```
time docker build -t ubuntu-19-10-mysql-workbench . | tee log
```
# Removing docker containers
```
docker ps -qa | xargs -I@ bash -c 'docker stop @ && docker rm @'
```
# Removing docker images
```
docker images | awk '$1=="<none>"{print $3}' | xargs -I@ docker rmi @
```
# Starting the docker container
```
docker run --privileged --shm-size=8gb --name ubuntu-19-10-mysql-workbench -itd -v /etc/localtime:/etc/localtime -v /run/udev:/run/udev -v /run/systemd:/run/systemd -v /tmp/.X11-unix:/tmp/.X11-unix -v /var/lib/dbus:/var/lib/dbus -v /var/run/dbus:/var/run/dbus -v /etc/machine-id:/etc/machine-id -p 3306:3306 ubuntu-19-10-mysql-workbench
```
# Entering the docker container
```
docker exec -it ubuntu-19-10-mysql-workbench /bin/bash
```
# Installing MySQL
- Run this after the container has started, to avoid dbus-related errors
```
cd /usr/local/src && curl -o mysql.deb https://repo.mysql.com/mysql-apt-config_0.8.14-1_all.deb && \
export DEBIAN_FRONTEND=noninteractive && dpkg -i mysql.deb && \
apt install -y mysql-server
```
# Checking the version
```
$mysql --version
mysql Ver 8.0.18-0ubuntu0.19.10.1 for Linux on x86_64 ((Ubuntu))
```
# Preparing the environment
As the root user, wipe and recreate the data files and related directories.
- https://stackoverflow.com/questions/34954455/mysql-daemon-lock-issue
```
rm -rf /var/lib/mysql && \
>/var/log/mysql/error.log && \
mkdir -p /var/run/mysqld && \
chown mysql:mysql /var/run/mysqld && \
mysqld --initialize --user=mysql
```
# Starting the process
Start in background (daemon) mode.
```
mysqld -D --user=mysql
```
Check the process:
```
$ps aux
```
Check the log:
```
$tail -f /var/log/mysql/error.log
```
# Changing the MySQL root user's password
Find the default (temporary) password in the log:
```
$grep password /var/log/mysql/error.log | cut -d" " -f 13
?=%1ihb/B?je
```
Run the following command and enter the value found above (copy-paste from the log):
```
$mysql -uroot -p
```
Change the root user's password:
```
ALTER USER 'root'@'localhost' IDENTIFIED BY 'Mysql3306';
```
Create a regular user:
```
CREATE USER 'user01'@'localhost' IDENTIFIED BY 'Mysql3306';
```
Grant privileges to the regular user:
```
GRANT ALL PRIVILEGES ON mysql.* TO 'user01'@'localhost' WITH GRANT OPTION;
```
Create a database:
```
create database testdb;
```
Grant privileges on the database:
```
grant all privileges on testdb.* to user01@localhost;
```
# Connecting to the database
root user:
```
mysql -uroot -pMysql3306 -Dtestdb
```
Regular user:
```
mysql -uuser01 -pMysql3306 -Dtestdb
```
# Smoke tests
```
select version();
```
```
with sub as(
select '[{"x":"8"},{"x":"3"},{"x":"4"}]' as rsv_args
)
select s1.rsv_args,s2.ele
from
sub s1,json_table(
rsv_args,
"$[*]" columns(
ele longtext path "$.x"
)
) as s2;
```
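For reference, the expected result matches the output captured in the healthcheck log elsewhere in this repo:
```
+---------------------------------+------+
| rsv_args                        | ele  |
+---------------------------------+------+
| [{"x":"8"},{"x":"3"},{"x":"4"}] | 8    |
| [{"x":"8"},{"x":"3"},{"x":"4"}] | 3    |
| [{"x":"8"},{"x":"3"},{"x":"4"}] | 4    |
+---------------------------------+------+
```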
LATERAL is available from the 8.x series onward:
```
$cat ./a.sh
#!/bin/bash
f(){
start_rn=$1;shift;
end_rn=$1;shift;
mysql -uroot -pMysql3306 -t -n < <(cat <<EOS
set @nat=${start_rn}-1;
with nats as(
select @nat := @nat + 1 as n from information_schema.columns limit ${end_rn}
) select s1.n,s2.n from nats s1,lateral(select s2.n from nats s2 where s1.n<=s2.n) s2;
EOS
) 2>/dev/null
}
f "$@"
```
Run it:
```
$./a.sh 1 5
+------+------+
| n | n |
+------+------+
| 1 | 1 |
| 1 | 2 |
| 1 | 3 |
| 1 | 4 |
| 1 | 5 |
| 2 | 2 |
| 2 | 3 |
| 2 | 4 |
| 2 | 5 |
| 3 | 3 |
| 3 | 4 |
| 3 | 5 |
| 4 | 4 |
| 4 | 5 |
| 5 | 5 |
+------+------+
```
# Creating a user-defined function (UDF)
- https://qiita.com/onunu/items/71064bbf48dc40c6ce11
- https://qiita.com/okumurakengo/items/1208db98f98cb360c7ba
```
$vi factorial.cpp
$gcc --version
gcc (Ubuntu 9.2.1-9ubuntu2) 9.2.1 20191008
Copyright (C) 2019 Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
```
# Setting environment variables
```
$export CPATH=/usr/include/mysql:/usr/include/c++/9
```
# Compiling
```
$gcc factorial.cpp -shared -o /usr/lib/mysql/plugin/factorial.so
```
- Check that the shared-library file was created
```
$ll /usr/lib/mysql/plugin/factorial.so
-rwxr-xr-x. 1 root root 23640 Jan 19 17:34 /usr/lib/mysql/plugin/factorial.so*
```
# Running it
```
$mysql -uuser01 -pMysql3306 -Dtestdb
mysql: [Warning] Using a password on the command line interface can be insecure.
Welcome to the MySQL monitor. Commands end with ; or \g.
Your MySQL connection id is 11
Server version: 8.0.18-0ubuntu0.19.10.1 (Ubuntu)
Copyright (c) 2000, 2019, Oracle and/or its affiliates. All rights reserved.
Oracle is a registered trademark of Oracle Corporation and/or its
affiliates. Other names may be trademarks of their respective
owners.
Type 'help;' or '\h' for help. Type '\c' to clear the current input statement.
mysql> create function factorial returns string soname 'factorial.so';
Query OK, 0 rows affected (0.03 sec)
mysql> select factorial(6);
+--------------+
| factorial(6) |
+--------------+
| 720 |
+--------------+
1 row in set (0.00 sec)
```
# MysqlWorkBench
```
mysql@151a03d725de:~$myb
[1] 1037
```




<file_sep># Troubleshooting
- Symptom
- Quite stuck on this one. Try an Ubuntu release other than 19-10.
```
!SESSION 2020-05-04 14:39:00.254 -----------------------------------------------
eclipse.buildId=unknown
java.version=1.8.0_252
java.vendor=Private Build
BootLoader constants: OS=linux, ARCH=x86_64, WS=gtk, NL=ja_JP
Command-line arguments: -os linux -ws gtk -arch x86_64
!ENTRY org.eclipse.osgi 4 0 2020-05-04 14:39:01.356
!MESSAGE バンドル org.eclipse.ui.ide (147) を自動的に有効化している間にエラーが発生しました。
!STACK 0
org.osgi.framework.BundleException: バンドル org.eclipse.ui.ide の org.eclipse.ui.internal.ide.IDEWorkbenchPlugin.start() での例外。
at org.eclipse.osgi.internal.framework.BundleContextImpl.startActivator(BundleContextImpl.java:863)
at org.eclipse.osgi.internal.framework.BundleContextImpl.start(BundleContextImpl.java:791)
at org.eclipse.osgi.internal.framework.EquinoxBundle.startWorker0(EquinoxBundle.java:1015)
at org.eclipse.osgi.internal.framework.EquinoxBundle$EquinoxModule.startWorker(EquinoxBundle.java:365)
at org.eclipse.osgi.container.Module.doStart(Module.java:603)
at org.eclipse.osgi.container.Module.start(Module.java:467)
at org.eclipse.osgi.framework.util.SecureAction.start(SecureAction.java:493)
at org.eclipse.osgi.internal.hooks.EclipseLazyStarter.postFindLocalClass(EclipseLazyStarter.java:117)
at org.eclipse.osgi.internal.loader.classpath.ClasspathManager.findLocalClass(ClasspathManager.java:571)
at org.eclipse.osgi.internal.loader.ModuleClassLoader.findLocalClass(ModuleClassLoader.java:346)
at org.eclipse.osgi.internal.loader.BundleLoader.findLocalClass(BundleLoader.java:398)
at org.eclipse.osgi.internal.loader.sources.SingleSourcePackage.loadClass(SingleSourcePackage.java:41)
at org.eclipse.osgi.internal.loader.BundleLoader.findClass(BundleLoader.java:473)
at org.eclipse.osgi.internal.loader.ModuleClassLoader.loadClass(ModuleClassLoader.java:171)
at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
at org.jkiss.dbeaver.ui.app.standalone.DBeaverApplication.setIDEWorkspace(DBeaverApplication.java:276)
at org.jkiss.dbeaver.ui.app.standalone.DBeaverApplication.start(DBeaverApplication.java:161)
at org.eclipse.equinox.internal.app.EclipseAppHandle.run(EclipseAppHandle.java:203)
at org.eclipse.core.runtime.internal.adaptor.EclipseAppLauncher.runApplication(EclipseAppLauncher.java:137)
at org.eclipse.core.runtime.internal.adaptor.EclipseAppLauncher.start(EclipseAppLauncher.java:107)
at org.eclipse.core.runtime.adaptor.EclipseStarter.run(EclipseStarter.java:401)
at org.eclipse.core.runtime.adaptor.EclipseStarter.run(EclipseStarter.java:255)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.eclipse.equinox.launcher.Main.invokeFramework(Main.java:657)
at org.eclipse.equinox.launcher.Main.basicRun(Main.java:594)
at org.eclipse.equinox.launcher.Main.run(Main.java:1447)
at org.eclipse.equinox.launcher.Main.main(Main.java:1420)
Caused by: java.lang.UnsatisfiedLinkError: Could not load SWT library. Reasons:
no swt-pi4-gtk-4932r18 in java.library.path
no swt-pi4-gtk in java.library.path
Can't load library: /home/mysql/.swt/lib/linux/x86_64/libswt-pi4-gtk-4932r18.so
Can't load library: /home/mysql/.swt/lib/linux/x86_64/libswt-pi4-gtk.so
at org.eclipse.swt.internal.Library.loadLibrary(Library.java:342)
at org.eclipse.swt.internal.Library.loadLibrary(Library.java:256)
at org.eclipse.swt.internal.gtk.OS.<clinit>(OS.java:90)
at org.eclipse.swt.internal.Converter.wcsToMbcs(Converter.java:209)
at org.eclipse.swt.internal.Converter.wcsToMbcs(Converter.java:155)
at org.eclipse.swt.widgets.Display.<clinit>(Display.java:164)
at org.eclipse.ui.internal.ide.IDEWorkbenchPlugin.createProblemsViews(IDEWorkbenchPlugin.java:391)
at org.eclipse.ui.internal.ide.IDEWorkbenchPlugin.start(IDEWorkbenchPlugin.java:348)
at org.eclipse.osgi.internal.framework.BundleContextImpl$3.run(BundleContextImpl.java:842)
at org.eclipse.osgi.internal.framework.BundleContextImpl$3.run(BundleContextImpl.java:1)
at java.security.AccessController.doPrivileged(Native Method)
at org.eclipse.osgi.internal.framework.BundleContextImpl.startActivator(BundleContextImpl.java:834)
... 29 more
Root exception:
java.lang.UnsatisfiedLinkError: Could not load SWT library. Reasons:
no swt-pi4-gtk-4932r18 in java.library.path
no swt-pi4-gtk in java.library.path
Can't load library: /home/mysql/.swt/lib/linux/x86_64/libswt-pi4-gtk-4932r18.so
Can't load library: /home/mysql/.swt/lib/linux/x86_64/libswt-pi4-gtk.so
at org.eclipse.swt.internal.Library.loadLibrary(Library.java:342)
at org.eclipse.swt.internal.Library.loadLibrary(Library.java:256)
at org.eclipse.swt.internal.gtk.OS.<clinit>(OS.java:90)
at org.eclipse.swt.internal.Converter.wcsToMbcs(Converter.java:209)
at org.eclipse.swt.internal.Converter.wcsToMbcs(Converter.java:155)
at org.eclipse.swt.widgets.Display.<clinit>(Display.java:164)
at org.eclipse.ui.internal.ide.IDEWorkbenchPlugin.createProblemsViews(IDEWorkbenchPlugin.java:391)
at org.eclipse.ui.internal.ide.IDEWorkbenchPlugin.start(IDEWorkbenchPlugin.java:348)
at org.eclipse.osgi.internal.framework.BundleContextImpl$3.run(BundleContextImpl.java:842)
at org.eclipse.osgi.internal.framework.BundleContextImpl$3.run(BundleContextImpl.java:1)
at java.security.AccessController.doPrivileged(Native Method)
at org.eclipse.osgi.internal.framework.BundleContextImpl.startActivator(BundleContextImpl.java:834)
at org.eclipse.osgi.internal.framework.BundleContextImpl.start(BundleContextImpl.java:791)
at org.eclipse.osgi.internal.framework.EquinoxBundle.startWorker0(EquinoxBundle.java:1015)
at org.eclipse.osgi.internal.framework.EquinoxBundle$EquinoxModule.startWorker(EquinoxBundle.java:365)
at org.eclipse.osgi.container.Module.doStart(Module.java:603)
at org.eclipse.osgi.container.Module.start(Module.java:467)
at org.eclipse.osgi.framework.util.SecureAction.start(SecureAction.java:493)
at org.eclipse.osgi.internal.hooks.EclipseLazyStarter.postFindLocalClass(EclipseLazyStarter.java:117)
at org.eclipse.osgi.internal.loader.classpath.ClasspathManager.findLocalClass(ClasspathManager.java:571)
at org.eclipse.osgi.internal.loader.ModuleClassLoader.findLocalClass(ModuleClassLoader.java:346)
at org.eclipse.osgi.internal.loader.BundleLoader.findLocalClass(BundleLoader.java:398)
at org.eclipse.osgi.internal.loader.sources.SingleSourcePackage.loadClass(SingleSourcePackage.java:41)
at org.eclipse.osgi.internal.loader.BundleLoader.findClass(BundleLoader.java:473)
at org.eclipse.osgi.internal.loader.ModuleClassLoader.loadClass(ModuleClassLoader.java:171)
at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
at org.jkiss.dbeaver.ui.app.standalone.DBeaverApplication.setIDEWorkspace(DBeaverApplication.java:276)
at org.jkiss.dbeaver.ui.app.standalone.DBeaverApplication.start(DBeaverApplication.java:161)
at org.eclipse.equinox.internal.app.EclipseAppHandle.run(EclipseAppHandle.java:203)
at org.eclipse.core.runtime.internal.adaptor.EclipseAppLauncher.runApplication(EclipseAppLauncher.java:137)
at org.eclipse.core.runtime.internal.adaptor.EclipseAppLauncher.start(EclipseAppLauncher.java:107)
at org.eclipse.core.runtime.adaptor.EclipseStarter.run(EclipseStarter.java:401)
at org.eclipse.core.runtime.adaptor.EclipseStarter.run(EclipseStarter.java:255)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.eclipse.equinox.launcher.Main.invokeFramework(Main.java:657)
at org.eclipse.equinox.launcher.Main.basicRun(Main.java:594)
at org.eclipse.equinox.launcher.Main.run(Main.java:1447)
at org.eclipse.equinox.launcher.Main.main(Main.java:1420)
!ENTRY org.eclipse.osgi 4 0 2020-05-04 14:39:01.358
!MESSAGE アプリケーション・エラー
!STACK 1
java.lang.NoClassDefFoundError: org/eclipse/ui/internal/ide/ChooseWorkspaceData
at org.jkiss.dbeaver.ui.app.standalone.DBeaverApplication.setIDEWorkspace(DBeaverApplication.java:276)
at org.jkiss.dbeaver.ui.app.standalone.DBeaverApplication.start(DBeaverApplication.java:161)
at org.eclipse.equinox.internal.app.EclipseAppHandle.run(EclipseAppHandle.java:203)
at org.eclipse.core.runtime.internal.adaptor.EclipseAppLauncher.runApplication(EclipseAppLauncher.java:137)
at org.eclipse.core.runtime.internal.adaptor.EclipseAppLauncher.start(EclipseAppLauncher.java:107)
at org.eclipse.core.runtime.adaptor.EclipseStarter.run(EclipseStarter.java:401)
at org.eclipse.core.runtime.adaptor.EclipseStarter.run(EclipseStarter.java:255)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.eclipse.equinox.launcher.Main.invokeFramework(Main.java:657)
at org.eclipse.equinox.launcher.Main.basicRun(Main.java:594)
at org.eclipse.equinox.launcher.Main.run(Main.java:1447)
at org.eclipse.equinox.launcher.Main.main(Main.java:1420)
Caused by: java.lang.ClassNotFoundException: バンドル org.eclipse.ui.ide (147) を自動的に有効化している間にエラーが発生しました。
at org.eclipse.osgi.internal.hooks.EclipseLazyStarter.postFindLocalClass(EclipseLazyStarter.java:126)
at org.eclipse.osgi.internal.loader.classpath.ClasspathManager.findLocalClass(ClasspathManager.java:571)
at org.eclipse.osgi.internal.loader.ModuleClassLoader.findLocalClass(ModuleClassLoader.java:346)
at org.eclipse.osgi.internal.loader.BundleLoader.findLocalClass(BundleLoader.java:398)
at org.eclipse.osgi.internal.loader.sources.SingleSourcePackage.loadClass(SingleSourcePackage.java:41)
at org.eclipse.osgi.internal.loader.BundleLoader.findClass(BundleLoader.java:473)
at org.eclipse.osgi.internal.loader.ModuleClassLoader.loadClass(ModuleClassLoader.java:171)
at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
... 15 more
Caused by: org.osgi.framework.BundleException: バンドル org.eclipse.ui.ide の org.eclipse.ui.internal.ide.IDEWorkbenchPlugin.start() での例外。
at org.eclipse.osgi.internal.framework.BundleContextImpl.startActivator(BundleContextImpl.java:863)
at org.eclipse.osgi.internal.framework.BundleContextImpl.start(BundleContextImpl.java:791)
at org.eclipse.osgi.internal.framework.EquinoxBundle.startWorker0(EquinoxBundle.java:1015)
at org.eclipse.osgi.internal.framework.EquinoxBundle$EquinoxModule.startWorker(EquinoxBundle.java:365)
at org.eclipse.osgi.container.Module.doStart(Module.java:603)
at org.eclipse.osgi.container.Module.start(Module.java:467)
at org.eclipse.osgi.framework.util.SecureAction.start(SecureAction.java:493)
at org.eclipse.osgi.internal.hooks.EclipseLazyStarter.postFindLocalClass(EclipseLazyStarter.java:117)
... 22 more
Caused by: java.lang.UnsatisfiedLinkError: Could not load SWT library. Reasons:
no swt-pi4-gtk-4932r18 in java.library.path
no swt-pi4-gtk in java.library.path
Can't load library: /home/mysql/.swt/lib/linux/x86_64/libswt-pi4-gtk-4932r18.so
Can't load library: /home/mysql/.swt/lib/linux/x86_64/libswt-pi4-gtk.so
at org.eclipse.swt.internal.Library.loadLibrary(Library.java:342)
at org.eclipse.swt.internal.Library.loadLibrary(Library.java:256)
at org.eclipse.swt.internal.gtk.OS.<clinit>(OS.java:90)
at org.eclipse.swt.internal.Converter.wcsToMbcs(Converter.java:209)
at org.eclipse.swt.internal.Converter.wcsToMbcs(Converter.java:155)
at org.eclipse.swt.widgets.Display.<clinit>(Display.java:164)
at org.eclipse.ui.internal.ide.IDEWorkbenchPlugin.createProblemsViews(IDEWorkbenchPlugin.java:391)
at org.eclipse.ui.internal.ide.IDEWorkbenchPlugin.start(IDEWorkbenchPlugin.java:348)
at org.eclipse.osgi.internal.framework.BundleContextImpl$3.run(BundleContextImpl.java:842)
at org.eclipse.osgi.internal.framework.BundleContextImpl$3.run(BundleContextImpl.java:1)
at java.security.AccessController.doPrivileged(Native Method)
at org.eclipse.osgi.internal.framework.BundleContextImpl.startActivator(BundleContextImpl.java:834)
... 29 more
```
- Cause
The SWT JNI native library is missing. It is provided by this package:
```
apt install -y libswt-webkit-gtk-4-jni
```
- Fix
Install it:
```
apt install -y libswt-webkit-gtk-4-jni
```
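To confirm the native library actually landed (a hedged check; the exact .so file names vary by package version):
```
dpkg -L libswt-webkit-gtk-4-jni | grep '\.so'
```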
- Prevention
Registered in script-repo.
<file_sep>GIT_VERSION=2-24-1
NODEJS_VERSION=12-X-X
PYTHON_VERSION=3-7-4
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
First argument: keyword
Second argument: merge source file
$0 env centos-7-6-18-10-config-env.sh
EOS
}
KEYWORD=$1
MERGE_SRC_FILE=$2
TGT_DIR="/usr/local/src/script-repo"
[ -z $KEYWORD ] && usage && exit 1
[ -z $MERGE_SRC_FILE ] && usage && exit 1
MERGE_CONTENT="$(while read MERGE_TGT_FILE;do
comm -13 --nocheck-order <(sort $MERGE_SRC_FILE | sed -r '/^$/d') <(sort $MERGE_TGT_FILE | sed -r '/^$/d')
done < <(ls $TGT_DIR | grep $KEYWORD) | sort | uniq)"
echo "$MERGE_CONTENT" >>$MERGE_SRC_FILE
echo 'source $HOME/.bashrc' >>$MERGE_SRC_FILE
#Remove every source command except the one on the last line
while true;do
CMD="$(grep -n -P 'source \$HOME/\.bashrc' $MERGE_SRC_FILE | sed '$d' | perl -pe 's/:.*//' | xargs -I@ echo "sed -i '@d' $MERGE_SRC_FILE")"
[ -z "$CMD" ] && break
[ -z "$CMD" ] || echo "$CMD" | sh
done
#Apply the merge to the other target files
while read MERGE_TGT_FILE;do
[ $MERGE_TGT_FILE == $MERGE_SRC_FILE ] && :
[ $MERGE_TGT_FILE == $MERGE_SRC_FILE ] || echo "cp "$MERGE_SRC_FILE $MERGE_TGT_FILE | sh
done < <(ls $TGT_DIR | grep $KEYWORD)
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 script-env
EOS
exit 0
}
execute(){
OS_VERSION=$1;shift
EDITOR=$1;shift
[ -z $OS_VERSION ] && usage
OS_NAME=$(echo $OS_VERSION | perl -pe 's/^([a-z]+)-(.*)$/\1/g')
IMAGE_VERSION=$(echo $OS_VERSION | perl -pe 's/^([a-z]+)-(.*)$/\2/g')
if [ "centos" == $OS_NAME ];then
IMAGE_VERSION=$(echo $IMAGE_VERSION | perl -pe 's/-/\./;s/-/\./;s/-//;')
fi
if [ "ubuntu" == $OS_NAME ];then
IMAGE_VERSION=$(echo $IMAGE_VERSION | perl -pe 's/-/\./;')
fi
TEMPLATE_FILE=$(find $HOME/$REPO -name "docker-template-Dockerfile-$OS_NAME")
EDITOR_LIST="$(ls $HOME/script-env/env-editor-* | grep $OS_NAME | awk -v FS='-' -v OFS='-' '{$1="";$2="";$3="";$4="";print $0}' | sed -r 's/^-{1,}//g' | sort | uniq)"
while read tgt;do
#Replace IMAGE_VERSION in the template file
if [ -f $tgt/env-image.md ];then
RT="$(grep FROM $tgt/env-image.md)"
if [ -z "$RT" ];then
#環境個別のイメージファイルがない場合
echo "sed 's;BASE_IMAGE;FROM $OS_NAME:$IMAGE_VERSION;' $TEMPLATE_FILE >$tgt/Dockerfile.auto" | bash
else
#When the environment has its own image file
echo "sed 's;BASE_IMAGE;$RT;' $TEMPLATE_FILE >$tgt/Dockerfile.auto" | bash
fi
else
#When the environment has no image file of its own
:
fi
echo "sed -i '/^USER/,/^EXPOSE/d' $tgt/Dockerfile.auto" | bash
#Replace DOCKERFILE_ARG in the template file
cat $tgt/env-build-arg.md | sed 's/=.*//;s/^/ARG /;' >/tmp/env-build-arg-$(echo $tgt | perl -pe 's;/;-;g')
echo "sed -i '/DOCKERFILE_ARG/r /tmp/env-build-arg-$(echo $tgt | perl -pe 's;/;-;g')' $tgt/Dockerfile.auto" | bash
echo "sed -i '/DOCKERFILE_ARG/d' $tgt/Dockerfile.auto" | bash
#Replace DOCKERFILE_ENV in the template file
echo "sed -i '/DOCKERFILE_ENV/r $tgt/env-env.md' $tgt/Dockerfile.auto" | bash
echo "sed -i '/DOCKERFILE_ENV/d' $tgt/Dockerfile.auto" | bash
#Replace DOCKERFILE_EDITOR in the template file
for n in "$EDITOR_LIST";do
#A bit fiddly until the editor is determined
EDITOR="$(echo "echo" $tgt "| grep -Po '("$(echo $n | tr ' ' '|')")$'" | bash)"
if [ -z "$EDITOR" ];then
#Case: vim, or env-editor is undefined and the environment directory does not explicitly name vim
echo "sed -i '/DOCKERFILE_EDITOR/d' $tgt/Dockerfile.auto" | bash
else
#Case: environment directory that has an env-editor definition
echo "sed -i '/DOCKERFILE_EDITOR/r $(find $HOME/$REPO -maxdepth 1 -type f -name "env-editor*" | grep $OS_NAME | grep -P "$EDITOR$")' $tgt/Dockerfile.auto" | bash
echo "sed -i '/DOCKERFILE_EDITOR/d' $tgt/Dockerfile.auto" | bash
fi
done
#Replace MAIN_USER in the template file
{
echo $tgt
grep -c -vP 'ユーザーID|aine|kuraine|nahato|mujiku|:-:|root' $tgt/env-user.md
grep -vP 'ユーザーID|aine|kuraine|nahato|mujiku|:-:|root' $tgt/env-user.md | awk -v FS='|' -v ORS='' '{print ","$3}'
} | xargs -n3 | \
while read file cnt usr;do
if [ 0 -eq $cnt ];then
echo "sed -n '/^USER/,/^EXPOSE/p' $TEMPLATE_FILE | head -n-1 | perl -pe "s/MAIN_USER/kuraine/g" >>$file/Dockerfile.auto" | bash
fi
for (( i=0;i<$cnt;i++));do
echo "sed -n '/^USER/,/^EXPOSE/p' $TEMPLATE_FILE | head -n-1 | perl -pe "s/MAIN_USER/$(echo $usr | cut -d',' -f$(($i+2)))/g" >>$file/Dockerfile.auto" | bash
done
done
#Replace EXPOSE in the template file
{
echo $tgt
grep -c -P '\-p' $tgt/env-expose.md
grep -P '\-p' $tgt/env-expose.md | awk -v ORS='' '{print ","$1$2}'
} | xargs -n3 | \
while read file cnt port;do
for (( i=0;i<$cnt;i++));do
echo "sed -n '/EXPOSE/p' $TEMPLATE_FILE | perl -pe "s/PORT/$(echo $port | cut -d',' -f$(($i+2))|sed -r 's/.*://')/g" >>$file/Dockerfile.auto" | bash
done
done
#Delete every WORKDIR line except the last one found
grep -n -P 'WORKDIR' $tgt/Dockerfile.auto | cut -d' ' -f1 | xargs | sed '/^$/d' | awk -v FS=' ' '{$NF="";print $0}' | xargs -I@ echo @ | perl -pe "s;:.*;;;s;^;sed -i ;;s;$;d $tgt/Dockerfile.auto;" | bash
done < <(find $HOME/$REPO -type d | grep -v docker-log | grep $OS_VERSION | grep -vP mnt)
rm -rf /tmp/env-build-arg*
}
main(){
REPO="$1";shift
[ -z $REPO ] && usage
bash P04-環境ディレクトリ内のmntディレクトリを削除する.sh $REPO | bash
export -f execute
find $HOME/$REPO -type d | grep -Po '[a-z]+(-[0-9]{1,}){1,}' | sort | uniq | while read tgt;do execute $tgt ;done
}
main "$@"
<file_sep>APACHE_SOLR_VERSION=8-5-1
GIT_VERSION=2-24-1
JAVA_VERSION=11
PYTHON_VERSION=3-7-4
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 script-env
EOS
exit 0
}
ENV_REPO=$1;shift
[ -z $ENV_REPO ] && usage
while read tgt;do
if [ -f $tgt/Dockerfile.asis ];then
:
else
#Rename when no .asis file exists yet
echo "mv $tgt/Dockerfile $tgt/Dockerfile.asis"
fi
done < <(find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt')
<file_sep>```
https://lucene.apache.org/solr/guide/8_4/solr-tutorial.html#solr-tutorial
Create a core (roughly the equivalent of creating a database)
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$bin/solr start -e techproducts
Creating Solr home directory /usr/local/src/solr-8.5.1/example/techproducts/solr
Starting up Solr on port 8983 using command:
"bin/solr" start -p 8983 -s "example/techproducts/solr"
Waiting up to 180 seconds to see Solr running on port 8983 [/]
Started Solr server on port 8983 (pid=229). Happy searching!
Created new core 'techproducts'
Indexing tech product example docs from /usr/local/src/solr-8.5.1/example/exampledocs
SimplePostTool version 5.0.0
Posting files to [base] url http://localhost:8983/solr/techproducts/update using content-type application/xml...
POSTing file hd.xml to [base]
POSTing file solr.xml to [base]
POSTing file gb18030-example.xml to [base]
POSTing file ipod_other.xml to [base]
POSTing file ipod_video.xml to [base]
POSTing file mem.xml to [base]
POSTing file monitor2.xml to [base]
POSTing file vidcard.xml to [base]
POSTing file money.xml to [base]
POSTing file monitor.xml to [base]
POSTing file utf8-example.xml to [base]
POSTing file mp500.xml to [base]
POSTing file sd500.xml to [base]
POSTing file manufacturers.xml to [base]
14 files indexed.
COMMITting Solr index changes to http://localhost:8983/solr/techproducts/update...
Time spent: 0:00:00.373
Solr techproducts example launched successfully. Direct your Web browser to http://localhost:8983/solr to visit the Solr Admin UI
Access the following URL from a browser
http://localhost:8983/solr
Also try loading the bundled sample data
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$ll example/exampledocs/*
-rw-r--r--. 1 solr solr 959 4月 8 14:02 example/exampledocs/books.csv
-rw-r--r--. 1 solr solr 1148 4月 8 14:02 example/exampledocs/books.json
-rw-r--r--. 1 solr solr 1333 4月 8 14:02 example/exampledocs/gb18030-example.xml
-rw-r--r--. 1 solr solr 2245 4月 8 14:02 example/exampledocs/hd.xml
-rw-r--r--. 1 solr solr 2074 4月 8 14:02 example/exampledocs/ipod_other.xml
-rw-r--r--. 1 solr solr 2109 4月 8 14:02 example/exampledocs/ipod_video.xml
-rw-r--r--. 1 solr solr 2801 4月 8 14:02 example/exampledocs/manufacturers.xml
-rw-r--r--. 1 solr solr 3090 4月 8 14:02 example/exampledocs/mem.xml
-rw-r--r--. 1 solr solr 2156 4月 8 14:02 example/exampledocs/money.xml
-rw-r--r--. 1 solr solr 1420 4月 8 14:02 example/exampledocs/monitor.xml
-rw-r--r--. 1 solr solr 1402 4月 8 14:02 example/exampledocs/monitor2.xml
-rw-r--r--. 1 solr solr 178 4月 8 14:02 example/exampledocs/more_books.jsonl
-rw-r--r--. 1 solr solr 1976 4月 8 14:02 example/exampledocs/mp500.xml
-rw-r--r--. 1 solr solr 27478 4月 8 16:01 example/exampledocs/post.jar
-rw-r--r--. 1 solr solr 235 4月 8 14:02 example/exampledocs/sample.html
-rw-r--r--. 1 solr solr 1684 4月 8 14:02 example/exampledocs/sd500.xml
-rw-r--r--. 1 solr solr 21052 4月 8 14:02 example/exampledocs/solr-word.pdf
-rw-r--r--. 1 solr solr 1810 4月 8 14:02 example/exampledocs/solr.xml
-rwxr-xr-x. 1 solr solr 3742 4月 8 14:02 example/exampledocs/test_utf8.sh*
-rw-r--r--. 1 solr solr 1835 4月 8 14:02 example/exampledocs/utf8-example.xml
-rw-r--r--. 1 solr solr 2697 4月 8 14:02 example/exampledocs/vidcard.xml
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$bin/post -c techproducts example/exampledocs/*
/usr/local/src/jdk-11/bin/java -classpath /usr/local/src/solr-8.5.1/dist/solr-core-8.5.1.jar -Dauto=yes -Dc=techproducts -Ddata=files org.apache.solr.util.SimplePostTool example/exampledocs/books.csv example/exampledocs/books.json example/exampledocs/gb18030-example.xml example/exampledocs/hd.xml example/exampledocs/ipod_other.xml example/exampledocs/ipod_video.xml example/exampledocs/manufacturers.xml example/exampledocs/mem.xml example/exampledocs/money.xml example/exampledocs/monitor.xml example/exampledocs/monitor2.xml example/exampledocs/more_books.jsonl example/exampledocs/mp500.xml example/exampledocs/post.jar example/exampledocs/sample.html example/exampledocs/sd500.xml example/exampledocs/solr-word.pdf example/exampledocs/solr.xml example/exampledocs/test_utf8.sh example/exampledocs/utf8-example.xml example/exampledocs/vidcard.xml
SimplePostTool version 5.0.0
Posting files to [base] url http://localhost:8983/solr/techproducts/update...
Entering auto mode. File endings considered are xml,json,jsonl,csv,pdf,doc,docx,ppt,pptx,xls,xlsx,odt,odp,ods,ott,otp,ots,rtf,htm,html,txt,log
POSTing file books.csv (text/csv) to [base]
POSTing file books.json (application/json) to [base]/json/docs
POSTing file gb18030-example.xml (application/xml) to [base]
POSTing file hd.xml (application/xml) to [base]
POSTing file ipod_other.xml (application/xml) to [base]
POSTing file ipod_video.xml (application/xml) to [base]
POSTing file manufacturers.xml (application/xml) to [base]
POSTing file mem.xml (application/xml) to [base]
POSTing file money.xml (application/xml) to [base]
POSTing file monitor.xml (application/xml) to [base]
POSTing file monitor2.xml (application/xml) to [base]
POSTing file more_books.jsonl (application/json) to [base]/json/docs
POSTing file mp500.xml (application/xml) to [base]
POSTing file post.jar (application/octet-stream) to [base]/extract
POSTing file sample.html (text/html) to [base]/extract
POSTing file sd500.xml (application/xml) to [base]
POSTing file solr-word.pdf (application/pdf) to [base]/extract
POSTing file solr.xml (application/xml) to [base]
POSTing file test_utf8.sh (application/octet-stream) to [base]/extract
POSTing file utf8-example.xml (application/xml) to [base]
POSTing file vidcard.xml (application/xml) to [base]
21 files indexed.
COMMITting Solr index changes to http://localhost:8983/solr/techproducts/update...
Time spent: 0:00:01.095
With this, the data should be ready to search
- Search condition: none; first 3 results
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$curl -s 'http://localhost:8983/solr/techproducts/select?q=*%3A*&rows=3&start=0' | jq --stream -c
[["responseHeader","status"],0]
[["responseHeader","QTime"],0]
[["responseHeader","params","q"],"*:*"]
[["responseHeader","params","start"],"0"]
[["responseHeader","params","rows"],"3"]
[["responseHeader","params","rows"]]
[["responseHeader","params"]]
[["response","numFound"],52]
[["response","start"],0]
[["response","docs",0,"id"],"0553573403"]
[["response","docs",0,"cat",0],"book"]
[["response","docs",0,"cat",0]]
[["response","docs",0,"name"],"A Game of Thrones"]
[["response","docs",0,"price"],7.99]
[["response","docs",0,"price_c"],"7.99,USD"]
[["response","docs",0,"inStock"],true]
[["response","docs",0,"author"],"<NAME>"]
[["response","docs",0,"author_s"],"<NAME>"]
[["response","docs",0,"series_t"],"A Song of Ice and Fire"]
[["response","docs",0,"sequence_i"],1]
[["response","docs",0,"genre_s"],"fantasy"]
[["response","docs",0,"_version_"],1665866796928860200]
[["response","docs",0,"price_c____l_ns"],799]
[["response","docs",0,"price_c____l_ns"]]
[["response","docs",1,"id"],"0553579908"]
[["response","docs",1,"cat",0],"book"]
[["response","docs",1,"cat",0]]
[["response","docs",1,"name"],"A Clash of Kings"]
[["response","docs",1,"price"],7.99]
[["response","docs",1,"price_c"],"7.99,USD"]
[["response","docs",1,"inStock"],true]
[["response","docs",1,"author"],"<NAME>"]
[["response","docs",1,"author_s"],"<NAME>"]
[["response","docs",1,"series_t"],"A Song of Ice and Fire"]
[["response","docs",1,"sequence_i"],2]
[["response","docs",1,"genre_s"],"fantasy"]
[["response","docs",1,"_version_"],1665866796930957300]
[["response","docs",1,"price_c____l_ns"],799]
[["response","docs",1,"price_c____l_ns"]]
[["response","docs",2,"id"],"055357342X"]
[["response","docs",2,"cat",0],"book"]
[["response","docs",2,"cat",0]]
[["response","docs",2,"name"],"A Storm of Swords"]
[["response","docs",2,"price"],7.99]
[["response","docs",2,"price_c"],"7.99,USD"]
[["response","docs",2,"inStock"],true]
[["response","docs",2,"author"],"<NAME>"]
[["response","docs",2,"author_s"],"<NAME>"]
[["response","docs",2,"series_t"],"A Song of Ice and Fire"]
[["response","docs",2,"sequence_i"],3]
[["response","docs",2,"genre_s"],"fantasy"]
[["response","docs",2,"_version_"],1665866796932006000]
[["response","docs",2,"price_c____l_ns"],799]
[["response","docs",2,"price_c____l_ns"]]
[["response","docs",2]]
[["response","docs"]]
[["response"]]
- Search condition: none; 3 results starting from the 4th
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$curl -s 'http://localhost:8983/solr/techproducts/select?q=*%3A*&rows=3&start=3' | jq --stream -c
[["responseHeader","status"],0]
[["responseHeader","QTime"],0]
[["responseHeader","params","q"],"*:*"]
[["responseHeader","params","start"],"3"]
[["responseHeader","params","rows"],"3"]
[["responseHeader","params","rows"]]
[["responseHeader","params"]]
[["response","numFound"],52]
[["response","start"],3]
[["response","docs",0,"id"],"0553293354"]
[["response","docs",0,"cat",0],"book"]
[["response","docs",0,"cat",0]]
[["response","docs",0,"name"],"Foundation"]
[["response","docs",0,"price"],7.99]
[["response","docs",0,"price_c"],"7.99,USD"]
[["response","docs",0,"inStock"],true]
[["response","docs",0,"author"],"<NAME>"]
[["response","docs",0,"author_s"],"<NAME>"]
[["response","docs",0,"series_t"],"Foundation Novels"]
[["response","docs",0,"sequence_i"],1]
[["response","docs",0,"genre_s"],"scifi"]
[["response","docs",0,"_version_"],1665866796932006000]
[["response","docs",0,"price_c____l_ns"],799]
[["response","docs",0,"price_c____l_ns"]]
[["response","docs",1,"id"],"0812521390"]
[["response","docs",1,"cat",0],"book"]
[["response","docs",1,"cat",0]]
[["response","docs",1,"name"],"The Black Company"]
[["response","docs",1,"price"],6.99]
[["response","docs",1,"price_c"],"6.99,USD"]
[["response","docs",1,"inStock"],false]
[["response","docs",1,"author"],"Glen Cook"]
[["response","docs",1,"author_s"],"Glen Cook"]
[["response","docs",1,"series_t"],"The Chronicles of The Black Company"]
[["response","docs",1,"sequence_i"],1]
[["response","docs",1,"genre_s"],"fantasy"]
[["response","docs",1,"_version_"],1665866796933054500]
[["response","docs",1,"price_c____l_ns"],699]
[["response","docs",1,"price_c____l_ns"]]
[["response","docs",2,"id"],"0812550706"]
[["response","docs",2,"cat",0],"book"]
[["response","docs",2,"cat",0]]
[["response","docs",2,"name"],"Ender's Game"]
[["response","docs",2,"price"],6.99]
[["response","docs",2,"price_c"],"6.99,USD"]
[["response","docs",2,"inStock"],true]
[["response","docs",2,"author"],"<NAME>"]
[["response","docs",2,"author_s"],"<NAME>"]
[["response","docs",2,"series_t"],"Ender"]
[["response","docs",2,"sequence_i"],1]
[["response","docs",2,"genre_s"],"scifi"]
[["response","docs",2,"_version_"],1665866796933054500]
[["response","docs",2,"price_c____l_ns"],699]
[["response","docs",2,"price_c____l_ns"]]
[["response","docs",2]]
[["response","docs"]]
[["response"]]
- Search condition: none; 3 results starting from the last hit
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$curl -s 'http://localhost:8983/solr/techproducts/select?q=*%3A*&rows=3&start=51' | jq --stream -c
[["responseHeader","status"],0]
[["responseHeader","QTime"],0]
[["responseHeader","params","q"],"*:*"]
[["responseHeader","params","start"],"51"]
[["responseHeader","params","rows"],"3"]
[["responseHeader","params","rows"]]
[["responseHeader","params"]]
[["response","numFound"],52]
[["response","start"],51]
[["response","docs",0,"id"],"100-435805"]
[["response","docs",0,"name"],"ATI Radeon X1900 XTX 512 MB PCIE Video Card"]
[["response","docs",0,"manu"],"ATI Technologies"]
[["response","docs",0,"manu_id_s"],"ati"]
[["response","docs",0,"cat",0],"electronics"]
[["response","docs",0,"cat",1],"graphics card"]
[["response","docs",0,"cat",1]]
[["response","docs",0,"features",0],"ATI RADEON X1900 GPU/VPU clocked at 650MHz"]
[["response","docs",0,"features",1],"512MB GDDR3 SDRAM clocked at 1.55GHz"]
[["response","docs",0,"features",2],"PCI Express x16"]
[["response","docs",0,"features",3],"dual DVI, HDTV, svideo, composite out"]
[["response","docs",0,"features",4],"OpenGL 2.0, DirectX 9.0"]
[["response","docs",0,"features",4]]
[["response","docs",0,"weight"],48]
[["response","docs",0,"price"],649.99]
[["response","docs",0,"price_c"],"649.99,USD"]
[["response","docs",0,"popularity"],7]
[["response","docs",0,"inStock"],false]
[["response","docs",0,"manufacturedate_dt"],"2006-02-13T00:00:00Z"]
[["response","docs",0,"store"],"40.7143,-74.006"]
[["response","docs",0,"_version_"],1665866797934444500]
[["response","docs",0,"price_c____l_ns"],64999]
[["response","docs",0,"price_c____l_ns"]]
[["response","docs",0]]
[["response","docs"]]
[["response"]]
- Search condition: keyword foundation; no restriction on returned fields
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$curl -s http://localhost:8983/solr/techproducts/select?q=foundation | jq --stream -c
- Search condition: keyword foundation; return only the id field
http://localhost:8983/solr/techproducts/select?fl=id&q=foundation
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$curl -s 'http://localhost:8983/solr/techproducts/select?fl=id&q=foundation' | jq --stream -c
[["responseHeader","status"],0]
[["responseHeader","QTime"],0]
[["responseHeader","params","q"],"foundation"]
[["responseHeader","params","fl"],"id"]
[["responseHeader","params","fl"]]
[["responseHeader","params"]]
[["response","numFound"],4]
[["response","start"],0]
[["response","docs",0,"id"],"0553293354"]
[["response","docs",0,"id"]]
[["response","docs",1,"id"],"UTF8TEST"]
[["response","docs",1,"id"]]
[["response","docs",2,"id"],"SOLR1000"]
[["response","docs",2,"id"]]
[["response","docs",3,"id"],"/usr/local/src/solr-8.5.1/example/exampledocs/test_utf8.sh"]
[["response","docs",3,"id"]]
[["response","docs",3]]
[["response","docs"]]
[["response"]]
- Search condition: fetch documents whose cat field value is electronics
Syntax: field:search-keyword
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$curl -s 'http://localhost:8983/solr/techproducts/select?q=cat%3Aelectronics' | jq --stream -c
- Search condition: search for multiple words treated as a single term
word+word
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$curl -s 'http://localhost:8983/solr/techproducts/select?q=CAS%2Blatency' | jq --stream -c
- Search condition: must contain one word but not another
+electronics -music
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$curl -s 'http://localhost:8983/solr/techproducts/select?q=%2Belectronics%20-music' | jq --stream -c
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$curl -s 'http://localhost:8983/solr/techproducts/select?q=%2Belectronics' | jq --stream -c
For details on how to search, see:
https://lucene.apache.org/solr/guide/8_4/searching.html#searching
Delete the core
bin/solr delete -c techproducts
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$bin/solr delete -c techproducts
Deleting core 'techproducts' using command:
http://localhost:8983/solr/admin/cores?action=UNLOAD&core=techproducts&deleteIndex=true&deleteDataDir=true&deleteInstanceDir=true
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$curl -s 'http://localhost:8983/solr/techproducts/select?q=%2Belectronics'
<html>
<head>
<meta http-equiv="Content-Type" content="text/html;charset=utf-8"/>
<title>Error 404 Not Found</title>
</head>
<body><h2>HTTP ERROR 404 Not Found</h2>
<table>
<tr><th>URI:</th><td>/solr/techproducts/select</td></tr>
<tr><th>STATUS:</th><td>404</td></tr>
<tr><th>MESSAGE:</th><td>Not Found</td></tr>
<tr><th>SERVLET:</th><td>default</td></tr>
</table>
</body>
</html>
-- From here: steps for a fresh setup
Start the Solr process
bin/solr start
Create a core (specifying the shard and replica counts)
bin/solr create -c <yourCollection> -s 2 -rf 2
bin/solr create -c films -s 2 -rf 2
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$bin/solr create -c films -s 2 -rf 2
WARNING: Using _default configset with data driven schema functionality. NOT RECOMMENDED for production use.
To turn off: bin/solr config -c films -p 8983 -action set-user-property -property update.autoCreateFields -value false
Created new core 'films'
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$find / -name "films" 2>/dev/null
/usr/local/src/solr-8.5.1/example/films
/usr/local/src/solr-8.5.1/example/techproducts/solr/films
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$ll /usr/local/src/solr-8.5.1/example/films
total 896
drwxr-xr-x. 2 solr solr 4096 5月 5 18:05 ./
drwxr-xr-x. 1 solr solr 4096 5月 6 00:40 ../
-rw-r--r--. 1 solr solr 4986 4月 8 14:02 README.txt
-rw-r--r--. 1 solr solr 3829 4月 8 14:02 film_data_generator.py
-rw-r--r--. 1 solr solr 299 4月 8 14:02 films-LICENSE.txt
-rw-r--r--. 1 solr solr 124581 4月 8 14:02 films.csv
-rw-r--r--. 1 solr solr 300955 4月 8 14:02 films.json
-rw-r--r--. 1 solr solr 455444 4月 8 14:02 films.xml
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$ll /usr/local/src/solr-8.5.1/example/techproducts/solr/films
total 20
drwxr-xr-x. 4 solr solr 4096 5月 6 01:33 ./
drwxr-xr-x. 5 solr solr 4096 5月 6 01:33 ../
drwxr-xr-x. 3 solr solr 4096 5月 5 18:05 conf/
-rw-r--r--. 1 solr solr 75 5月 6 01:33 core.properties
drwxr-xr-x. 5 solr solr 4096 5月 6 01:33 data/
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$cat /usr/local/src/solr-8.5.1/example/films/films.json | jq --stream -c '' | head
[[0,"id"],"/en/45_2006"]
[[0,"directed_by",0],"<NAME>"]
[[0,"directed_by",0]]
[[0,"initial_release_date"],"2006-11-30"]
[[0,"genre",0],"Black comedy"]
[[0,"genre",1],"Thriller"]
[[0,"genre",2],"Psychological thriller"]
[[0,"genre",3],"Indie film"]
[[0,"genre",4],"Action Film"]
[[0,"genre",5],"Crime Thriller"]
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$cat /usr/local/src/solr-8.5.1/example/films/films.json | jq --stream -c '' | tail
[[1099,"genre",1],"Mystery"]
[[1099,"genre",2],"Adventure Film"]
[[1099,"genre",3],"Fantasy"]
[[1099,"genre",4],"Fantasy Adventure"]
[[1099,"genre",5],"Fiction"]
[[1099,"genre",5]]
[[1099,"directed_by",0],"<NAME>"]
[[1099,"directed_by",0]]
[[1099,"directed_by"]]
[[1099]]
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$cat /usr/local/src/solr-8.5.1/example/films/films.csv | head
name,directed_by,genre,type,id,initial_release_date
.45,<NAME>,Black comedy|Thriller|Psychological thriller|Indie film|Action Film|Crime Thriller|Crime Fiction|Drama,,/en/45_2006,2006-11-30
9,<NAME>,Computer Animation|Animation|Apocalyptic and post-apocalyptic fiction|Science Fiction|Short Film|Thriller|Fantasy,,/en/9_2005,2005-04-21
69,Lee Sang-il,Japanese Movies|Drama,,/en/69_2004,2004-07-10
300,<NAME>,Epic film|Adventure Film|Fantasy|Action Film|Historical fiction|War film|Superhero movie|Historical Epic,,/en/300_2007,2006-12-09
2046,<NAME>,Romance Film|Fantasy|Science Fiction|Drama,,/en/2046_2004,2004-05-20
¿Quién es el señor López?,<NAME>,Documentary film,,/en/quien_es_el_senor_lopez,
"""Weird Al"" Yankovic: The Ultimate Video Collection","<NAME>vey|""Weird Al"" Yankovic",Music video|Parody,,/en/weird_al_yankovic_the_ultimate_video_collection,2003-11-04
15 Park Avenue,Aparna Sen,Art film|Romance Film|Musical|Drama|Musical Drama,,/en/15_park_avenue,2005-10-27
2 Fast 2 Furious,<NAME>,Thriller|Action Film|Crime Fiction,,/en/2_fast_2_furious,2003-06-03
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$cat /usr/local/src/solr-8.5.1/example/films/films.csv | tail
I Love New Year,<NAME>|<NAME>,Caper story|Crime Fiction|Romantic comedy|Romance Film|Bollywood|World cinema,,/wikipedia/en_title/I_Love_New_Year,2013-12-30
<NAME>,<NAME>,Musical|Romance Film|World cinema|Musical Drama|Drama,,/en/har_dil_jo_pyar_karega,2000-07-24
<NAME>,<NAME>,Psychological thriller|Thriller|Suspense|Indie film|Erotic thriller|Drama,,/en/hard_candy,
Hard Luck,<NAME>,Thriller|Crime Fiction|Action/Adventure|Action Film|Drama,,/en/hard_luck,2006-10-17
Hardball,<NAME>,Sports|Drama,,/en/hardball,2001-09-14
Harold & Kumar Go to White Castle,<NAME>,Stoner film|Buddy film|Adventure Film|Comedy,,/en/harold_kumar_go_to_white_castle,2004-05-20
Harry Potter and the Chamber of Secrets,Chris Columbus,Adventure Film|Family|Fantasy|Mystery,,/en/harry_potter_and_the_chamber_of_secrets_2002,2002-11-03
<NAME>ter and the Goblet of Fire,<NAME>,Family|Fantasy|Adventure Film|Thriller|Science Fiction|Supernatural|Mystery|Children's Fantasy|Children's/Family|Fantasy Adventure|Fiction,,/en/harry_potter_and_the_goblet_of_fire_2005,2005-11-06
Harry Potter and the Half-Blood Prince,David Yates,Adventure Film|Fantasy|Mystery|Action Film|Family|Romance Film|Children's Fantasy|Children's/Family|Fantasy Adventure|Fiction,,/en/harry_potter_and_the_half_blood_prince_2008,2009-07-06
Harry Potter and the Order of the Phoenix,David Yates,Family|Mystery|Adventure Film|Fantasy|Fantasy Adventure|Fiction,,/en/harry_potter_and_the_order_of_the_phoenix_2007,2007-06-28
From here, post the data to the server
Define the core's fields
This data consists of the following fields:
* "id" - unique identifier for the movie
* "name" - Name of the movie
* "directed_by" - The person(s) who directed the making of the film
* "initial_release_date" - The earliest official initial film screening date in any country
* "genre" - The genre(s) that the movie belongs to
curl -X POST -H 'Content-type:application/json' --data-binary '{"add-field": {"name":"name", "type":"text_general", "multiValued":false, "stored":true}}' http://localhost:8983/solr/films/schema
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1/example/techproducts/solr/films/conf$curl -X POST -H 'Content-type:application/json' --data-binary '{"add-field": {"name":"name", "type":"text_general", "multiValued":false, "stored":true}}' http://localhost:8983/solr/films/schema
{
"responseHeader":{
"status":0,
"QTime":158}}
To change the field configuration
curl http://localhost:8983/solr/films/schema -X POST -H 'Content-type:application/json' --data-binary '{
"add-field" : {
"name":"name",
"type":"text_general",
"multiValued":false,
"stored":true
},
"add-field" : {
"name":"initial_release_date",
"type":"pdate",
"stored":true
}
}'
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1/example/techproducts/solr/films/conf$cat managed-schema.json | jq --stream -c '' | grep name
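# (added note, not part of the original transcript) the registered fields can also be listed via the Schema API
curl -s 'http://localhost:8983/solr/films/schema/fields' | jq --stream -c '' | grep name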
Load the data
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$bin/post -c films example/films/films.json
/usr/local/src/jdk-11/bin/java -classpath /usr/local/src/solr-8.5.1/dist/solr-core-8.5.1.jar -Dauto=yes -Dc=films -Ddata=files org.apache.solr.util.SimplePostTool example/films/films.json
SimplePostTool version 5.0.0
Posting files to [base] url http://localhost:8983/solr/films/update...
Entering auto mode. File endings considered are xml,json,jsonl,csv,pdf,doc,docx,ppt,pptx,xls,xlsx,odt,odp,ods,ott,otp,ots,rtf,htm,html,txt,log
POSTing file films.json (application/json) to [base]/json/docs
1 files indexed.
COMMITting Solr index changes to http://localhost:8983/solr/films/update...
Time spent: 0:00:00.318
Query the data
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$curl -s 'http://localhost:8983/solr/films/select?q=*%3A*' | jq --stream -c ''
Faceting
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$curl -s 'http://localhost:8983/solr/films/select?q=*%3A*&rows=1100&start=0' | jq --stream -c ''
Pivot facets
Aggregation by subgroup
curl "http://localhost:8983/solr/films/select?q=*:*&rows=0&facet=on&facet.pivot=genre_str,directed_by_str"
Range facets
Aggregation by year
curl 'http://localhost:8983/solr/films/select?q=*:*&rows=0&facet=true&facet.range=initial_release_date&facet.range.start=NOW-20YEAR&facet.range.end=NOW&facet.range.gap=%2B1YEAR'
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$curl 'http://localhost:8983/solr/films/select?q=*:*&rows=0&facet=true&facet.range=initial_release_date&facet.range.start=NOW-20YEAR&facet.range.end=NOW&facet.range.gap=%2B1YEAR'
{
"responseHeader":{
"status":0,
"QTime":17,
"params":{
"facet.range":"initial_release_date",
"q":"*:*",
"facet.range.gap":"+1YEAR",
"rows":"0",
"facet":"true",
"facet.range.start":"NOW-20YEAR",
"facet.range.end":"NOW"}},
"response":{"numFound":1100,"start":0,"docs":[]
},
"facet_counts":{
"facet_queries":{},
"facet_fields":{},
"facet_ranges":{
"initial_release_date":{
"counts":[
"2000-05-06T10:24:32.571Z",81,
"2001-05-06T10:24:32.571Z",105,
"2002-05-06T10:24:32.571Z",117,
"2003-05-06T10:24:32.571Z",137,
"2004-05-06T10:24:32.571Z",154,
"2005-05-06T10:24:32.571Z",205,
"2006-05-06T10:24:32.571Z",110,
"2007-05-06T10:24:32.571Z",32,
"2008-05-06T10:24:32.571Z",8,
"2009-05-06T10:24:32.571Z",4,
"2010-05-06T10:24:32.571Z",1,
"2011-05-06T10:24:32.571Z",0,
"2012-05-06T10:24:32.571Z",1,
"2013-05-06T10:24:32.571Z",1,
"2014-05-06T10:24:32.571Z",0,
"2015-05-06T10:24:32.571Z",1,
"2016-05-06T10:24:32.571Z",0,
"2017-05-06T10:24:32.571Z",0,
"2018-05-06T10:24:32.571Z",0,
"2019-05-06T10:24:32.571Z",0],
"gap":"+1YEAR",
"start":"2000-05-06T10:24:32.571Z",
"end":"2020-05-06T10:24:32.571Z"}},
"facet_intervals":{},
"facet_heatmaps":{}}}
By year and month
curl 'http://localhost:8983/solr/films/select?q=*:*&rows=0&facet=true&facet.range=initial_release_date&facet.range.start=NOW-20YEAR&facet.range.end=NOW&facet.range.gap=%2B1MONTH'
By year, month, and day
curl 'http://localhost:8983/solr/films/select?q=*:*&rows=0&facet=true&facet.range=initial_release_date&facet.range.start=NOW-20YEAR&facet.range.end=NOW&facet.range.gap=%2B1DAY'
Delete only a specific document
Before deletion
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$curl -s 'http://localhost:8983/solr/films/select?q=id:*/en/300_2007' | jq --stream -c
[["responseHeader","status"],0]
[["responseHeader","QTime"],2]
[["responseHeader","params","q"],"id:*/en/300_2007"]
[["responseHeader","params","q"]]
[["responseHeader","params"]]
[["response","numFound"],1]
[["response","start"],0]
[["response","docs",0,"id"],"/en/300_2007"]
[["response","docs",0,"directed_by",0],"<NAME>"]
[["response","docs",0,"directed_by",0]]
[["response","docs",0,"initial_release_date",0],"2006-12-09T00:00:00Z"]
[["response","docs",0,"initial_release_date",0]]
[["response","docs",0,"genre",0],"Epic film"]
[["response","docs",0,"genre",1],"Adventure Film"]
[["response","docs",0,"genre",2],"Fantasy"]
[["response","docs",0,"genre",3],"Action Film"]
[["response","docs",0,"genre",4],"Historical fiction"]
[["response","docs",0,"genre",5],"War film"]
[["response","docs",0,"genre",6],"Superhero movie"]
[["response","docs",0,"genre",7],"Historical Epic"]
[["response","docs",0,"genre",7]]
[["response","docs",0,"name"],"300"]
[["response","docs",0,"_version_"],1665931701953495000]
[["response","docs",0,"_version_"]]
[["response","docs",0]]
[["response","docs"]]
[["response"]]
Run the deletion
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$bin/post -c films -d "<delete><id>/en/300_2007</id></delete>"
/usr/local/src/jdk-11/bin/java -classpath /usr/local/src/solr-8.5.1/dist/solr-core-8.5.1.jar -Dauto=yes -Dc=films -Ddata=args org.apache.solr.util.SimplePostTool <delete><id>/en/300_2007</id></delete>
SimplePostTool version 5.0.0
POSTing args to http://localhost:8983/solr/films/update...
COMMITting Solr index changes to http://localhost:8983/solr/films/update...
Time spent: 0:00:00.041
Search again after deletion
solr docker-container-ubuntu-19-10-java-apache-solr-vim /usr/local/src/solr-8.5.1$curl -s 'http://localhost:8983/solr/films/select?q=id:*/en/300_2007' | jq --stream -c
[["responseHeader","status"],0]
[["responseHeader","QTime"],2]
[["responseHeader","params","q"],"id:*/en/300_2007"]
[["responseHeader","params","q"]]
[["responseHeader","params"]]
[["response","numFound"],0]
[["response","start"],0]
[["response","docs"],[]]
[["response","docs"]]
[["response"]]
Delete the entire core
bin/solr delete -c films
```
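Once done experimenting, the Solr process itself can be stopped as below (my addition; this step is not covered in the transcript above):
```
bin/solr stop -all
```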
<file_sep>GIT_VERSION=2-24-1
KDEVELOP_VERSION=5-5-0
PYTHON_VERSION=3-7-4
<file_sep>過去分はここから
https://www.elastic.co/jp/downloads/past-releases
dbeaverのドライバのサポートバージョンが7.4.0なので、これに合わせる形
jdk
```
$java -version
openjdk version "11" 2018-09-25
OpenJDK Runtime Environment 18.9 (build 11+28)
OpenJDK 64-Bit Server VM 18.9 (build 11+28, mixed mode)
```
elasticsearch
```
cd /usr/local/src
tar xvf elasticsearch-oss-7.4.0-linux-x86_64.tar.gz
chown -R elasticsearch:elasticsearch elasticsearch-7.4.0/
```
Path setup
Configure this as the elasticsearch user
```
echo 'export ES_HOME=/usr/local/src/elasticsearch-7.4.0' >>$HOME/.bashrc
echo 'export PATH=$ES_HOME/bin:$PATH' >>$HOME/.bashrc
```
Version
```
$elasticsearch --version
OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
Version: 7.4.0, Build: oss/tar/22e1767283e61a198cb4db791ea66e3f11ab9910/2019-09-27T08:36:48.569419Z, JVM: 11
```
Help
```
$elasticsearch --help
OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
starts elasticsearch
Option Description
------ -----------
-E <KeyValuePair> Configure a setting
-V, --version Prints elasticsearch version information and exits
-d, --daemonize Starts Elasticsearch in the background
-h, --help show help
-p, --pidfile <Path> Creates a pid file in the specified path on start
-q, --quiet Turns off standard output/error streams logging in console
-s, --silent show minimal output
-v, --verbose show verbose output
```
kibana
Configure this as the kibana user
```
cd /usr/local/src
tar xvf kibana-oss-7.4.0-linux-x86_64.tar.gz
chown -R kibana:kibana kibana-7.4.0-linux-x86_64/
```
Path setup
```
echo 'export KIBANA_HOME=/usr/local/src/kibana-7.4.0-linux-x86_64' >>$HOME/.bashrc
echo 'export PATH=$KIBANA_HOME/bin:$PATH' >>$HOME/.bashrc
```
Version
```
$kibana --version
7.4.0
```
Help
```
$kibana --help
Usage: bin/kibana [command=serve] [options]
Kibana is an open source (Apache Licensed), browser based analytics and search dashboard for Elasticsearch.
Commands:
serve [options] Run the kibana server
help <command> Get the help for a specific command
"serve" Options:
-e, --elasticsearch <uri1,uri2> Elasticsearch instances
-c, --config <path> Path to the config file, use multiple --config args to include multiple config files (default: ["/usr/local/src/kibana-7.4.0-linux-x86_64/config/kibana.yml"])
-p, --port <port> The port to bind to
-q, --quiet Prevent all logging except errors
-Q, --silent Prevent all logging
--verbose Turns on verbose logging
-H, --host <host> The host to bind to
-l, --log-file <path> The file to log to
--plugin-dir <path> A path to scan for plugins, this can be specified multiple times to specify multiple directories (default: ["/usr/local/src/kibana-7.4.0-linux-x86_64/plugins","/usr/local/src/kibana-7.4.0-linux-x86_64/src/legacy/core_plugins"])
--plugin-path <path> A path to a plugin which should be included by the server, this can be specified multiple times to specify multiple paths (default: [])
--plugins <path> an alias for --plugin-dir
--optimize Optimize and then stop the server
-h, --help output usage information
```
Start the Elasticsearch process
```
$elasticsearch -d --verbose 1>$HOME/launch-elasticsearch.log 2>&1
```
```
$ps aux
USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND
kibana 1 0.0 0.0 14240 1248 pts/0 Ss 14:02 0:00 bash /etc/init/run.sh
kibana 8 0.0 0.0 237388 18380 ? S 14:02 0:05 fcitx
kibana 9 0.0 0.0 14380 2016 pts/0 S+ 14:02 0:00 bash
kibana 22 0.0 0.0 58908 884 ? S 14:02 0:00 dbus-launch --autolaunch bc74deaa9e044c079ed6fc963d084157 --binary-syntax --close-stderr
kibana 29 0.0 0.0 58004 1364 ? Ss 14:02 0:00 /usr/bin/dbus-daemon --fork --print-pid 5 --print-address 7 --session
kibana 34 0.0 0.0 58284 1732 ? Ss 14:02 0:01 /usr/bin/dbus-daemon --fork --print-pid 5 --print-address 7 --config-file /usr/share/fcitx/dbus/daemon.conf
kibana 38 0.0 0.0 51300 784 ? SN 14:02 0:00 /usr/bin/fcitx-dbus-watcher unix:abstract=/tmp/dbus-qqr563ropB,guid=2f0971c0a039b3e0a6cb87355ebf73ef 34
kibana 2428 0.0 0.0 14380 2076 pts/1 Ss 18:32 0:00 /bin/bash
kibana 2534 0.0 0.0 187424 2912 ? Sl 18:33 0:00 /usr/libexec/dconf-service
kibana 2547 0.0 0.0 192900 12300 pts/1 S 18:35 0:00 dbeaver
kibana 2571 3.4 1.1 5843012 367424 pts/1 Sl 18:35 0:34 /bin/java -XX:+IgnoreUnrecognizedVMOptions -Xms64m -Xmx1024m -jar /usr/share/dbeaver//plugins/org.eclipse.equinox.launcher_1.5.700.v20200207-2156.
root 2671 0.0 0.0 87268 2632 pts/1 S 18:51 0:00 su elasticsearch
elastic+ 2674 0.0 0.0 14380 2056 pts/1 S 18:51 0:00 bash
elastic+ 2771 323 3.9 6131996 1290104 pts/1 Sl 18:51 0:12 /usr/local/src/jdk-11/bin/java -Xms1g -Xmx1g -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -Des
elastic+ 2828 0.0 0.0 54312 1872 pts/1 R+ 18:51 0:00 ps axu
```
```
$lsof -i:9200
COMMAND PID USER FD TYPE DEVICE SIZE/OFF NODE NAME
java 2771 elasticsearch 248u IPv4 27262732 0t0 TCP localhost:wap-wsp (LISTEN)
```
Health check
```
$curl http://localhost:9200
{
"name" : "doc-centos-7-6-18-10-elasticsearch-kibana-java-dbeaver",
"cluster_name" : "elasticsearch",
"cluster_uuid" : "xGV_LjzOTiW_QMFu-_Xvjw",
"version" : {
"number" : "7.4.0",
"build_flavor" : "oss",
"build_type" : "tar",
"build_hash" : "22e1767283e61a198cb4db791ea66e3f11ab9910",
"build_date" : "2019-09-27T08:36:48.569419Z",
"build_snapshot" : false,
"lucene_version" : "8.2.0",
"minimum_wire_compatibility_version" : "6.8.0",
"minimum_index_compatibility_version" : "6.0.0-beta1"
},
"tagline" : "You Know, for Search"
}
```
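As an additional check (my addition, not in the original notes), the cluster health API gives a compact status summary:
```
curl 'http://localhost:9200/_cluster/health?pretty'
```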
Start the Kibana process
Edit the config file
These settings can apparently also be passed as command-line arguments, so adapt as needed
If they are only needed ad hoc, passing them on the command line is probably better
Before editing
```
$grep -P 'server\.(name|port)' /usr/local/src/kibana-7.4.0-linux-x86_64/config/kibana.yml
#server.port: 5601
#server.name: "your-hostname"
```
Check the intended changes
```
$sed -r "s/\#(.*)/\1/g" /usr/local/src/kibana-7.4.0-linux-x86_64/config/kibana.yml | grep -P 'server\.(port)'
server.port: 5601
```
```
$sed -r "s/\#server.name: \"your-hostname\"/server.name: $(ip a show | grep -Po '(\.?[0-9]{1,}){4}(?=/)' | grep -vP '^127')/g" /usr/local/src/kibana-7.4.0-linux-x86_64/config/kibana.yml | grep -P 'server\.(name)'
server.name: 172.17.0.4
```
```
$kibana -p 5601 -H $(ip a show | grep -Po '(\.?[0-9]{1,}){4}(?=/)' | grep -vP '^127') 1>$HOME/launch-kibana.log 2>&1 &
[2] 3087
$ps aux
USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND
kibana 1 0.0 0.0 14240 1248 pts/0 Ss 14:02 0:00 bash /etc/init/run.sh
kibana 8 0.0 0.0 237388 18380 ? S 14:02 0:05 fcitx
kibana 9 0.0 0.0 14380 2016 pts/0 S+ 14:02 0:00 bash
kibana 22 0.0 0.0 58908 884 ? S 14:02 0:00 dbus-launch --autolaunch bc74deaa9e044c079ed6fc963d084157 --binary-syntax --close-stderr
kibana 29 0.0 0.0 58004 1364 ? Ss 14:02 0:00 /usr/bin/dbus-daemon --fork --print-pid 5 --print-address 7 --session
kibana 34 0.0 0.0 58284 1732 ? Ss 14:02 0:01 /usr/bin/dbus-daemon --fork --print-pid 5 --print-address 7 --config-file /usr/share/fcitx/dbus/daemon.conf
kibana 38 0.0 0.0 51300 784 ? SN 14:02 0:00 /usr/bin/fcitx-dbus-watcher unix:abstract=/tmp/dbus-qqr563ropB,guid=2f0971c0a039b3e0a6cb87355ebf73ef 34
kibana 2428 0.0 0.0 14380 2076 pts/1 Ss 18:32 0:00 /bin/bash
kibana 2534 0.0 0.0 187424 2912 ? Sl 18:33 0:00 /usr/libexec/dconf-service
kibana 2547 0.0 0.0 192900 12300 pts/1 S 18:35 0:00 dbeaver
kibana 2571 2.6 1.1 5843012 367544 pts/1 Sl 18:35 0:36 /bin/java -XX:+IgnoreUnrecognizedVMOptions -Xms64m -Xmx1024m -jar /usr/share/dbeaver//plugins/org.eclipse.equinox.launcher_1.5.700.v20200207-2156.
elastic+ 2771 4.6 4.0 7203164 1310516 pts/1 Sl 18:51 0:19 /usr/local/src/jdk-11/bin/java -Xms1g -Xmx1g -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -Des
kibana 3087 57.5 0.4 1219148 132184 pts/1 Sl 18:58 0:02 /usr/local/src/kibana-7.4.0-linux-x86_64/bin/../node/bin/node /usr/local/src/kibana-7.4.0-linux-x86_64/bin/../src/cli -p 5601 -H 172.17.0.4
kibana 3106 0.0 0.0 54308 1868 pts/1 R+ 18:58 0:00 ps uax
```
```
$lsof -i:5601
COMMAND PID USER FD TYPE DEVICE SIZE/OFF NODE NAME
node 3087 kibana 18u IPv4 27390766 0t0 TCP doc-centos-7-6-18-10-elasticsearch-kibana-java-dbeaver:esmagent (LISTEN)
```
Work with the Elasticsearch data from DBeaver or similar tools
```
$dbeaver 1>$HOME/launch-dbeaver.log 2>&1 &
```
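For a quick smoke test from the command line (a hedged sketch I added; the index name my-index is arbitrary and not part of the original notes), a document can be indexed and searched like this:
```
curl -X PUT 'http://localhost:9200/my-index/_doc/1' -H 'Content-Type: application/json' -d '{"message":"hello elasticsearch"}'
curl 'http://localhost:9200/my-index/_search?q=message:hello&pretty'
```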
<file_sep>```
cd $HOME/script-env
ls D[0][1]* D[0][6]* D[0][9]* P[0-1][0-9]* R07* | xargs -n1 -I@ echo "bash @ script-env" | bash
```
```
env-->sketch-->repo
remove env
that flow should be fine, I think
the sketch should include the version number
```
<file_sep>FROM ibmcom/db2
ARG OS_VERSION
ARG REPO
ARG GIT_VERSION
ARG PYTHON_VERSION
ENV LANG=ja_JP.UTF-8
ENV DISPLAY=:0.0
ENV BLU=false
ENV DB2INST1_PASSWORD=<PASSWORD>
ENV DB2INSTANCE=db2inst1
ENV DBNAME=testdb
ENV ENABLE_ORACLE_COMPATIBILITY=true
ENV LICENSE=accept
ENV SAMPLEDB=true
RUN yum install -y git
RUN cd /usr/local/src && git clone https://github.com/ukijumotahaneniarukenia/$REPO.git
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-docker-host-user.sh | bash
RUN cd /usr/local/src/$REPO && echo './$OS_VERSION-install-default-user.sh 1002 db2inst1 1002 db2inst1' | bash
RUN cd /usr/local/src/$REPO && echo './$OS_VERSION-install-default-user.sh 1001 db2fenc1 1001 db2fenc1' | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-repository-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-dev-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-tool-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-network-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim-system.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim-user.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim_plug.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-python-$PYTHON_VERSION.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-git-$GIT_VERSION.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-dotfile.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-locale.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-env.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-ld.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-font-RictyDiminished.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-ibus_mozc.sh | bash
#RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-fcitx_anthy.sh | bash
COPY subrun.sh /root/subrun.sh
#RUN bash /root/subrun.sh
USER db2fenc1
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim-user.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim_plug.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-dotfile.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-env.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-font-RictyDiminished.sh | bash
USER db2inst1
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim-user.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim_plug.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-dotfile.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-env.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-font-RictyDiminished.sh | bash
WORKDIR /home/db2inst1
EXPOSE 50000
COPY run.sh /etc/init/run.sh
ENTRYPOINT ["/etc/init/run.sh","centos"]
<file_sep>GIT_VERSION=2-24-1
POSTGRES_VERSION=12-0
PYTHON_VERSION=3-7-4
TCLSH_VERSION=8-6-10
<file_sep># トラシュー
- 事象
- dbeaverのインストールできない
```
パッケージリストを読み込んでいます... 完了
依存関係ツリーを作成しています
状態情報を読み取っています... 完了
パッケージ openjdk-11-jre-headless は使用できませんが、別のパッケージから参照されます。
これは、パッケージが欠落しているか、廃止されたか、または別のソース
からのみ利用可能であることを意味します。
パッケージ openjdk-11-jdk-headless は使用できませんが、別のパッケージから参照されます。
これは、パッケージが欠落しているか、廃止されたか、または別のソース
からのみ利用可能であることを意味します。
E: パッケージ 'openjdk-11-jre-headless' にはインストール候補がありません
E: パッケージ 'openjdk-11-jdk-headless' にはインストール候補がありません
```
- Cause
- The JDK is not distributed from the package repository
- Fix
- Install it from source
- Prevention
- Install DBeaver only after the JDK installation has finished (see the sketch below)
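A minimal guard implementing the prevention step might look like this (my sketch, not from the original notes; the JDK path matches the /usr/local/src/jdk-11 layout used elsewhere in this repo):
```
# hypothetical pre-check: refuse to install DBeaver until a JDK is on PATH
if ! command -v java >/dev/null 2>&1; then
  echo "install the JDK first (e.g. /usr/local/src/jdk-11) and set JAVA_HOME/PATH" >&2
  exit 1
fi
java -version
```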
<file_sep>#!/usr/bin/env bash
d=1 #tried making the schedule start at 21:00
template(){
n=$1;shift;
l=$1;shift;
MM=$(printf "%02d" $(($n%12)))
if [ $MM = '00' ];then
cat <<EOS
10 00 $(printf "%02d" $d) 12 * $HOME/$ENV_REPO/docker-crontab-wrapper.sh 1 $l $SCRIPT_REPO
EOS
    if [[ $d -ge 31 ]];then #wrap here so an invalid day 32 is never emitted
d=1
else
d=$(($d+1))
fi
else
cat <<EOS
10 00 $(printf "%02d" $d) $MM * $HOME/$ENV_REPO/docker-crontab-wrapper.sh 1 $l $SCRIPT_REPO
EOS
fi
}
usage(){
cat <<EOS
Usage:
$0 script-env script-repo
EOS
exit 0
}
ENV_REPO=$1;shift
SCRIPT_REPO=$1;shift
[ -z $ENV_REPO ] && usage
[ -z $SCRIPT_REPO ] && usage
while read tgt;do
echo $tgt | perl -pe 's;.*/;;' | perl -nlE 's/(?:[a-z]+(?:-[0-9]{1,}){1,})(.*)/\1/ and say' | tr '-' '\n' | sed /^$/d
done < <(find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt') | sort | uniq | \
while read tgt;do
find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt' | sed 's;.*/;;' | grep -P "(?<=-)$tgt" | xargs -n3 | nl | \
while read n {a..c};do
#echo $tgt $n $(eval echo '$'{a..c}|xargs -n1;)
eval echo '$'{a..c}|xargs -n1 >docker-build-$tgt-list-$(printf "%02d" $n);
done
done
ls docker-build*list* | grep -vP 'list$' | nl | \
while read n l ;do
template $n $l
done | sort -k4 >docker-crontab-BY-ONE-MONTH
<file_sep>|ユーザーID|ユーザー名|グループID|グループ名|パスワード|
|:-:|:-:|:-:|:-:|:-:|
|1001|solr|1001|solr|solr_pwd|
<file_sep># 参考文献
https://www.nodebeginner.org/index-jp.html<br/>
https://github.com/nodesource/distributions/blob/master/README.md<br/>
Node package management has two options: yarn and npm
https://qiita.com/jigengineer/items/c75ca9b8f0e9ce462e99<br/>
# Build the docker image
```
time docker build -t centos-7-6-18-10-node . | tee log
```
# Create the docker container
```
docker run --privileged --shm-size=8gb -v /run/udev:/run/udev -v /run/systemd:/run/systemd -v /tmp/.X11-unix:/tmp/.X11-unix -v /var/lib/dbus:/var/lib/dbus -v /var/run/dbus:/var/run/dbus -v /etc/machine-id:/etc/machine-id -v /etc/localtime:/etc/localtime -p 8080:80 -p 5601:5601 -p 9200:9200 -itd --name centos-7-6-18-10-node centos-7-6-18-10-node
```
# Enter the docker container
```
docker exec --user kuraine -it centos-7-6-18-10-node /bin/bash
```
# Remove unneeded containers
```
docker ps -a | grep -vE "node|tcl|mysql|racket|postgres|oracle|egison|java|sqlite" | awk '/Ex/{print $1}' | xargs docker rm
```
# Remove unneeded images
```
docker images | awk '$1=="<none>"{print $3}' | xargs -I@ docker rmi @
```
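As a simpler alternative for the image cleanup above (my addition, not part of the original README), dangling images can also be removed with the built-in prune command:
```
docker image prune -f
```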
<file_sep>#!/bin/bash
usage(){
cat <<EOS
Usage:
$0 build-arg script-env
EOS
exit 0
}
N="$1";shift
REPO="$1";shift
[ -z $N ] && usage
[ -z $REPO ] && usage
while read tgt;do
grep -r -n -P "$N" $HOME/$REPO/$tgt | grep -P env\.md | awk -v FS=':' -v N=$N '{FILE_NAME=$1;gsub(/md-env.md/,"env-"N".md",FILE_NAME);print "rm -rf "FILE_NAME}'
grep -r -n -P "$N" $HOME/$REPO/$tgt | grep -P env\.md | awk -v FS=':' -v OFS='\n' -v N=$N '{
s=split($3,ary," ");
FILE_NAME=$1;
}
END{
for(e in ary){
print ary[e]
gsub(/md-env.md/,"env-"N".md",FILE_NAME);print FILE_NAME
}
}' | xargs -n2 | grep -vP "\--$N" | sort -k2,1 | awk '{print "echo \x27"$1"\x27>>"$2}'
done < <(ls -l $HOME/$REPO | grep -P '^d' | awk '{print $9}' | grep -v docker-log)
#File existence check
#ls -l $HOME/$REPO | grep -P '^d' | awk '{print $9}' | grep -v docker-log | while read tgt;do echo $HOME/$REPO/$tgt/env-$N.md;done | xargs -I@ ls @
<file_sep>APACHE_SPARK_VERSION=3-0-0
DBEAVER_VERSION=X-X-X
HADOOP_VERSION=3-2
GIT_VERSION=2-24-1
JAVA_VERSION=11
MAVEN_VERSION=3-6-3
PYTHON_VERSION=3-7-4
<file_sep>GDB_VERSION=9-1
GIT_VERSION=2-24-1
KDEVELOP_VERSION=5-5-0
NIM_VERSION=1-0-6
PYTHON_VERSION=3-7-4
TEXINFO_VERSION=6-7
<file_sep>FROM mcr.microsoft.com/mssql/server:2019-GA-ubuntu-16.04
ARG OS_VERSION
ARG REPO
ARG DEBIAN_FRONTEND=noninteractive
ARG DBEAVER_VERSION
ARG GIT_VERSION
ARG JAVA_VERSION
ARG MAVEN_VERSION
ARG PYTHON_VERSION
ARG SQLSERVER_VERSION
ENV LANG=ja_JP.UTF-8
ENV DISPLAY=:0.0
ENV ACCEPT_EULA=Y
ENV SA_PASSWORD=<PASSWORD>
USER root
RUN sed -i 's@<EMAIL>/pub/Linux@g' /etc/apt/sources.list && \
apt update && \
apt upgrade -y
#ENV ACCEPT_EULA=Y
#ENV SA_PASSWORD=<PASSWORD>
RUN apt install -y git
RUN cd /usr/local/src && git clone https://github.com/ukijumotahaneniarukenia/$REPO.git
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-docker-host-user.sh | bash
RUN cd /usr/local/src/$REPO && echo './$OS_VERSION-install-default-user.sh 1001 mssql 1001 mssql' | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-repository-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-dev-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-tool-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-network-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim-system.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim-user.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim_plug.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-python-$PYTHON_VERSION.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-pre-patch-git-$GIT_VERSION.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-git-$GIT_VERSION.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-dotfile.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-locale.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-env.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-ld.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-font-RictyDiminished.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-fcitx_mozc.sh | bash
COPY subrun.sh /root/subrun.sh
#RUN bash /root/subrun.sh
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-post-patch-sqlserver-$SQLSERVER_VERSION.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-jdk-$JAVA_VERSION.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-maven-$MAVEN_VERSION.sh | bash
#ソースからdbeaverいんすこする必要があるから設定
ENV JAVA_HOME=/usr/local/src/jdk-$JAVA_VERSION
ENV PATH=$JAVA_HOME/bin:$PATH
ENV MAVEN_HOME=/usr/local/src/apache-maven-$MAVEN_VERSION
ENV PATH=$MAVEN_HOME/bin:$PATH
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-dbeaver-$DBEAVER_VERSION.sh | bash
USER mssql
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim-user.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim_plug.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-dotfile.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-env.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-font-RictyDiminished.sh | bash
WORKDIR /home/mssql
EXPOSE 1433
COPY run.sh /etc/init/run.sh
ENTRYPOINT ["/etc/init/run.sh","ubuntu"]
<file_sep>FROM mcr.microsoft.com/mssql/server:2019-GA-ubuntu-16.04
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 FROM script-env
EOS
exit 0
}
CHK_WORD=$1;shift
ENV_REPO=$1;shift
[ -z $CHK_WORD ] && usage
[ -z $ENV_REPO ] && usage
while read tgt;do
if [ -f $tgt/env-image.md ];then
:
else
echo touch $tgt/env-image.md | bash
fi
if [ -f $tgt/Dockerfile.sub ];then
RT="$(grep $CHK_WORD $tgt/Dockerfile.sub)"
if [ -z "$RT" ];then
:
else
echo "echo '$RT' >>$tgt/env-image.md" | bash
fi
else
:
fi
done < <(find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt')
<file_sep>[正規表現](https://rubular.com/)
[Rubyコード補完](https://qiita.com/tequila0725/items/8e93dc894e0ab67d322f)
<file_sep>GIT_VERSION=2-24-1
PHP_VERSION=7-3-X
PYTHON_VERSION=3-7-4
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 script-env
or
$0 script-env --debug
EOS
exit 0
}
execute(){
OS_VERSION=$1;shift
[ -z $OS_VERSION ] && usage
OS_NAME=$(echo $OS_VERSION | perl -pe 's/^([a-z]+)-(.*)$/\1/g')
TEMPLATE_FILE=$(find $HOME/$ENV_REPO -name "docker-template-Dockerfile-$OS_NAME")
TEMPLATE_CMD="RUN cd /usr/local/src/\\\$REPO && echo './\\\$OS_VERSION-install-default-user.sh ARGS' | bash"
while read tgt;do
REPLACE_POS=$(grep -n DOCKERFILE_INSTALL_USER $tgt/Dockerfile.auto | sed 's/:.*//')
#Replace DOCKERFILE_INSTALL_USER in the template file
{
echo $tgt
grep -c -vP 'ユーザーID|aine|kuraine|nahato|mujiku|:-:|root' $tgt/env-user.md
grep -vP 'ユーザーID|aine|kuraine|nahato|mujiku|:-:|root' $tgt/env-user.md | awk -v FS='|' -v ORS='' '{print ","$2}'|sed 's/$/@/'
grep -vP 'ユーザーID|aine|kuraine|nahato|mujiku|:-:|root' $tgt/env-user.md | awk -v FS='|' -v ORS='' '{print ","$3}'|sed 's/$/@/'
grep -vP 'ユーザーID|aine|kuraine|nahato|mujiku|:-:|root' $tgt/env-user.md | awk -v FS='|' -v ORS='' '{print ","$4}'|sed 's/$/@/'
grep -vP 'ユーザーID|aine|kuraine|nahato|mujiku|:-:|root' $tgt/env-user.md | awk -v FS='|' -v ORS='' '{print ","$5}'|sed 's/$/@/'
} | xargs -n6 | sed '/^$/d' |\
while read file row_cnt user_info;do
#echo $file $row_cnt $user_id $user_name $group_id $group_name
if [ 0 -eq $row_cnt ];then
#cmd=$(echo "$TEMPLATE_CMD" | >>$file/Dockerfile.auto)
cmd=$(sed "s:ARGS::;s:^:echo :;" <<< "$TEMPLATE_CMD")
if [ "$SHELL" = 'bash' ];then
echo $cmd | sed "s;echo ;sed -i \x22$REPLACE_POS i;;s;$;\x22 $file/Dockerfile.auto;" | $SHELL
else
echo $cmd | sed "s;echo ;sed -i \x22$REPLACE_POS i;;s;$;\x22 $file/Dockerfile.auto;"
fi
else
col_cnt=$(echo $user_info | awk '{print gsub("@","",$1)}')
fi
for (( i=1;i<=$row_cnt;i++));do
args="$(seq $col_cnt | while read j;do
#echo $file $i $j $user_info $(echo $user_info | cut -d'@' -f$j | awk -v i=$(($i+1)) -v FS=',' '{print $i}')
echo $(echo $user_info | cut -d'@' -f$j | awk -v i=$(($i+1)) -v FS=',' '{print $i}')
done | xargs -n4)"
cmd=$(sed "s:ARGS:$args:;s:^:echo :;" <<< "$TEMPLATE_CMD")
#cmd=$(sed "s:ARGS:$args:;s:^:echo :;s:$: $file/Dockerfile.auto:" <<< "$TEMPLATE_CMD")
if [ "$SHELL" = 'bash' ];then
echo $cmd | sed "s;echo ;sed -i \x22$REPLACE_POS i;;s;$;\x22 $file/Dockerfile.auto;" | $SHELL
else
echo $cmd | sed "s;echo ;sed -i \x22$REPLACE_POS i;;s;$;\x22 $file/Dockerfile.auto;"
fi
done
done
sed -i '/DOCKERFILE_INSTALL_USER/d' $tgt/Dockerfile.auto
done < <(find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt' | grep $OS_VERSION)
}
main(){
ENV_REPO=$1;shift
DEBUG=$1;shift
if [ "$DEBUG" = '--debug' ];then
SHELL=: #do nothing (dry run)
else
SHELL=bash #actually execute
fi
[ -z $ENV_REPO ] && usage
export -f execute
find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt' | grep -Po '[a-z]+(-[0-9]{1,}){1,}' | sort | uniq | while read tgt;do execute $tgt ;done
}
main "$@"
<file_sep>- zip関数javaないからから始まる色々な言語実装例
- https://teratail.com/questions/28356
- 動作確認一括チェックスクリプト
- https://bitbucket.org/megmogmog1965/java8streamapi/src/develop/src/java8streamapi/LastProcess.java
- collection java のまとめ
- https://static.rainfocus.com/oracle/oow16/sess/1462848741026001JnVD/ppt/CollectorsFair_J1_2016_final.pdf
- これでファイルヘッダ項目からクラス生成できる。文字列組み立てで。collection.joiningとかでパタン生成。
- https://qiita.com/ukiuni@github/items/2c55a64f45272f4faef5
- リフレクションapiでjvmにロード済みかどうか判定できる。
- beanクラス動的作成可能
- https://chibash.github.io/essays/jvst.html
- ロード済みであれば、動的生成されたメンバーないしメソッドを使ってstream処理などできそう。便利。
- 関数リストを引数にもらって、対象リストに対して加工して、結果をリスト in マップで返却。キーは関数名単位。
- マルチマップというのがあるのか便利。
- partition byと相性良さけ
- https://blog.devneko.net/2013/09/javamap.html?m=1
- https://blog.y-yuki.net/entry/2016/10/03/003000
- unnestできてave出来たら、いいな
- https://qiita.com/sano1202/items/64593e8e981e8d6439d3
- toCollectionがあるぞ
- https://docs.oracle.com/javase/jp/8/docs/api/java/util/stream/Collectors.html
- このイメージで入れ子にしてunnestできそう
- https://stackoverflow.com/questions/33253858/java-streams-group-a-list-into-a-map-of-maps/33254512
- ダイナミックjava したい 複数の引数が行けるか。list map 行けるか
- https://m-shige1979.hatenablog.com/entry/2017/02/08/080000
- https://docs.oracle.com/javase/jp/6/api/java/lang/Class.html#newInstance()
- https://qiita.com/KeithYokoma/items/9e692808095acf560bc9
- https://itsakura.com/java-reflect
- https://teratail.com/questions/11369
- javaからpython呼び出ししたい
英語を日本語にしたい
- https://qiita.com/ota-meshi/items/76f4a65e9bd2fe0e2f68
- http://www.asnm4.com/2019/06/python%E3%81%8B%E3%82%89google-translate%E3%82%92%E4%BD%BF%E3%81%86/
- lsなどの実行結果に対して集合演算ハンディにしたい
- https://nim-lang.org/docs/sets.html
- 変数宣言でvarが変更可能で、letが変更不可。javascriptみたいな雰囲気。
```
var a = toCountTable("aaabbc")
let b = toCountTable("bcc")
a.merge(b)
doAssert a == toCountTable("aaabbbccc")
```
- nim 多値タプルの配列はtable変換すると、ハッシュエントリになる。便利。Pythonの変換と同じ。
- nim 多値引数受け取れる。
- https://nim-lang.org/docs/tables.html#basic-usage-table
- ロゼッタコード
- https://rosettacode.org/wiki/Permutations
- csharp Linux 導入
- https://banikojp.blogspot.com/2018/04/linuxc.html?m=1
- sudachi 導入
- https://kurage.cc/blog-sudachi/
- https://github.com/WorksApplications/Sudachi
- elasticsearch Kibana Kuromori
- https://mattintosh.hatenablog.com/entry/20190205/1549371581
- perl タグクラウド
- http://www2u.biglobe.ne.jp/~MAS/perl/waza/tagcloud.html
- go api 検索エンジン
- https://github.com/astaxie/build-web-application-with-golang/blob/master/ja/preface.md
- https://github.com/tenntenn/goweb-book/blob/master/README.md
- https://github.com/tenntenn/qiitaexporter/blob/master/README.md
- https://future-architect.github.io/articles/20200327/
- go
- https://schoo.jp/teacher/2869
- python nested comprehensions
Move the second for to the back and put the expression at the front.
```
arr = [[1,2,3], [4,5,6], [7,8,9]]
[i for j in arr for i in j]
```
- perl このパターンマッチは色々応用できそうな気がする
- https://perldoc.jp/variable/%24%5EN
- python lambdaと内包式の使い方を行列データ扱うときで比較したい。自作関数入れられるかどうか。
- https://note.nkmk.me/python-list-comprehension/
- c# チュートリアル
- https://docs.microsoft.com/ja-jp/learn/paths/csharp-first-steps/?WT.mc_id=docs-twitter-machiy
- Elmoとword2vec
- https://speakerdeck.com/tagucci/elmo-for-searching-similar-keywords
- javascript最近のやつ
- https://future-architect.github.io/typescript-guide/
- JavaScript 型変換
- https://mtane0412.com/convert-array-to-object-using-reduce/
- JavaScript でgroup by はfindとreduceのコンボかmapとhashのコンボどっちかなのかな。
- JavaScript ハマりそう
- https://www.webprofessional.jp/map-reduce-functional-javascript/
- 型変換に使えそう
- https://ginpen.com/2018/12/23/array-reduce/
- perlでサブマッチ抜き出すことなどをしたかった
- nodejs sqlite3連携 標準入力からファイル名複数配列で受け取って、fsに渡す。
- https://qiita.com/shirokuman/items/509b159bf4b8dd1c41ef
- https://www.gesource.jp/weblog/?p=8289
- js mapでシェルげいしておく。
- https://developer.mozilla.org/ja/docs/Web/JavaScript/Reference/Global_Objects/Array/map
- ruby の変数一覧
- https://docs.ruby-lang.org/ja/latest/doc/symref.html
- pythonのzipは使い慣れておきたい。
- 可視化ツール選定しておきたい。用途別に。
- https://blog.insightdatascience.com/ai-for-3d-generative-design-17503d0b3943
- 機械学習のイメージ湧きやすい。
- It looks like a large number of reverse-lookup master tables will need to be built.
- For example, how to derive the difference in occurrence positions when the search condition is decomposed.
Whether the difference from the previous row's right-hand value (per left-of-semicolon key) equals the length of the previous row's string could distinguish the cases. No, actually it can't.
The number of grams produced when the search condition is decomposed needs to be computed beforehand.
Per left-of-semicolon key, rows whose right-of-semicolon numbers are consecutive are exact matches; check that no number is missing. At index time this logic works if a unique number is assigned per document in gram order.
- There are two kinds of decomposition:
- splitting the search condition, and converting the table data into grams when it is built, etc. (see the bigram sketch at the end of this list).
- When not using n-grams, the string length is probably also needed.
- 自然言語処理 フロー乗ってて 導入は良さげ
- https://towardsdatascience.com/named-entity-recognition-ner-with-bert-in-spark-nlp-874df20d1d77
- graphqの自動テスト
- https://qiita.com/pocke/items/bfe120f07bd8d94724a7
- 文脈を意識しないといけんのだな。わかってきたぞ。テーブル持つときは、元文書IDの参照を持たせてあげる。
- バイグラム
- http://gihyo.jp/dev/serial/01/make-findspot/0005?page=2
- https://qiita.com/greymd/items/3515869d9ed2a1a61a49#%23%23%20%E5%85%A8%E6%96%87%E6%A4%9C%E7%B4%A2%E3%82%A8%E3%83%B3%E3%82%B8%E3%83%B3%E3%82%92%E4%BD%9C%E3%82%8B
- 入力単語に対する類義語の距離は分析関数のレンジでエミュできる。どこまでのサジェストをするか引数にレンジ値を渡して動的に変更できる。
- word2vecインストール
- https://qiita.com/kenta1984/items/93b64768494f971edf86
- perl 論理演算子
- https://tutorial.perlzemi.com/blog/20100816136168.html
- perlのオプションCSDで日本語扱える
- https://qiita.com/ohtsuka1317/items/92c339a65533e1e6c6fc
- perl 実戦的
- https://qiita.com/teckl/items/98de382787401d2392c3
- perl 部分マッチ文字列抜き出し
- https://qiita.com/ngyuki/items/d5dde70cf2de952cfb87
- gpu大切なんだな
- https://ainow.ai/2020/03/11/183804/
- errorログを抜いてtrsに書き込みたい。
- 定周期エラー監視ジョブはtrsから対象抜き出して自動復旧したい。
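- エラー行の抜き出し自体はこんなスケッチで足りそう(ログファイル名と追記先md-trs.mdのパスは仮)
```
grep -inP 'error' $HOME/script-env/docker-log/build.log |
sed 's/^/- /' >>$HOME/script-env/md-trs.md
```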
- perl 変数
- awkのセパレーターと比較してちょっと整理しておく。
- https://perldoc.jp/index/variable
- bash からの書き換えはrubyでも楽しそう
- https://qiita.com/jnchito/items/dedb3b889ab226933ccf
- ruby ファイル読み込み
- https://www.buildinsider.net/language/rubytips/0021
- ruby map関数
- https://uxmilk.jp/21695
- ruby group by
- https://docs.ruby-lang.org/ja/latest/method/Enumerable/i/group_by.html
- https://doruby.jp/users/pe/entries/ruby%E3%81%A7%E9%85%8D%E5%88%97%E3%81%8B%E3%82%89%E9%87%8D%E8%A4%87%E3%81%99%E3%82%8B%E5%80%A4%E3%82%92%E6%8A%BD%E5%87%BA%E3%81%99%E3%82%8B%E6%96%B9%E6%B3%95%E3%82%92%E6%8E%A2%E3%81%97%E3%81%9F%E9%9A%9B%E3%81%AB%E8%A6%8B%E3%81%A4%E3%81%91%E3%81%9Fgroup_by%E3%83%A1%E3%82%BD%E3%83%83%E3%83%89%E3%81%8C%E4%BE%BF%E5%88%A9%E3%81%A0%E3%81%A3%E3%81%9F
- ruby 配列メソッド
- https://qiita.com/jnchito/items/118cca7ac2f01e1ca6a0
- python小技
- https://qiita.com/Namibillow/items/954c7f9f53682d6dd9c9
- sedっぽくて面白いレーベンシュタイン距離
- https://ja.m.wikipedia.org/wiki/%E3%83%AC%E3%83%BC%E3%83%99%E3%83%B3%E3%82%B7%E3%83%A5%E3%82%BF%E3%82%A4%E3%83%B3%E8%B7%9D%E9%9B%A2
- bashスクリプトpythonに書き換えるか。while read で変数外上書きされないのが忘れた頃にハマるんだよな。
- 楽しそうだないいなこれ。word2vec
- https://qiita.com/Hironsan/items/11b388575a058dc8a46a
- この環境ディレクトリ自体を学習題材に使用しよう。
- 自然言語処理のデータセット。コマンド乗ってる。
- https://towardsdatascience.com/improving-sentence-embeddings-with-bert-and-representation-learning-dfba6b444f6b
- 環境ディレクトリ名を与えれば、全てできるようにしておきたい。
- 名寄せの背景分かり易い
- https://pompom168.hatenablog.com/entry/2019/08/09/144054
- 自然言語処理データセット
- https://qiita.com/daimonji-bucket/items/56143b2abbfadb4429af
- 言葉が難しいけどある単語にグループ値を振りたいための話。
- https://qiita.com/daimonji-bucket/items/47f806624a5924f2d47b
- カフェラッテ。名寄せ。
- https://buildersbox.corp-sansan.com/entry/2020/03/10/110000
- docker-composeの操作背景が分かり易い
- https://qiita.com/sanpo_shiho/items/fc8082f3d303c04cca2e
- アプリのインストールはAPP_NAMEに依存しないように管理する。
- バージョン番号はハイフン数字1回以上のグループの1回以上の繰り返しで表現できる。
- アプリが複数個ある場合、バージョン番号がそれぞれに付随していないものを洗い出す。
- たしかバージョン番号を気にせず、常に最新をインストールするようなソフトもあったのでバージョン指定可能かどうかをまず確認した方がいいか。
- Repoからバージョン番号を気にしていないものを洗い出す。
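- 洗い出し自体は「ハイフン+数字の繰り返し」をgrep -vPで弾くだけでいけそう(最小スケッチ。アプリ名はサンプル)
```
printf '%s\n' git-2-24-1 python-3-7-4 maven-3-6-3 dbeaver |
grep -vP '(-[0-9]+)+$'
# → dbeaver だけ残る=バージョン番号が付随していないもの
```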
- 元のDockerfileをDockerfile.asisにリネームして、Dockerfile.autoをDockerfileにリネームして一時的にビルドファイルをスイッチできるようにする。
引数の順序入れ替えて元に戻せるようにする。スイッチ元とスイッチ先を指定した方がいいか。
- jqの強みはエスケープ機能が関数として組まれていることかもしれない。
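- エスケープ関数の例(@jsonや@shなど。最小スケッチ。キー名はサンプル)
```
echo '{"msg":"a \"b\" c"}' | jq -r '.msg | @json'           # JSON文字列として再エスケープ
echo '{"path":"/tmp/空白 あり.txt"}' | jq -r '.path | @sh'  # シェルに安全に渡せる形でクォート
```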
- 検索ワードをストップワードに自動追加するしくみがほしい
- vmコマンドライン操作便利
- https://qiita.com/tukiyo3/items/5ecea7f95cb961f07194
- ネットワークのパケット送受信量を計測
- https://qiita.com/suin/items/d4428e65a2cc1d956581
- プロセスの起動時刻を調査
- https://qiita.com/todaemon/items/82edbad20e37039d5162
- 便利そう
- https://qiita.com/usiusi360/items/7b47be9d0ab5b1acd608
- プロセスId単位でプロセスの起動日時を控える。対象のプロセスidが検索に引っかからないなら、プロセス終了起動時刻を取得。
- https://qiita.com/isaoshimizu/items/ee555b99582f251bd295
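- 起動日時の取得自体はpsで足りる(最小スケッチ。PIDはサンプル)
```
# 全プロセスのPIDと起動日時
ps -eo pid,lstart,cmd --no-headers
# 特定PIDだけ
ps -o lstart= -p 1
```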
- ファイルの詳細情報を取得するコマンド。更新頻度出すのに使えそう。
- https://qiita.com/reoring/items/275ae83dfaa65bac568c
- コンテナ手動かすやつ
- https://www.google.com/amp/s/employment.en-japan.com/engineerhub/entry/2019/02/05/103000%3Famp%3D1
- レポに存在するファイルが環境ディレクトリ内のDockerfile内に存在するかチェックし、なければリネームする。
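- チェックだけならこんなスケッチでいけそう(リネーム先の.unusedサフィックスは仮。確認用にechoで出すだけにしてある)
```
while read -r f; do
  name=$(basename "$f")
  grep -qF "$name" $HOME/script-env/*/Dockerfile || echo mv "$f" "$f.unused"
done < <(find $HOME/script-repo -maxdepth 1 -type f -name '*.sh')
```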
- docker composeはローカルイメージ見てくれる
- https://qiita.com/zembutsu/items/9e9d80e05e36e882caaa
- flutendのサンプル
- https://www.jkawamoto.info/blog-ja/docker-compose-for-logging-service/
- oracleとsqlserverとibmもうちょいいい感じにしておく。
- neckなのはプログラムがバイナリ提供のものを使用しているのか、ソースからビルドしているものなのかを区別していないこと。レポジトリかえればいいか。makeがあるかないかで区別できる。
- Dockerfile.sub Dockerfileは環境ディレクトリを他環境からこぴっておしまいになり力尽きている際には以下のような感じで初期化しておく
```
>Dockerfile.sub
>Dockerfile
```
- rubyでxmlファイル処理するコマンドまとめる。
- 各環境ディレクトリの引数を一元管理するためのマークダウンファイルが欲しい。
- 適用されている値を一覧で管理したい。優先度を管理する。
- 必要十分条件にするためには十分条件が必要。必要条件を定義しないといけない。十分条件は絞る必要はあまりない。必要条件はテキストマイニングと呼ばれるもので、規則を見つけてパタン定義。
- gradleはsdkからインスこすればいい。ホストからマウントする必要ない。
- プログラム更新頻度を調べたい。バージョン版数への関心を更新頻度に反比例させたい。メンテが大変になるので。
- プログラム単位でデフォルトバージョンを管理したい
- Dockerfile.subの実行順序
- patchの名称はデフォルトでpre-patch
1. pre-patch(あれば)
2. install
3. post-patch(あれば)
4. config
5. healthcheck
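- 各ステップをスクリプトに切り出した場合、この順序で存在するものだけ流すイメージ(最小スケッチ。ファイル名は仮)
```
for step in pre-patch install post-patch config healthcheck;do
  [ -f ./${step}.sh ] && bash ./${step}.sh
done
```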
- もう事前ビルドしてマウントだな。
- https://qiita.com/IanMLewis/items/badc55b5d8e188ace34a
- repo管理しててよかった。
- MySQL build しくってたのがここでリカバレるかも。必要そうなライブラリが乗ってて、ビルド手順乗っている。
- https://mroonga.org/ja/docs/install/others.html
- mysqlは5.6!!
- テンプレートにキャッシュ削除するスクリプトを追加する。
- centos
- https://easyramble.com/yum-clean-and-update.html
- Ubuntu
- https://qiita.com/SUZUKI_Masaya/items/1fd9489e631c78e5b007
- gpuを利用したディープラーニング
- https://techable.jp/archives/118651
- rust製のコマンド
- https://qiita.com/navitime_tech/items/c249269a3b47666c784b
- groongaのバージョン組み合わせ作れるだけ作ってみる
- ruby groonga
- http://ranguba.org/ja/
- スクリプトに参照を持たせ始めるタイミングと処理順序を意識し始めるタイミングは同じことが改めてわかった。処理順序をファイルで管理する。ファイル名に縛られないために。
- ダイナミックリンクディレクトリのエントリディレクトリをシステム単位でキャッシュ作成対象として認識させておく際に、
```
/etc/ld.so.conf.d/プロセス名.conf
```
といった名称で、
例えば、
```
/etc/ld.so.conf.d/postgres.conf
```
のように。環境個別に設定を利かすのがよさげ。記載するプロセス名はscript-envに格納しているディレクトリ名から選択する。
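- 設定ファイルを置いてからldconfigでキャッシュを作り直すところまでの最小スケッチ(ライブラリパスは仮)
```
echo '/usr/local/pgsql/lib' >/etc/ld.so.conf.d/postgres.conf
ldconfig
ldconfig -p | grep -i libpq   # 反映確認
```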
- カンマ区切り文字列をbashでunnestがレコードの意味付けを保ったままできたので、sketchに投入しておく
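- スケッチの中身はこんなイメージ(1列目をレコードのキーとして保ったまま、2列目以降を縦持ちにする。サンプルデータは仮)
```
printf '%s\n' '1,a,b,c' '2,x,y' |
awk -F',' '{for(i=2;i<=NF;i++)print $1,$i}'
```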
- 各環境ディレクトリ特有の動作確認スクリプトをヘルスチェックスクリプトととしてレポに登録しておきたい。リグレッションで使い勝手いいように。
- 名前はconfirmじゃなくてhealthcheckがいいか。
- windowsバッチファイルで複数ファイルに対してサクラエディタのマクロをよく適用させているから、まとめておきたい。VirtualBox上で再現を撮っておきたい。
- Postgres-pgroongaにルビーのメモリに関するスクリプトあった。
- makeのマルチスレッド数こんな感じで制御するのを変数化して、デフォルトと環境個別で制御したい。
make -j$(grep '^processor' /proc/cpuinfo | wc -l)
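- 変数化するとこんな感じ(MAKE_JOBSは仮の変数名。未指定ならコア数をデフォルトに、環境個別に上書き可)
```
MAKE_JOBS=${MAKE_JOBS:-$(grep -c '^processor' /proc/cpuinfo)}
make -j$MAKE_JOBS
```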
- コンテナの使用頻度を出したい。それをリスト化してスケジューリングしたい。常に全てのイメージができている必要はないので。
- 各OSコンテナにvim,dev,tool,networkが入ったコンテナを用意しておく。
- メンテスクリプトはその場限りのパッチには先頭にPを運用していくパッチには先頭にOをつけ、分類する。
- md-step.mdにビルド時間も抽出して埋め込みたい。
- 遅延時間を表示したい
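- timeの出力は標準エラーに出るので、2>&1でまとめてからteeしておけばlogからrealを抜き出せる(最小スケッチ。IMAGE_NAMEとログファイル名は仮)
```
{ time docker build -t $IMAGE_NAME . ; } 2>&1 | tee log
grep -P '^real' log | awk -v d="$(date +%F)" '{print "|"d"|"$2"|"}' >>md-step.md
```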
- gron commandの使い方。
- Mediumからサジェストきた。面白い。
- JSON Processing Pipelines with gron
- https://medium.com/capital-one-tech/json-processing-pipelines-with-gron-6fbd531155d7
- script-repoは肥大化していく一方なので、各環境ディレクトリごとに必要なものをインストールし終えたら、削除するようにする。
- 初回ログイン時にscript-repoを再クローンして差分更新を行えば、ディスクイメージは不用意に増やさずに、差分リフレッシュができる。💩
- oracleでユーザー定義ファンクションで任意の文章を受け取ってjava拡張で組み込んだMecab解析結果をコレクションとして返却するファンクションを作成したい。
- 便利そう。おもいついた。
- twitter認証文言テンプレートこれ試してみようかな→rejectされたので、とても悲しい。メールアドレス1こ残っているけど大切にしないとなー。とにかく悲しい。熟考してから送信すること。
- https://note.com/mogya/n/nbd9a720f8a5b
- sqlserver bulk import
- https://qiita.com/ExA_DEV/items/2d0cdff5bdd43591f7ce
- flockコマンドで排他制御できる実例 プロセスがシーケンシャルに起動されていることがflockコマンドのおかげでわかる
- https://b.ueda.tech/?post=02709
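- bashのflockで多重起動を防ぐ最小スケッチ(ロックファイルのパスは仮)
```
(
  flock -n 200 || { echo "既に実行中なのでスキップ"; exit 1; }
  echo "排他区間の処理"
) 200>/tmp/myjob.lock
```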
- phpバインドもあるgroonga
- https://www.clear-code.com/blog/category/groonga/2.html
- ruby バインドのrroongaもあるそう
- https://qiita.com/groonga/items/7642e9e22bcda4b4327f
- perlのflockとbashのflock
- http://perl.no-tubo.net/2006/10/22/flock%e3%82%92%e4%bd%bf%e3%81%a3%e3%81%9f%e5%bc%b7%e5%9b%ba%e3%81%aa%e6%8e%92%e4%bb%96%e5%87%a6%e7%90%86/
- perlテストデータ
- http://tsucchi.github.io/slides/yokohamapm/11/
- json日本語データ作成はpython経由がハンディかな
- https://qiita.com/ohbarye/items/452fefa2be5d56268b50
- elasticsearch 日時指定検索
- http://togattti.hateblo.jp/entry/2017/08/17/174953
- elasticsearch import export ダンプコマンド別途あり
- https://qiita.com/nakazii-co-jp/items/3199433d685d0600c6d6
- https://qiita.com/datake914/items/2313894c684a7cba992c
- compose用のレポジトリ作成しようかな
- script-jam
- https://github.com/YoshinoriN/docker-redmine-orchestration
- oracle import export mysqlと同じような仕組みだろう
- https://qiita.com/toshihirock/items/86931e3c52dc47287dd2
- oracleのインポートエクスポートまとめる
- sqlserverのインポートエクスポートまとめる
- Nodejsとgroonga連携まとめる
- コピペチェッカー使い方調べる
- postgresへdbeaverから繋ぐ
- マイリーダー用にjsonファイルからマークダウンファイルに変換するコマンド必要なので、
スクラッチから作る。
- https://gist.github.com/mignonstyle/083c9e1651d7734f84c99b8cf49d57fa
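- jq経由ならこんなスケッチで足りそう(キー名title・urlと出力先index.mdは仮)
```
echo '[{"title":"メモ1","url":"https://example.com/1"},{"title":"メモ2","url":"https://example.com/2"}]' |
jq -r '.[] | "- [\(.title)](\(.url))"' >index.md
```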
- 検索スピード上げるツール
- https://qiita.com/youwht/items/7f5686a30eed16864954
- Linux版はいけなかったので、wikiないし辞書データをgroongaに投入してコマンドで引けるようにする。
- ビルド対象を保持期間日数で管理するようにする
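- ディレクトリのmtimeを最終ビルドの目安にするという仮定なら、find -mtimeで保持期間超過分だけを対象にできる(仮スケッチ。KEEP_DAYSは仮の変数名)
```
KEEP_DAYS=7
find $HOME/script-env -mindepth 1 -maxdepth 1 -type d -mtime +$KEEP_DAYS |
grep -vP '\.git|docker-log|mnt'
```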
- 最終的には環境をダイナミックに作りたい。特にライブラリの依存関係。ソースからビルドが安全か。
- OS名とそのバージョン 単一
- アプリとそのバージョン 単一ないし複数
- メモリ量 システムデフォルトとコンテナ個別
- ユーザー名 システムデフォルトとコンテナ個別
<file_sep># 参考文献
http://hro-blog.blogspot.com/2016/02/linuxusb.html
https://qiita.com/miyamotok0105/items/2baf80cf1c300503bf5d
基本rootユーザーで作業
# マシンにusbを差し込む
# usbのデバイス名を確認する
# パーティションALL削除
洗い替えの運用のみ
パーティションの先頭番号から削除しないこと
削除すると以下のように照合順序がおかしいことになる
```
[root💜centos (金 1月 17 07:42:25) /home/aine/script_env/java]$fdisk /dev/sda1
Welcome to fdisk (util-linux 2.23.2).
Changes will remain in memory only, until you decide to write them.
Be careful before using the write command.
コマンド (m でヘルプ): p
Disk /dev/sda1: 7758 MB, 7758955008 bytes, 15154209 sectors
Units = sectors of 1 * 512 = 512 bytes
Sector size (logical/physical): 512 bytes / 512 bytes
I/O サイズ (最小 / 推奨): 512 バイト / 512 バイト
Disk label type: dos
ディスク識別子: 0x6e697373
パーティションテーブルに見えません
おそらく間違ったデバイスを選択しています。
デバイス ブート 始点 終点 ブロック Id システム
/dev/sda1p2 ? 1917848077 2462285169 272218546+ 73 不明
/dev/sda1p3 ? 1818575915 2362751050 272087568 2b 不明
/dev/sda1p4 ? 2844524554 2844579527 27487 61 SpeedStor
パーティションテーブル項目がディスクの順序と一致しません
コマンド (m でヘルプ):
```
pコマンド(パーティション情報の表示)
dコマンド(パーティションの削除)
wコマンド(変更の書き込み)
```
$fdisk /dev/sda1
Welcome to fdisk (util-linux 2.23.2).
Changes will remain in memory only, until you decide to write them.
Be careful before using the write command.
コマンド (m でヘルプ): p
Disk /dev/sda1: 7758 MB, 7758955008 bytes, 15154209 sectors
Units = sectors of 1 * 512 = 512 bytes
Sector size (logical/physical): 512 bytes / 512 bytes
I/O サイズ (最小 / 推奨): 512 バイト / 512 バイト
Disk label type: dos
ディスク識別子: 0x6e697373
パーティションテーブルに見えません
おそらく間違ったデバイスを選択しています。
デバイス ブート 始点 終点 ブロック Id システム
/dev/sda1p2 ? 1917848077 2462285169 272218546+ 73 不明
コマンド (m でヘルプ):
コマンド (m でヘルプ): d
Selected partition 2
Partition 2 is deleted
コマンド (m でヘルプ):
コマンド (m でヘルプ): w
パーティションテーブルは変更されました!
ioctl() を呼び出してパーティションテーブルを再読込みします。
WARNING: Re-reading the partition table failed with error 22: 無効な引数です.
The kernel still uses the old table. The new table will be used at
the next reboot or after you run partprobe(8) or kpartx(8)
ディスクを同期しています。
```
# パーティション作成
```
$fdisk /dev/sda1
Welcome to fdisk (util-linux 2.23.2).
Changes will remain in memory only, until you decide to write them.
Be careful before using the write command.
コマンド (m でヘルプ): p
Disk /dev/sda1: 7758 MB, 7758955008 bytes, 15154209 sectors
Units = sectors of 1 * 512 = 512 bytes
Sector size (logical/physical): 512 bytes / 512 bytes
I/O サイズ (最小 / 推奨): 512 バイト / 512 バイト
Disk label type: dos
ディスク識別子: 0x6e697373
デバイス ブート 始点 終点 ブロック Id システム
コマンド (m でヘルプ): n
Partition type:
p primary (0 primary, 0 extended, 4 free)
e extended
Select (default p): p
パーティション番号 (1-4, default 1): 1
最初 sector (2048-15154208, 初期値 2048):
初期値 2048 を使います
Last sector, +sectors or +size{K,M,G} (2048-15154208, 初期値 15154208):
初期値 15154208 を使います
Partition 1 of type Linux and of size 7.2 GiB is set
コマンド (m でヘルプ): p
Disk /dev/sda1: 7758 MB, 7758955008 bytes, 15154209 sectors
Units = sectors of 1 * 512 = 512 bytes
Sector size (logical/physical): 512 bytes / 512 bytes
I/O サイズ (最小 / 推奨): 512 バイト / 512 バイト
Disk label type: dos
ディスク識別子: 0x6e697373
デバイス ブート 始点 終点 ブロック Id システム
/dev/sda1p1 2048 15154208 7576080+ 83 Linux
コマンド (m でヘルプ): w
パーティションテーブルは変更されました!
ioctl() を呼び出してパーティションテーブルを再読込みします。
WARNING: Re-reading the partition table failed with error 22: 無効な引数です.
The kernel still uses the old table. The new table will be used at
the next reboot or after you run partprobe(8) or kpartx(8)
ディスクを同期しています。
```
作成されているか確認
```
[root💜centos (金 1月 17 07:45:41) /home/aine/script_env/java]$fdisk -l
Disk /dev/nvme0n1: 500.1 GB, 500107862016 bytes, 976773168 sectors
Units = sectors of 1 * 512 = 512 bytes
Sector size (logical/physical): 512 bytes / 512 bytes
I/O サイズ (最小 / 推奨): 512 バイト / 512 バイト
Disk label type: dos
ディスク識別子: 0x000114a2
デバイス ブート 始点 終点 ブロック Id システム
/dev/nvme0n1p1 * 2048 7813119 3905536 83 Linux
/dev/nvme0n1p2 7813120 15624191 3905536 82 Linux swap / Solaris
/dev/nvme0n1p3 15624192 976758783 480567296 83 Linux
Disk /dev/sda: 7759 MB, 7759462400 bytes, 15155200 sectors
Units = sectors of 1 * 512 = 512 bytes
Sector size (logical/physical): 512 bytes / 512 bytes
I/O サイズ (最小 / 推奨): 512 バイト / 512 バイト
Disk label type: dos
ディスク識別子: 0x04330c43
デバイス ブート 始点 終点 ブロック Id システム
/dev/sda1 63 15154271 7577104+ 7 HPFS/NTFS/exFAT
```
# ファイルシステムタイプを変更
基本 ext4で。
```
$mkfs.ext4 /dev/sda1
mke2fs 1.42.9 (28-Dec-2013)
Filesystem label=
OS type: Linux
Block size=4096 (log=2)
Fragment size=4096 (log=2)
Stride=0 blocks, Stripe width=0 blocks
474208 inodes, 1894276 blocks
94713 blocks (5.00%) reserved for the super user
First data block=0
Maximum filesystem blocks=1939865600
58 block groups
32768 blocks per group, 32768 fragments per group
8176 inodes per group
Superblock backups stored on blocks:
32768, 98304, 163840, 229376, 294912, 819200, 884736, 1605632
Allocating group tables: done
Writing inode tables: done
Creating journal (32768 blocks): done
Writing superblocks and filesystem accounting information: done
```
# デバイスをマウント
ファイルシステムから操作できることを確認
```
$mount -t ext4 -w /dev/sda1 /mnt/usb1
$ls -la /mnt/usb1
合計 24
drwxr-xr-x. 3 root root 4096 1月 17 07:57 .
drwxr-xr-x. 3 root root 4096 1月 17 07:51 ..
drwx------. 2 root root 16384 1月 17 07:57 lost+found
```
# コピー元ディレクトリを/mntに作成
マウント先ディレクトリの一つ上からはusb1と表示される
```
$cd /mnt
$mkdir cp-src
$ll
合計 8
drwxr-xr-x. 2 root root 4096 1月 17 08:00 cp-src
drwxr-xr-x. 3 root root 4096 1月 17 07:57 usb1
```
# コピー元ディレクトリにコピー物を集める
```
$find /home/aine/Dow* -type f | grep 履歴書 | grep -v テンプレート | xargs -I@ cp @ /mnt/cp-src/
```
# rsyncでコピー
コピー前
```
$tree
.
|-- cp-src
| |-- \350\201\267\345\213\231\345\261\245\346\255\264\346\233\270.pdf
| `-- \345\261\245\346\255\264\346\233\270.pdf
`-- usb1
`-- lost+found
3 directories, 2 files
```
コピー
```
$rsync -av cp-src usb1
sending incremental file list
cp-src/
cp-src/履歴書.pdf
cp-src/職務履歴書.pdf
sent 227,496 bytes received 58 bytes 455,108.00 bytes/sec
total size is 227,245 speedup is 1.00
```
コピー後
```
$tree
.
|-- cp-src
| |-- \350\201\267\345\213\231\345\261\245\346\255\264\346\233\270.pdf
| `-- \345\261\245\346\255\264\346\233\270.pdf
`-- usb1
|-- cp-src
| |-- \350\201\267\345\213\231\345\261\245\346\255\264\346\233\270.pdf
| `-- \345\261\245\346\255\264\346\233\270.pdf
`-- lost+found
4 directories, 4 files
```
# usbをマシンから外す
# 再び差し込む
# ファイルViewerで確認
コピーしたファイルないしディレクトリが存在しているか確認
<file_sep>#!/bin/bash
usage(){
cat <<EOS
Usage:
$0 script-env
EOS
exit 0
}
REPO="$1";shift
[ -z $REPO ] && usage
while read tgt;do
#確認
echo $HOME/$REPO/$tgt/Dockerfile
#作成
touch $HOME/$REPO/$tgt/user.md
>$HOME/$REPO/$tgt/user.md
#抽出
sed -n '/RUN groupadd/,/root/p' $HOME/$REPO/$tgt/Dockerfile >$HOME/script-repo/script-env-$tgt-user.sh
#置換
sed -i -r 's;RUN\s{1,};;g' $HOME/script-repo/script-env-$tgt-user.sh
#挿入
sed -i '1i#!/usr/bin/env bash' $HOME/script-repo/script-env-$tgt-user.sh
#権限付与
chmod 755 $HOME/script-repo/script-env-$tgt-user.sh
#リネーム
echo $HOME/script-repo/script-env-$tgt-user.sh | xargs -I@ bash -c 'echo mv @ $(echo @ | perl -pe "s;(.*-[0-9]+);\1-script-env;g;s;/script-env-;/;;s;script-env;install;g")' | bash
done < <(ls -l $HOME/$REPO | grep -P '^d' | awk '{print $9}' | grep -v docker-log)
#追記
while read tgt;do
echo $tgt
#FILE=$(echo $tgt | perl -pe "s/(?<=[0-9])-script-env//g;s;-user\.sh;/user\.md;g;s/script-repo/$REPO/g")
##確認
#echo $FILE
##作成
#touch $FILE
#>$FILE
##抽出・書込
#grep -oP '\-g [0-9]+ [a-zA-Z]+|\-g [a-zA-Z]+ -u [0-9]+ [a-zA-Z]+|[a-zA-Z]+_pwd' $tgt | perl -pe 's/.*-u //g;s/-g //g;s/ /\n/g' | xargs -n5 | perl -pe 's/root_pwd/0 root 0 root root_pwd/g' | \
# sed -r '/^$/d;s;^|$| ;|;g;1i|ユーザーID|ユーザー名|グループID|グループ名|パスワード|' | sed '2i|:-:|:-:|:-:|:-:|:-:|' >$FILE
done < <(find $HOME/script-repo | grep user)
<file_sep># 参考文献
- https://sites.google.com/a/chromium.org/chromedriver/getting-started
- https://www.ytyng.com/blog/ubuntu-chromedriver/
- https://developers.google.com/web/updates/2017/04/headless-chrome?hl=ja
- https://dev.classmethod.jp/etc/chromium-ubuntu-headless/
- https://worklog.be/archives/3422
- https://vaaaaaanquish.hatenablog.com/entry/2017/06/06/194546
# dockerイメージ作成
```
time docker build -t centos-7-6-18-10-selenium-chromium . | tee log
```
# dockerコンテナ作成
```
docker run --privileged --shm-size=12gb -v /run/udev:/run/udev -v /run/systemd:/run/systemd -v /tmp/.X11-unix:/tmp/.X11-unix -v /var/lib/dbus:/var/lib/dbus -v /var/run/dbus:/var/run/dbus -v /etc/machine-id:/etc/machine-id -v /etc/localtime:/etc/localtime -v /sys/fs/cgroup:/sys/fs/cgroup:ro -itd --name centos-7-6-18-10-selenium-chromium centos-7-6-18-10-selenium-chromium
```
# dockerコンテナ潜入
```
docker exec --user kuraine -it centos-7-6-18-10-selenium-chromium /bin/bash
```
# seleniumをpip3コマンドでいんすこ
rootユーザーで実行
```
$pip3 install --upgrade pip
$pip3 install selenium
```
# 動作確認
カレントディレクトリにtest.pngが保存されるので、VSCODEで開くと見れる!
```
#!/usr/local/bin/python3
# -*- coding: utf-8 -*-
import sys
import time
sys.path.append('/usr/local/lib/python3.7/site-packages')
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
options = Options()
options.add_argument('--headless')
options.add_argument('--no-sandbox')
options.add_argument('--disable-gpu')
options.add_argument('--window-size=1280,1024')
driver = webdriver.Chrome('/usr/local/bin/chromedriver',chrome_options=options)
driver.get('http://www.google.com/')
driver.save_screenshot('test.png')
driver.quit()
```

<file_sep># phpインスト
```
sudo yum -y install http://rpms.famillecollet.com/enterprise/remi-release-7.rpm && \
sudo yum -y install --enablerepo=remi,remi-php73 php php-devel php-mbstring php-pdo php-gd php-xml php-mcrypt
```
# 参考文献
https://mattintosh.hatenablog.com/entry/20140815/1408057200
https://www.activestate.com/products/komodo-ide/download-ide/#edit
https://shimz.me/blog/category/komodo-edit
IME日本語入力辞書登録
http://tatsu-web.com/jisyo/
日本語から英語に
https://code.google.com/archive/p/google-ime-user-dictionary-ja-en/
日本語からHTMLタグに
https://gist.github.com/suneo3476/5462528
VIMからIMEそうさ
https://qiita.com/ka_/items/dad955fe7423318b7dae
# Dockerfileよりイメージ作成
```
time docker build -t centos-7-6-18-10-php-komodo . | tee log
```
# dockerコンテナ削除
```
docker ps -qa | xargs -I@ bash -c 'docker stop @ && docker rm @'
```
# dockerイメージ削除
```
docker images | awk '$1=="<none>"{print $3}' | xargs -I@ docker rmi @
```
# dockerコンテナ作成
```
docker run --privileged --shm-size=8gb --name centos-7-6-18-10-php-komodo -itd -v /run/udev:/run/udev -v /run/systemd:/run/systemd -v /tmp/.X11-unix:/tmp/.X11-unix -v /var/lib/dbus:/var/lib/dbus -v /var/run/dbus:/var/run/dbus -v /etc/machine-id:/etc/machine-id centos-7-6-18-10-php-komodo
```
# dockerコンテナ潜入
```
docker exec -it centos-7-6-18-10-php-komodo /bin/bash
```
# dockerコンテナ潜入後実行
```
$cd ~/Komodo-Edit-11.1.1-18206-linux-x86_64
$./install.sh
Enter directory in which to install Komodo. Leave blank and
press 'Enter' to use the default [~/Komodo-Edit-11].
Install directory:
==============================================================================
Komodo Edit 11 has been successfully installed to:
/home/php/Komodo-Edit-11
You might want to add 'komodo' to your PATH by adding the
install dir to you PATH. Bash users can add the following
to their ~/.bashrc file:
export PATH="/home/php/Komodo-Edit-11/bin:$PATH"
Or you could create a symbolic link to 'komodo', e.g.:
ln -s "/home/php/Komodo-Edit-11/bin/komodo" /usr/local/bin/komodo
Documentation is available in Komodo or on the web here:
http://docs.activestate.com/komodo
Please send us any feedback you have through one of the
channels below:
<EMAIL>
irc://irc.mozilla.org/komodo
https://github.com/Komodo/KomodoEdit/issues
Thank you for using Komodo.
==============================================================================
```
# komodo起動
```
[php@d46d7bac3844 ~]$komodo
[1] 535
```






<file_sep>[oracle19cr3](https://www.oracle.com/technetwork/jp/database/enterprise-edition/downloads/index.html)
[clob型のcollect集約関数の実装について](https://docs.oracle.com/cd/E82638_01/addci/using-user-defined-aggregate-functions.html#GUID-D7E77319-DC23-4CF0-B746-27ED7BE9240D)
[超高速な機械学習を Oracle Database で実現!](https://www.slideshare.net/oracle4engineer/hikalab-20171026)
- sqlplusから接続
```
sqlplus sys/ORACLE_PWD@ORCLCDB as sysdba
sqlplus sys/ORACLE_PWD@ORCLPDB01 as sysdba
sqlplus sys/ORACLE_PWD@ORCLPDB02 as sysdba
sqlplus user01/ORACLE_PWD@ORCLPDB01
sqlplus user02/ORACLE_PWD@ORCLPDB01
sqlplus user01/ORACLE_PWD@ORCLPDB02
sqlplus user02/ORACLE_PWD@ORCLPDB02
sqlplus user03/ORACLE_PWD@ORCLPDB02
```
ないしは
```
sqlplus sys/ORACLE_PWD@localhost:1521/ORCLCDB as sysdba
sqlplus sys/ORACLE_PWD@localhost:1521/ORCLPDB01 as sysdba
sqlplus sys/ORACLE_PWD@localhost:1521/ORCLPDB02 as sysdba
sqlplus user01/ORACLE_PWD@localhost:1521/ORCLPDB01
sqlplus user02/ORACLE_PWD@localhost:1521/ORCLPDB01
sqlplus user01/ORACLE_PWD@localhost:1521/ORCLPDB02
sqlplus user02/ORACLE_PWD@localhost:1521/ORCLPDB02
sqlplus user03/ORACLE_PWD@localhost:1521/ORCLPDB02
```
実行例
```
[oracle@30bf33351a1f ~]$sqlplus sys/ORACLE_PWD@localhost:1521/ORCLCDB as sysdba
SQL*Plus: Release 19.0.0.0.0 - Production on 土 9月 14 21:47:03 2019
Version 19.3.0.0.0
Copyright (c) 1982, 2019, Oracle. All rights reserved.
Oracle Database 19c Enterprise Edition Release 19.0.0.0.0 - Production
Version 19.3.0.0.0
に接続されました。
SYS@localhost:1521/ORCLCDB> show con_name
CON_NAME
------------------------------
CDB$ROOT
SYS@localhost:1521/ORCLCDB> show pdbs
CON_ID CON_NAME OPEN MODE RESTRICTED
------------- ------------------------------ ---------- ----------
2 PDB$SEED READ ONLY NO
3 ORCLPDB01 READ WRITE NO
4 ORCLPDB02 READ WRITE NO
SYS@localhost:1521/ORCLCDB> Oracle Database 19c Enterprise Edition Release 19.0.0.0.0 - Production
Version 19.3.0.0.0との接続が切断されました。
```
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 script-env
or
$0 script-env --debug
EOS
exit 0
}
execute(){
OS_VERSION=$1;shift
EDITOR=$1;shift
[ -z $OS_VERSION ] && usage
OS_NAME=$(echo $OS_VERSION | perl -pe 's/^([a-z]+)-(.*)$/\1/g')
IMAGE_VERSION=$(echo $OS_VERSION | perl -pe 's/^([a-z]+)-(.*)$/\2/g')
if [ "centos" == $OS_NAME ];then
IMAGE_VERSION=$(echo $IMAGE_VERSION | perl -pe 's/-/\./;s/-/\./;s/-//;')
fi
if [ "ubuntu" == $OS_NAME ];then
IMAGE_VERSION=$(echo $IMAGE_VERSION | perl -pe 's/-/\./;')
fi
TEMPLATE_FILE=$(find $HOME/$ENV_REPO -name "docker-template-Dockerfile-$OS_NAME")
EDITOR_LIST="$(ls $HOME/script-env/env-editor-* | grep $OS_NAME | awk -v FS='-' -v OFS='-' '{$1="";$2="";$3="";$4="";print $0}' | sed -r 's/^-{1,}//g' | sort | uniq)"
while read tgt;do
#テンプレートファイルのDOCKERFILE_EDITORの置換
for n in "$EDITOR_LIST";do
#エディタが決まるまでは、ちとめんどい。
cmd="$(echo "echo" $tgt "| grep -Po '("$(echo $n | tr ' ' '|')")$'")"
EDITOR="$(echo $cmd | $SHELL)"
if [ "$SHELL" = 'bash' ];then
:
else
echo $EDITOR $cmd
fi
if [ -z "$EDITOR" ];then
#echo 森鴎外いない
#vimの場合またはenv-editor未定義かつ明示的にvimと環境ディレクトリに明記していない場合
cmd=$(echo "sed -i '/DOCKERFILE_EDITOR/d' $tgt/Dockerfile.auto")
if [ "$SHELL" = 'bash' ];then
echo $cmd | $SHELL
else
echo $cmd
fi
else
#echo 森鴎外
#env-editor定義の環境ディレクトリの場合
cmd=$(echo "sed -i '/DOCKERFILE_EDITOR/r $(find $HOME/$ENV_REPO -maxdepth 1 -type f -name "env-editor*" | grep $OS_NAME | grep -P "$EDITOR$")' $tgt/Dockerfile.auto")
if [ "$SHELL" = 'bash' ];then
echo $cmd | $SHELL
else
echo $cmd
fi
fi
#あればあとしまつ
cmd=$(echo "sed -i '/DOCKERFILE_EDITOR/d' $tgt/Dockerfile.auto")
if [ "$SHELL" = 'bash' ];then
echo $cmd | $SHELL
else
echo $cmd
fi
done
done < <(find $HOME/$ENV_REPO -type d | grep -v docker-log | grep $OS_VERSION | grep -vP mnt)
}
main(){
ENV_REPO=$1;shift
DEBUG=$1;shift
if [ "$DEBUG" = '--debug' ];then
SHELL=: #なんもしない
else
SHELL=bash #じっこうする
fi
[ -z $ENV_REPO ] && usage
export -f execute
find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt' | grep -Po '[a-z]+(-[0-9]{1,}){1,}' | sort | uniq | while read tgt;do execute $tgt ;done
}
main "$@"
<file_sep>- インストーラの配置先は
```
/usr/local/src
```
```
cd /usr/local/src
tar xvf eclipse-inst-linux64.tar.gz
cd eclipse-installer/
/usr/local/src/eclipse-installer/eclipse-inst 1>launch-eclipse-installer.log 2>&1 &
```
- 実行バイナリがインストールされたあと
```
/usr/local/src/eclipse/eclipse 1>launch-eclipse.log 2>&1 &
```
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 script-env
EOS
exit 0
}
BASE_URL="https://github.com/ukijumotahaneniarukenia"
ENV_REPO=$1;shift
[ -z $ENV_REPO ] && usage
MD_FILE_NAME=env-image.md
OUTPUT_FILE_NAME=app-env-image-list.md
>$HOME/$ENV_REPO/$OUTPUT_FILE_NAME
while read tgt;do
OS_NAME=$(echo $tgt | perl -pe 's;.*/;;g;' | perl -pe 's/^([a-z]+)-(.*)$/\1/g')
IMAGE_VERSION=$(echo $tgt | perl -pe 's;.*/;;g;' | perl -pe 's/^([a-z]+)-(.*)$/\2/g;s/((?:[0-9]+-){1,})(.*)/\1/;s/-$//;')
if [ -s $tgt/$MD_FILE_NAME ];then
{
echo "[$(echo $tgt | perl -pe 's;.*/;;g')]($BASE_URL/$ENV_REPO/blob/master/$(echo $tgt | perl -pe 's;.*/;;g')/$MD_FILE_NAME)"
cat $tgt/$MD_FILE_NAME | perl -pe 's/.*=//g'
} | xargs
else
if [ "centos" == $OS_NAME ];then
IMAGE_VERSION=$(echo $IMAGE_VERSION | perl -pe 's/-/\./;s/-/\./;s/-//;')
fi
if [ "ubuntu" == $OS_NAME ];then
IMAGE_VERSION=$(echo $IMAGE_VERSION | perl -pe 's/-/\./;')
fi
echo "[$(echo $tgt | perl -pe 's;.*/;;g')]($BASE_URL/$ENV_REPO/blob/master/$(echo $tgt | perl -pe 's;.*/;;g')/$MD_FILE_NAME)" $IMAGE_VERSION
fi \
| perl -pe 's/(?<=md\)) /|/;s/^/|/;s/$/|/'
done < <(find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt') | sort | sed '1i|環境ディレクトリ名|ベースイメージ|' | sed '2i|:--|:-:|' >>$HOME/$ENV_REPO/$OUTPUT_FILE_NAME
<file_sep># go言語のデータベースある
- https://godoc.org/modernc.org/ql
# 参考文献
- https://github.com/astaxie/build-web-application-with-golang/blob/master/ja/preface.md
<file_sep>GIT_VERSION=2-24-1
PYTHON_VERSION=3-7-4
APACHE_HTTP_VERSION=X-X-X
<file_sep>#!/bin/bash
usage(){
cat <<EOS
Usage:
$0 'script-repo.git' 'RUN cd /usr/local/src/script-repo && echo ./\$OS_VERSION-install-default-user.sh | bash' script-env
EOS
exit 0
}
TGT_WORD="$1";shift
EMBEDED_WORD="$1";shift
REPO="$1";shift
[ -z "$TGT_WORD" ] && usage
[ -z "$EMBEDED_WORD" ] && usage
[ -z "$REPO" ] && usage
while read tgt;do
TGT_FILE=$(echo $tgt | tr ':' '\n' | sed -n '1p')
TGT_ROWN=$(echo $tgt | tr ':' '\n' | sed -n '2p')
printf "sed -i \x27%si%s\x27 %s\n" "$(($TGT_ROWN+1))" "$EMBEDED_WORD" "$TGT_FILE"
done < <(grep -n -P "$TGT_WORD" -r $HOME/$REPO | grep -P 'Dockerfile') | grep -vP "${0:2}"
<file_sep>#!/usr/bin/env bash
template(){
n=$1;shift;
l=$1;shift;
DD=$(printf "%02d" $(($n%31)))
HH=$(printf "%02d" $(($n/31)))
if [ $DD = '00' ];then
cat <<EOS
10 $HH 31 * * $HOME/$ENV_REPO/docker-crontab-wrapper.sh 1 $l $SCRIPT_REPO
EOS
else
cat <<EOS
10 $HH $DD * * $HOME/$ENV_REPO/docker-crontab-wrapper.sh 1 $l $SCRIPT_REPO
EOS
fi
}
usage(){
cat <<EOS
Usage:
$0 script-env script-repo
EOS
exit 0
}
ENV_REPO=$1;shift
SCRIPT_REPO=$1;shift
[ -z $ENV_REPO ] && usage
[ -z $SCRIPT_REPO ] && usage
while read tgt;do
echo $tgt | perl -pe 's;.*/;;' | perl -nlE 's/(?:[a-z]+(?:-[0-9]{1,}){1,})(.*)/\1/ and say' | tr '-' '\n' | sed /^$/d
done < <(find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt') | sort | uniq | \
while read tgt;do
find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt' | sed 's;.*/;;' | grep -P "(?<=-)$tgt" | xargs -n3 | nl | \
while read n {a..c};do
#echo $tgt $n $(eval echo '$'{a..c}|xargs -n1;)
eval echo '$'{a..c}|xargs -n1 >docker-build-$tgt-list-$(printf "%02d" $n);
done
done
ls docker-build*list* | grep -vP 'list$' | nl | \
while read n l ;do
template $n $l
done | sort -k3 >docker-crontab-BY-ONE-DAY
<file_sep># 参考文献
- https://www.mlab.im.dendai.ac.jp/~yamada/ir/MorphologicalAnalyzer/mecab.html
- https://javazuki.com/articles/mecab-install.html
- https://github.com/naoa/docker-termextract/blob/master/Dockerfile
- lsp-javaの設定参考例
- https://blog.uedder.com/develope-java-with-vim.html
# 動作確認
```
$echo みなさんこんにちわ | mecab
みなさん 名詞,代名詞,一般,*,*,*,みなさん,ミナサン,ミナサン
こんにちわ 感動詞,*,*,*,*,*,こんにちわ,コンニチワ,コンニチワ
EOS
```
# java拡張の動作確認
```
$cd /usr/local/src/mecab-java-0.996
$java test
0.996
太郎 名詞,固有名詞,人名,名,*,*,太郎,タロウ,タロー
は 助詞,係助詞,*,*,*,*,は,ハ,ワ
二郎 名詞,固有名詞,人名,名,*,*,二郎,ジロウ,ジロー
に 助詞,格助詞,一般,*,*,*,に,ニ,ニ
この 連体詞,*,*,*,*,*,この,コノ,コノ
本 名詞,一般,*,*,*,*,本,ホン,ホン
を 助詞,格助詞,一般,*,*,*,を,ヲ,ヲ
渡し 動詞,自立,*,*,五段・サ行,連用形,渡す,ワタシ,ワタシ
た 助動詞,*,*,*,特殊・タ,基本形,た,タ,タ
。 記号,句点,*,*,*,*,。,。,。
EOS
BOS/EOS,*,*,*,*,*,*,*,*
太郎 名詞,固有名詞,人名,名,*,*,太郎,タロウ,タロー
は 助詞,係助詞,*,*,*,*,は,ハ,ワ
二郎 名詞,固有名詞,人名,名,*,*,二郎,ジロウ,ジロー
に 助詞,格助詞,一般,*,*,*,に,ニ,ニ
この 連体詞,*,*,*,*,*,この,コノ,コノ
本 名詞,一般,*,*,*,*,本,ホン,ホン
を 助詞,格助詞,一般,*,*,*,を,ヲ,ヲ
渡し 動詞,自立,*,*,五段・サ行,連用形,渡す,ワタシ,ワタシ
た 助動詞,*,*,*,特殊・タ,基本形,た,タ,タ
。 記号,句点,*,*,*,*,。,。,。
BOS/EOS,*,*,*,*,*,*,*,*
EOS
太郎 名詞,固有名詞,人名,名,*,*,太郎,タロウ,タロー
は 助詞,係助詞,*,*,*,*,は,ハ,ワ
二郎 名詞,固有名詞,人名,名,*,*,二郎,ジロウ,ジロー
に 助詞,格助詞,一般,*,*,*,に,ニ,ニ
この 連体詞,*,*,*,*,*,この,コノ,コノ
本 名詞,一般,*,*,*,*,本,ホン,ホン
を 助詞,格助詞,一般,*,*,*,を,ヲ,ヲ
渡し 動詞,自立,*,*,五段・サ行,連用形,渡す,ワタシ,ワタシ
た 助動詞,*,*,*,特殊・タ,基本形,た,タ,タ
。 記号,句点,*,*,*,*,。,。,。
EOS
太郎は二郎にこの本を渡した。
太郎 名詞,固有名詞,人名,名,*,*,太郎,タロウ,タロー
は 助詞,係助詞,*,*,*,*,は,ハ,ワ
二郎 名詞,固有名詞,人名,名,*,*,二郎,ジロウ,ジロー
に 助詞,格助詞,一般,*,*,*,に,ニ,ニ
この 連体詞,*,*,*,*,*,この,コノ,コノ
本 名詞,一般,*,*,*,*,本,ホン,ホン
を 助詞,格助詞,一般,*,*,*,を,ヲ,ヲ
渡し 動詞,自立,*,*,五段・サ行,連用形,渡す,ワタシ,ワタシ
た 助動詞,*,*,*,特殊・タ,基本形,た,タ,タ
。 記号,句点,*,*,*,*,。,。,。
EOS
BOS/EOS,*,*,*,*,*,*,*,*
太郎 名詞,固有名詞,人名,名,*,*,太郎,タロウ,タロー
は 助詞,係助詞,*,*,*,*,は,ハ,ワ
二郎 名詞,固有名詞,人名,名,*,*,二郎,ジロウ,ジロー
に 助詞,格助詞,一般,*,*,*,に,ニ,ニ
この 連体詞,*,*,*,*,*,この,コノ,コノ
本 名詞,一般,*,*,*,*,本,ホン,ホン
を 助詞,格助詞,一般,*,*,*,を,ヲ,ヲ
渡し 動詞,自立,*,*,五段・サ行,連用形,渡す,ワタシ,ワタシ
た 助動詞,*,*,*,特殊・タ,基本形,た,タ,タ
。 記号,句点,*,*,*,*,。,。,。
BOS/EOS,*,*,*,*,*,*,*,*
EOS
nbest:0
太郎 名詞,固有名詞,人名,名,*,*,太郎,タロウ,タロー
は 助詞,係助詞,*,*,*,*,は,ハ,ワ
二郎 名詞,固有名詞,人名,名,*,*,二郎,ジロウ,ジロー
に 助詞,格助詞,一般,*,*,*,に,ニ,ニ
この 連体詞,*,*,*,*,*,この,コノ,コノ
本 名詞,一般,*,*,*,*,本,ホン,ホン
を 助詞,格助詞,一般,*,*,*,を,ヲ,ヲ
渡し 動詞,自立,*,*,五段・サ行,連用形,渡す,ワタシ,ワタシ
た 助動詞,*,*,*,特殊・タ,基本形,た,タ,タ
。 記号,句点,*,*,*,*,。,。,。
EOS
nbest:1
太郎 名詞,固有名詞,人名,名,*,*,太郎,タロウ,タロー
は 助詞,係助詞,*,*,*,*,は,ハ,ワ
二 名詞,数,*,*,*,*,二,ニ,ニ
郎 名詞,一般,*,*,*,*,郎,ロウ,ロー
に 助詞,格助詞,一般,*,*,*,に,ニ,ニ
この 連体詞,*,*,*,*,*,この,コノ,コノ
本 名詞,一般,*,*,*,*,本,ホン,ホン
を 助詞,格助詞,一般,*,*,*,を,ヲ,ヲ
渡し 動詞,自立,*,*,五段・サ行,連用形,渡す,ワタシ,ワタシ
た 助動詞,*,*,*,特殊・タ,基本形,た,タ,タ
。 記号,句点,*,*,*,*,。,。,。
EOS
nbest:2
太郎 名詞,固有名詞,地域,一般,*,*,太郎,タロウ,タロー
は 助詞,係助詞,*,*,*,*,は,ハ,ワ
二郎 名詞,固有名詞,人名,名,*,*,二郎,ジロウ,ジロー
に 助詞,格助詞,一般,*,*,*,に,ニ,ニ
この 連体詞,*,*,*,*,*,この,コノ,コノ
本 名詞,一般,*,*,*,*,本,ホン,ホン
を 助詞,格助詞,一般,*,*,*,を,ヲ,ヲ
渡し 動詞,自立,*,*,五段・サ行,連用形,渡す,ワタシ,ワタシ
た 助動詞,*,*,*,特殊・タ,基本形,た,タ,タ
。 記号,句点,*,*,*,*,。,。,。
EOS
nbest:3
太郎 名詞,固有名詞,人名,名,*,*,太郎,タロウ,タロー
は 助詞,係助詞,*,*,*,*,は,ハ,ワ
二郎 名詞,固有名詞,一般,*,*,*,二郎,ニロウ,ニロー
に 助詞,格助詞,一般,*,*,*,に,ニ,ニ
この 連体詞,*,*,*,*,*,この,コノ,コノ
本 名詞,一般,*,*,*,*,本,ホン,ホン
を 助詞,格助詞,一般,*,*,*,を,ヲ,ヲ
渡し 動詞,自立,*,*,五段・サ行,連用形,渡す,ワタシ,ワタシ
た 助動詞,*,*,*,特殊・タ,基本形,た,タ,タ
。 記号,句点,*,*,*,*,。,。,。
EOS
nbest:4
太郎 名詞,固有名詞,地域,一般,*,*,太郎,タロウ,タロー
は 助詞,係助詞,*,*,*,*,は,ハ,ワ
二 名詞,数,*,*,*,*,二,ニ,ニ
郎 名詞,一般,*,*,*,*,郎,ロウ,ロー
に 助詞,格助詞,一般,*,*,*,に,ニ,ニ
この 連体詞,*,*,*,*,*,この,コノ,コノ
本 名詞,一般,*,*,*,*,本,ホン,ホン
を 助詞,格助詞,一般,*,*,*,を,ヲ,ヲ
渡し 動詞,自立,*,*,五段・サ行,連用形,渡す,ワタシ,ワタシ
た 助動詞,*,*,*,特殊・タ,基本形,た,タ,タ
。 記号,句点,*,*,*,*,。,。,。
EOS
nbest:5
太郎 名詞,固有名詞,地域,一般,*,*,太郎,タロウ,タロー
は 助詞,係助詞,*,*,*,*,は,ハ,ワ
二郎 名詞,固有名詞,一般,*,*,*,二郎,ニロウ,ニロー
に 助詞,格助詞,一般,*,*,*,に,ニ,ニ
この 連体詞,*,*,*,*,*,この,コノ,コノ
本 名詞,一般,*,*,*,*,本,ホン,ホン
を 助詞,格助詞,一般,*,*,*,を,ヲ,ヲ
渡し 動詞,自立,*,*,五段・サ行,連用形,渡す,ワタシ,ワタシ
た 助動詞,*,*,*,特殊・タ,基本形,た,タ,タ
。 記号,句点,*,*,*,*,。,。,。
EOS
nbest:6
太郎 名詞,固有名詞,人名,名,*,*,太郎,タロウ,タロー
は 助詞,係助詞,*,*,*,*,は,ハ,ワ
二 名詞,数,*,*,*,*,二,ニ,ニ
郎 名詞,一般,*,*,*,*,郎,ロウ,ロー
に 助詞,副詞化,*,*,*,*,に,ニ,ニ
この 連体詞,*,*,*,*,*,この,コノ,コノ
本 名詞,一般,*,*,*,*,本,ホン,ホン
を 助詞,格助詞,一般,*,*,*,を,ヲ,ヲ
渡し 動詞,自立,*,*,五段・サ行,連用形,渡す,ワタシ,ワタシ
た 助動詞,*,*,*,特殊・タ,基本形,た,タ,タ
。 記号,句点,*,*,*,*,。,。,。
EOS
nbest:7
太郎 名詞,固有名詞,人名,名,*,*,太郎,タロウ,タロー
は 助詞,係助詞,*,*,*,*,は,ハ,ワ
二郎 名詞,固有名詞,人名,名,*,*,二郎,ニロウ,ニロー
に 助詞,格助詞,一般,*,*,*,に,ニ,ニ
この 連体詞,*,*,*,*,*,この,コノ,コノ
本 名詞,一般,*,*,*,*,本,ホン,ホン
を 助詞,格助詞,一般,*,*,*,を,ヲ,ヲ
渡し 動詞,自立,*,*,五段・サ行,連用形,渡す,ワタシ,ワタシ
た 助動詞,*,*,*,特殊・タ,基本形,た,タ,タ
。 記号,句点,*,*,*,*,。,。,。
EOS
nbest:8
太郎 名詞,固有名詞,人名,名,*,*,太郎,タロウ,タロー
は 助詞,係助詞,*,*,*,*,は,ハ,ワ
二郎 名詞,固有名詞,人名,名,*,*,二郎,ジロウ,ジロー
に 助詞,副詞化,*,*,*,*,に,ニ,ニ
この 連体詞,*,*,*,*,*,この,コノ,コノ
本 名詞,一般,*,*,*,*,本,ホン,ホン
を 助詞,格助詞,一般,*,*,*,を,ヲ,ヲ
渡し 動詞,自立,*,*,五段・サ行,連用形,渡す,ワタシ,ワタシ
た 助動詞,*,*,*,特殊・タ,基本形,た,タ,タ
。 記号,句点,*,*,*,*,。,。,。
EOS
nbest:9
太郎 名詞,固有名詞,地域,一般,*,*,太郎,タロウ,タロー
は 助詞,係助詞,*,*,*,*,は,ハ,ワ
二 名詞,数,*,*,*,*,二,ニ,ニ
郎 名詞,一般,*,*,*,*,郎,ロウ,ロー
に 助詞,副詞化,*,*,*,*,に,ニ,ニ
この 連体詞,*,*,*,*,*,この,コノ,コノ
本 名詞,一般,*,*,*,*,本,ホン,ホン
を 助詞,格助詞,一般,*,*,*,を,ヲ,ヲ
渡し 動詞,自立,*,*,五段・サ行,連用形,渡す,ワタシ,ワタシ
た 助動詞,*,*,*,特殊・タ,基本形,た,タ,タ
。 記号,句点,*,*,*,*,。,。,。
EOS
```
<file_sep># dockerコンテナ作成
```
docker run --shm-size=2gb --name ubuntu-19-10-vim -itd -v /etc/localtime:/etc/localtime -v /run/udev:/run/udev -v /run/systemd:/run/systemd -v /tmp/.X11-unix:/tmp/.X11-unix -v /var/lib/dbus:/var/lib/dbus -v /var/run/dbus:/var/run/dbus -v /etc/machine-id:/etc/machine-id ubuntu-19-10-vim
```
# dockerイメージ作成
```
time docker build -t ubuntu-19-10-vim . | tee log
```
# dockerコンテナ潜入
```
docker exec -it ubuntu-19-10-vim /bin/bash
```
# vimビルドに必要なパッケージをインストール
```
$grep '^deb ' /etc/apt/sources.list | sed 's/^deb/deb-src/g' > /etc/apt/sources.list.d/deb-src.list && apt-get update
$apt-get build-dep -y vim
$apt-get install -y python-dev
$apt-get install -y tcl-dev
$apt-get install -y perl5*-dev
```
# vimインストール
ビルドやりなおす場合
```
$ls -l /usr/local/bin | grep -P '^l' | awk '{print $9}' | xargs -I@ echo unlink /usr/local/bin/@ | sh
$make clean distclean
```
python関連でエラー起きる場合
Makefileのある場所を探して
```
$find / -name "*Makefile*" 2>/dev/null | grep python
/usr/lib/python3.7/config-3.7m-x86_64-linux-gnu/Makefile
```
以下のオプションを追加
```
--with-python-config-dir=/usr/lib/python3.7/config-3.7m-x86_64-linux-gnu
```
ビルド
```
cd /usr/local/src && \
git clone https://github.com/vim/vim.git && \
cd vim && \
./configure --with-features=huge --with-x --with-python-config-dir=/usr/lib/python3.7/config-3.7m-x86_64-linux-gnu --enable-multibyte --enable-luainterp=dynamic --enable-gpm --enable-cscope --enable-fontset --enable-fail-if-missing --prefix=/usr/local --enable-pythoninterp=dynamic --enable-python3interp=dynamic --enable-rubyinterp=dynamic --enable-tclinterp=dynamic --enable-perlinterp=dynamic --enable-gui=auto --enable-gtk2-check && \
make -j12 && \
make -j12 install && \
ln -fsr /usr/local/bin/vim /usr/bin/vi
```
# 各言語ごとにプラグインを設定
python環境
```
pip3 install python-language-server
```
c-cpp環境
http://kutimoti.hatenablog.com/entry/2018/05/20/110732
```
apt install -y llvm clang clang-tools
```
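ビルド後に各言語インターフェースが有効になっているかの簡易確認(最小スケッチ)
```
vim --version | grep -oP '[+-](python3|ruby|perl|tcl|lua)'
```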
<file_sep>GIT_VERSION=2-24-1
GO_VERSION=1-13-5
PYTHON_VERSION=3-7-4
<file_sep># 参考文献
- https://www.jetbrains.com/pycharm/download/#section=linux
- https://sdsawtelle.github.io/blog/output/large-data-files-pandas-sqlite.html
# 機械学習はこのサイト導入に良さげ
- https://www.kkaneko.jp/dblab/index.html
# pycharm起動
```
$pyc
```
<file_sep>ENV BLU=false
ENV DB2INST1_PASSWORD=<PASSWORD>
ENV DB2INSTANCE=db2inst1
ENV DBNAME=testdb
ENV ENABLE_ORACLE_COMPATIBILITY=true
ENV LICENSE=accept
ENV SAMPLEDB=true
<file_sep># dockerイメージ作成
```
time docker build -t ubuntu-18-04-swift-vscode . | tee log
```
# dockerコンテナ削除
```
docker ps -qa | xargs -I@ bash -c 'docker stop @ && docker rm @'
```
# dockerイメージ削除
```
docker images | awk '$1=="<none>"{print $3}' | xargs -I@ docker rmi @
```
# dockerコンテナ起動
```
docker run --privileged --shm-size=8gb --name ubuntu-18-04-swift-vscode -itd -v /etc/localtime:/etc/localtime -v /run/udev:/run/udev -v /run/systemd:/run/systemd -v /tmp/.X11-unix:/tmp/.X11-unix -v /var/lib/dbus:/var/lib/dbus -v /var/run/dbus:/var/run/dbus -v /etc/machine-id:/etc/machine-id ubuntu-18-04-swift-vscode
```
# dockerコンテナ潜入
```
docker exec -it ubuntu-18-04-swift-vscode /bin/bash
```
# 動作確認
- https://github.com/ukijumotahaneniarukenia/Hatena-Textbook
- https://qiita.com/naokits/items/8f09ffc8bbc78ade366c
```
$cat a.swift
#!/usr/bin/env swift
let hello = "こんにちは"
print(hello)
// 遊び心で。。
let 挨拶 = "お世話になります"
print(挨拶)
$chmod 700 a.swift
$./a.swift
こんにちは
お世話になります
```
<file_sep>|ユーザーID|ユーザー名|グループID|グループ名|パスワード|
|:-:|:-:|:-:|:-:|:-:|
|1001|hadoop|1001|hadoop|hadoop_pwd|
<file_sep>#!/bin/bash
usage(){
cat <<EOS
Usage:
$0 EXPOSE script-env
EOS
exit 0
}
N="$1";shift
REPO="$1";shift
[ -z $N ] && usage
[ -z $REPO ] && usage
while read tgt;do
grep -r -n -P "$N" $HOME/$REPO/$tgt | grep -P env\.md | awk -v FS=':' -v N=${N,,} '{FILE_NAME=$1;gsub(/md-env.md/,"env-"N".md",FILE_NAME);print "rm -rf "FILE_NAME}'
grep -r -n -P "$N" $HOME/$REPO/$tgt | grep -P env\.md | perl -pe 's/(.*)(?<=EXPOSE=)(.*)/\2/g' | perl -pe 's/ -p/\n-p/g' | awk -v N=${N,,} -v FILE=$HOME/$REPO/$tgt '{
print "echo \x27"$0"\x27>>"FILE"/env-"N".md"
}'
done < <(ls -l $HOME/$REPO | grep -P '^d' | awk '{print $9}' | grep -v docker-log)
#ファイル存在チェック
#ls -l $HOME/$REPO | grep -P '^d' | awk '{print $9}' | grep -v docker-log | while read tgt;do echo $HOME/$REPO/$tgt/env-${N,,}.md;done | xargs -I@ ls @
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 script-env script-repo
EOS
exit 0
}
DEFAULT_INSTALLER_REPO=script-repo
ENV_REPO=$1;shift
INSTALLER_REPO=$1;shift
[ -z "$ENV_REPO" ] && usage
if [ -z "$INSTALLER_REPO" ]; then
:
else
DEFAULT_INSTALLER_REPO=$INSTALLER_REPO
fi
while read tgt;do
{
#md-doc.mdファイルを配備
echo cp $HOME/$ENV_REPO/md-doc.md $HOME/$ENV_REPO/$tgt/md-doc.md
echo "sed -i 's;XXX;$tgt;g' $HOME/$ENV_REPO/$tgt/md-doc.md"
echo "sed -i 's;HHH;$HOME;g' $HOME/$ENV_REPO/$tgt/md-doc.md"
echo "sed -i 's;ENV_REPO;$ENV_REPO;g' $HOME/$ENV_REPO/$tgt/md-doc.md"
RT="$(echo "grep '' $HOME/$ENV_REPO/$tgt/env-expose.md | xargs" | bash 2>/dev/null)"
#デフォルト設定を適用
[ -z "$RT" ] && printf "sed -i 's;EXPOSE;%s;' %s\n" "$(echo "grep EXPOSE $HOME/$ENV_REPO/$tgt/md-env.md" | bash 2>/dev/null | sed 's;.*=;;' | sort | uniq)" $HOME/$ENV_REPO/$tgt/md-doc.md
#環境個別の設定を適用
[ -z "$RT" ] || printf "sed -i 's;EXPOSE;%s;' %s\n" "$RT" $HOME/$ENV_REPO/$tgt/md-doc.md
RT="$(echo "grep '' $HOME/$ENV_REPO/$tgt/env-shm-size.md" | bash 2>/dev/null)"
#デフォルト設定を適用
[ -z "$RT" ] && printf "sed -i 's;SHM_SIZE;%s;' %s\n" "$(echo "grep SHM_SIZE $HOME/$ENV_REPO/md-env.md" | bash | sed 's;.*=;;' | sort | uniq)" $HOME/$ENV_REPO/$tgt/md-doc.md
#環境個別の設定を適用
[ -z "$RT" ] || printf "sed -i 's;SHM_SIZE;%s;' %s\n" "$(echo $RT | sed 's/SHM_SIZE=//g')" $HOME/$ENV_REPO/$tgt/md-doc.md
RT="$(echo "grep '' $HOME/$ENV_REPO/$tgt/env-build-arg.md | xargs" | bash 2>/dev/null)"
#デフォルト設定を適用
[ -z "$RT" ] && printf "sed -i 's;BUILD_ARG;%s;' %s\n" "$(echo "grep build-arg $HOME/$ENV_REPO/md-env.md" | sort | uniq | bash)" $HOME/$ENV_REPO/$tgt/md-doc.md
#環境個別の設定を適用
[ -z "$RT" ] || printf "sed -i 's;BUILD_ARG;%s;' %s\n" "$(echo $RT | sed 's; ; --build-arg ;g;s;^;--build-arg ;')" $HOME/$ENV_REPO/$tgt/md-doc.md
printf "sed -i 's;INSTALLER_REPO;%s;' %s\n" $DEFAULT_INSTALLER_REPO $HOME/$ENV_REPO/$tgt/md-doc.md
} | bash
done < <(ls -l $HOME/$ENV_REPO | grep -P '^d' | awk '{print $9}' | grep -v docker-log)
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 ARG script-env script-repo
EOS
exit 0
}
CHK_WORD=$1;shift
ENV_REPO=$1;shift
INSTALLER_REPO=$1;shift
[ -z $CHK_WORD ] && usage
[ -z $ENV_REPO ] && usage
[ -z $INSTALLER_REPO ] && usage
while read tgt;do
if [ -f $tgt/Dockerfile.sub ];then
#Dockerfile.subが存在する場合、INSTALLER_REPOに存在するバージョンのでkey:valueペアを作成し、各環境ディレクトリのenv-build-arg.mdに追記
KEY=$(grep $CHK_WORD $tgt/Dockerfile.sub | awk '{print $2}')
WORD=$(grep $CHK_WORD $tgt/Dockerfile.sub | awk '{print $2}' | perl -pe 's/VERSION//' | perl -pe 's/_/-/g')
while read f;do
[ -z ${WORD} ] && continue
if [[ ${f} =~ ${WORD,,}.* ]]; then
{
echo ${KEY}
echo ${BASH_REMATCH[0]} | perl -pe 's/\.sh//g' | grep -Po '(-[0-9]+){1,}' | perl -pe 's/^-//'
} | xargs -n2 | tr ' ' '='
else
:
fi
done < <(find $HOME/$INSTALLER_REPO -type f | grep -vP '\.git')
else
#何もしないsubファイルを作成してほしい
:
fi
done < <(find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt')
<file_sep>あんていしてる
dbeaver
コンテナ起動後、**postgres**ユーザーで以下を実行する(パーミッション755なので実行できる)
```
$bash /usr/local/src/script-repo/centos-7-6-18-10-healthcheck-postgres-12-X-with-python.sh
```
そのあとdbeaverに接続
|key|value|
|:-:|:-:|
|Host|localhost|
|Port|5432|
|Database|testdb|
|ユーザー名|postgres|
|パスワード|<PASSWORD>|
<file_sep>CODEBLOCKS_VERSION=X-X
GIT_VERSION=2-24-1
PYTHON_VERSION=3-7-4
<file_sep>|ユーザーID|ユーザー名|グループID|グループ名|パスワード|
|:-:|:-:|:-:|:-:|:-:|
|1001|mssql|1001|mssql|mssql_pwd|
<file_sep>DBEAVER_VERSION=X-X-X
ELASTICSEARCH_VERSION=7-5-1
GIT_VERSION=2-24-1
JAVA_VERSION=11
KIBANA_VERSION=7-5-1
MAVEN_VERSION=3-6-3
PYTHON_VERSION=3-7-4
<file_sep># Apache solr
- 全文検索エンジン
- バックアップとリストアできる。
- https://www.slideshare.net/mobile/techblogyahoo/apache-solr-62053171
- https://academy.gmocloud.com/know/20160106/1509
- https://qiita.com/n_slender/items/eb629cfbc53d38eac2f1
- https://qiita.com/reflet/items/add376c6046b4e7048cf
- https://qiita.com/n_slender/items/93216bae005ac96b75c4
- https://kshigeru.blogspot.com/2012/01/solr-wikipedia-data-import.html
- そこそこおおきいデータセット
- https://www.findbestopensource.com/article-detail/free-large-data-corpus
<file_sep>- 事象
```
$ ls -l /usr/local/src/spark-2.4.5-bin-hadoop2.7/sbin/start-master.sh
-rwxr-xr-x. 1 hadoop hadoop 2050 2月 3 04:47 /usr/local/src/spark-2.4.5-bin-hadoop2.7/sbin/start-master.sh
$ start-master.sh
starting org.apache.spark.deploy.master.Master, logging to /usr/local/src/spark-2.4.5-bin-hadoop2.7/logs/spark-hadoop-org.apache.spark.deploy.master.Master-1-docker-container-ubuntu-16-04-postgres-python-vim.out
failed to launch: nice -n 0 /usr/local/src/spark-2.4.5-bin-hadoop2.7/bin/spark-class org.apache.spark.deploy.master.Master --host docker-container-ubuntu-16-04-postgres-python-vim --port 7077 --webui-port 8080
at org.apache.spark.SecurityManager.<init>(SecurityManager.scala:79)
at org.apache.spark.deploy.master.Master$.startRpcEnvAndEndpoint(Master.scala:1073)
at org.apache.spark.deploy.master.Master$.main(Master.scala:1058)
at org.apache.spark.deploy.master.Master.main(Master.scala)
Caused by: java.lang.StringIndexOutOfBoundsException: begin 0, end 3, length 2
at java.base/java.lang.String.checkBoundsBeginEnd(String.java:3319)
at java.base/java.lang.String.substring(String.java:1874)
at org.apache.hadoop.util.Shell.<clinit>(Shell.java:52)
... 15 more
20/05/12 07:25:32 INFO ShutdownHookManager: Shutdown hook called
full log in /usr/local/src/spark-2.4.5-bin-hadoop2.7/logs/spark-hadoop-org.apache.spark.deploy.master.Master-1-docker-container-ubuntu-16-04-postgres-python-vim.out
```
- 原因
javaのマイクロバージョンのちがい??イケてる環境もあるので、よくわからん。
```
curl -sSLO https://ftp.jaist.ac.jp/pub/apache/spark/spark-2.4.5/spark-2.4.5-bin-hadoop2.7.tgz
```
```
$ java -version
openjdk version "1.8.0_252"
OpenJDK Runtime Environment (build 1.8.0_252-8u252-b09-1~16.04-b09)
OpenJDK 64-Bit Server VM (build 25.252-b09, mixed mode)
```
- 対策
- このエラー自体は起きてるらしいし、3系ではなおってるので、切り替える。
- 予防
- 行ける環境探せばいい
<file_sep># 標準入出力
# ファイル入出力
# 型変換
# データ構造
<file_sep>フォルダ構成関連
- https://upura.hatenablog.com/entry/2018/12/28/225234
ソケットエラー関連
- https://rcmdnk.com/blog/2015/07/03/computer-linux/
データベースクライアントツール
ベンダー問わないのが対応範囲が広いのがすごい。
- https://qiita.com/Ryooota/items/755e249f5241d2c964e6
- https://qiita.com/12345/items/48f6856e32fd618ea307
- 言語別トークナイズ管理マスタgroonga
- apache spark環境似てる。フローが参考になる。
- http://www.intellilink.co.jp/article/column/bigdata-kk04.html
- 分かち書きpythonによる使用例
- http://testpy.hatenablog.com/entry/2016/10/04/010000
- グリッドレイアウトこれがいいかなー。簡単そう。
- https://coliss.com/articles/build-websites/operation/javascript/magic-grid-for-dynamic-grid.html
- nuxt.jsでできるそう。グリッドレイアウト。
- https://yoshinorin.net/2019/07/20/nuxt-js-photo-gallery-by-vue-magic-grid/
- nuxt.jsインストール
- https://ja.nuxtjs.org/guide/installation
- magic-gridは単一ファイルに収まりそう
- https://on-ze.com/archives/7644
- ブログテンプレートあった
- https://ulab.hatenablog.com/entry/20191006/1570336147
- axiosの使い方。mvc的な機能分割を提供してくれるプラグイン。
- https://public-constructor.com/nuxtjs-with-axios/
- nuxtフレームワークでのaxiosライブラリを使用したコンパクトなアプリ
- https://qiita.com/mgr/items/f2193fd21765be1d34c2
- nuxt.jsでのpost実装例 nuxt.js axios apiでぐぐる
- https://qiita.com/cortyuming/items/3257ccc7936acf1a7989
- nuxt.jsでのmvc感出てる
- https://noumenon-th.net/programming/2019/07/15/axios-module/
<file_sep># フォントインストール
- Dockerfileで完結できればいいが、今の所思いつかない。
```
python3: error while loading shared libraries: libpython3.7m.so.1.0: cannot open shared object file: No such file or directory
```
- コンテナ起動後、あたたかみのある手動実行
- 一般ユーザーで実行
```
mkdir -p ~/.fonts && \
cd ~/.fonts && curl -LO https://github.com/adobe-fonts/source-han-code-jp/archive/2.011.zip && \
unzip 2.011.zip && \
cd ~/.fonts && git clone https://github.com/adobe-type-tools/opentype-svg.git && \
cd source-han-code-jp-2.011 && \
python3 -m venv afdko_env && \
source afdko_env/bin/activate && \
pip3 install afdko && \
cp ../opentype-svg/*.py . && \
cp -r ../opentype-svg/util . && \
cp -r ../opentype-svg/imgs . && \
cp -r ../opentype-svg/fonts . && \
sed -i 's;addSVGtable.py;~/.fonts/source-han-code-jp-2.011/addSVGtable.py;g' commands.sh && \
sed -i 's;for wt in ExtraLight Light Normal Regular Medium Bold Heavy;for wt in ExtraLight Light Normal Medium Bold Heavy;g' commands.sh && \
./commands.sh && \
find ~/.fonts/source-han-code-jp-2.011 -name "*otf" | xargs -I@ cp @ ~/.fonts && \
fc-cache -fv
```
<file_sep>安定している
|key|value|
|:-:|:-:|
|Host|localhost|
|Port|5432|
|Database|testdb|
|ユーザー名|postgres|
|パスワード|<PASSWORD>|
<file_sep>GIT_VERSION=2-24-1
POSTGRES_VERSION=12-0
PYTHON_VERSION=3-7-4
<file_sep># dockerホスト環境
```
[aine💖centos (土 9月 28 19:53:59) ~/unko/x]$cat /etc/redhat-release
CentOS Linux release 7.7.1908 (Core)
[aine💖centos (土 9月 28 19:56:33) ~/unko]$docker --version
Docker version 19.03.2, build 6a30dfc
```
# ideaで日本語入力はできた
表示位置が微妙。
# dockerコンテナ削除
```
docker ps -qa | xargs -I@ bash -c 'docker stop @ && docker rm @'
```
# dockerイメージ作成
```
time docker build -t centos-7-6-18-10-x . | tee log
```
# dockerイメージ削除
```
docker images | awk '$1=="<none>"{print $3}' | xargs -I@ docker rmi @
```
# dockerコンテナ作成
```
docker run --privileged --shm-size=8gb --name xxx -itd -v /run/udev:/run/udev -v /run/systemd:/run/systemd -v /tmp/.X11-unix:/tmp/.X11-unix -v /var/lib/dbus:/var/lib/dbus -v /var/run/dbus:/var/run/dbus -v /etc/machine-id:/etc/machine-id -p 28787:8787 centos-7-6-18-10-x
```
```
docker run --privileged --shm-size=8gb --name xxx -itd -v /run/udev:/run/udev -v /run/systemd:/run/systemd -v /tmp/.X11-unix:/tmp/.X11-unix -v /var/lib/dbus:/var/lib/dbus -v /var/run/dbus:/var/run/dbus -v /etc/machine-id:/etc/machine-id -v /etc/X11/xorg.conf.d/00-keyboard.conf:/etc/X11/xorg.conf.d/00-keyboard.conf centos-7-6-18-10-x
```
# Xアプリ転送許可設定
dockerコンテナ内のXアプリをdockerホストに転送許可する。 ローカルネットワーク内で存在する全てのマシンからのX転送を許可している。マシン単位で設定もできる。
```
xhost +local:
```
# dockerコンテナ潜入
```
docker exec -it xxx /bin/bash
```
# 課題
X経由のブラウザ日本語入力
ideaのターミナルで絵文字が表示できない。
絵文字フォント設定
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 ENV script-env
EOS
exit 0
}
CHK_WORD=$1;shift
ENV_REPO=$1;shift
[ -z $CHK_WORD ] && usage
[ -z $ENV_REPO ] && usage
while read tgt;do
if [ -f "$tgt/env-env.md" ];then
:
else
echo "touch $tgt/env-env.md" | bash
fi
if [ -f "$tgt/Dockerfile.sub" ];then
RT="$(grep $CHK_WORD "$tgt/Dockerfile.sub")"
if [ -z "$RT" ];then
:
else
while read n;do
echo $tgt $n | awk '{PRE=$1;$1="";gsub("^ ","",$0);print "echo \x27"$0"\x27>>"PRE"/env-env.md"}'
done < <(echo "$RT" | perl -pe 's/ENV\s{1,}//')
fi
else
:
fi
done < <(find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt')
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 script-env
EOS
exit 0
}
ENV_REPO=$1;shift
[ -z $ENV_REPO ] && usage
while read tgt;do
echo $tgt | perl -pe 's;.*/;;' | perl -nlE 's/(?:[a-z]+(?:-[0-9]{1,}){1,})(.*)/\1/ and say' | tr '-' '\n' | sed /^$/d
done < <(find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt') | sort | uniq | \
while read tgt;do
find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt' | sed 's;.*/;;' | grep -P "(?<=-)$tgt" | xargs -n3 | nl | \
while read n {a..c};do
#echo $tgt $n $(eval echo '$'{a..c}|xargs -n1;)
eval echo '$'{a..c}|xargs -n1 >docker-build-$tgt-list-$(printf "%02d" $n);
done
done
ls docker-build*list* | grep -vP 'list$'
<file_sep>GIT_VERSION=2-24-1
PYTHON_VERSION=3-7-4
RUBY_VERSION=2-7-1
<file_sep>APACHE_SOLR_VERSION=8-5-1
DBEAVER_VERSION=X-X-X
GIT_VERSION=2-24-1
JAVA_VERSION=11
MAVEN_VERSION=3-6-3
PYTHON_VERSION=3-7-4
<file_sep>CODELITE_VERSION=13-0-8
GIT_VERSION=2-24-1
PYTHON_VERSION=3-7-4
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 script-env
EOS
exit 0
}
REPO=$1
[ -z $REPO ] && usage
while read tgt;do
#echo $tgt;
while read n;do
if [ -f $tgt/$n ];then
echo "mv $tgt/$n $tgt/md-$n" #| bash
echo "git rm $tgt/$n" #| bash
else
:
fi
done < <(ls *md | grep -v README.md | perl -pe 's/^md-//;')
done < <(find $HOME/$REPO -mindepth 1 -type d | grep -vP '\.git|docker-log' )
<file_sep>GIT_VERSION=2-24-1
GRADLE_VERSION=6-1
JAVA_VERSION=11
MAVEN_VERSION=3-6-3
PYTHON_VERSION=3-7-4
<file_sep># トラシュー
- 事象
```
fatal: could not read Username for 'https://github.com': No
```
- 原因
- md-doc.mdの引数にREPO環境変数が設定されていない
- 対応
- md-doc.mdの自動生成を行う
- 予防
- ビルド前に実行するようにcronに仕込む
- 事象
```
make[2]: *** No rule to make target `../auto/config.mk'. Stop.
```
- 原因
- おそらく同時実行数が多すぎるため。単一実行では行けているので。
- 対応
- ビルド対象リストを分割し、起動時刻をずらす。
- 予防
- 時刻別に同時実行数を一覧で確認できるようにする。
- 事象
```
bash: 36-デフォルトユーザー以外を使用している環境のユーザー登録スクリプトの作成.sh: そのようなファイルやディレクトリはありません
使用法: grep [OPTION]... PATTERN [FILE]...
Try 'grep --help' for more information.
```
- 原因
- フルパスで指定していない
- 対応
- フルパスで記載
- 予防
- linterとかあるけど、大量にエラーはかれて精神乱れそう。
- 事象
- git repoのクローンが終わらない
- 原因
- 同時にコネクション貼りすぎ
- 対応
- docker buildできていないやつを再実行するようにスクリプトを修正
- 予防
- 時刻別に同時実行数を一覧で確認できるようにする。
- 事象
- font cannot install
```
You are using pip version 19.0.3, however version 20.1 is available.
You should consider upgrading via the 'pip install --upgrade pip' command.
cp: cannot stat ‘../opentype-svg/util’: No such file or directory
The command '/bin/sh -c cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-font.sh | bash' returned a non-zero code: 1
real 5m45.217s
user 0m0.595s
sys 0m0.705s
aine@centos ~/script-env/centos-7-6-18-10-python-pycharm$cd -
```
- 原因
- I dont know
- 対応
- free
- 予防
- not-free
<file_sep># 参考文献
OpenJDKからいんすこ
https://openjdk.java.net/install/index.html
https://jdk.java.net/
https://jdk.java.net/13/
```
cd /usr/local/src && \
curl -LO https://download.java.net/java/GA/jdk13/5b8a42f3905b406298b72d750b6919f6/33/GPL/openjdk-13_linux-x64_bin.tar.gz && \
tar -zxvf openjdk-13_linux-x64_bin.tar.gz
```
# jshell
https://qiita.com/HomMarkHunt/items/6579cc3587972909d85f
https://twitter.com/EclipseJavaIDE/status/1147044351278747648
```
[java@441a10f53644 /]$jshell
| JShellへようこそ -- バージョン13
| 概要については、次を入力してください: /help intro
jshell>
jshell>
jshell>
jshell>
jshell> System.out.println("Hello World");
Hello World
```
# 絵文字をjavaで表示
https://qiita.com/carimatics/items/48ca30434f192549283c
https://github.com/vdurmont/emoji-java
Mavenからインストール



ディレクトリ構造
```
[java@441a10f53644 ~/IdeaProjects/untitled]$tree
.
|-- lib
| |-- emoji-java-5.1.1-javadoc.jar
| |-- emoji-java-5.1.1-sources.jar
| |-- emoji-java-5.1.1.jar
| |-- json-20170516-javadoc.jar
| |-- json-20170516-sources.jar
| `-- json-20170516.jar
|-- out
| `-- production
| `-- untitled
| `-- emo.class
|-- src
| `-- emo.java
`-- untitled.iml
5 directories, 9 files
```
javaファイル作成
```emo.java
import com.vdurmont.emoji.EmojiManager;
public class emo {
public static void main(String[] args) {
EmojiManager.getForAlias("name_badge");
System.out.println(EmojiManager.getForAlias("name_badge").getUnicode());
}
}
```
コンパイル
```
javac -d ../out/production/untitled -classpath /home/java/IdeaProjects/untitled/out/production/untitled:/home/java/IdeaProjects/untitled/lib/emoji-java-5.1.1.jar:/home/java/IdeaProjects/untitled/lib/json-20170516.jar emo.java
```
実行
```
/home/java/jdk-13/bin/java -javaagent:/home/java/idea-IC-192.6603.28/lib/idea_rt.jar=46144:/home/java/idea-IC-192.6603.28/bin -Dfile.encoding=UTF-8 -classpath /home/java/IdeaProjects/untitled/out/production/untitled:/home/java/IdeaProjects/untitled/lib/emoji-java-5.1.1.jar:/home/java/IdeaProjects/untitled/lib/json-20170516.jar emo
```
実行例
```
[java@441a10f53644 ~/IdeaProjects/untitled/src]$/home/java/jdk-13/bin/java -javaagent:/home/java/idea-IC-192.6603.28/lib/idea_rt.jar=46144:/home/java/idea-IC-192.6603.28/bin -Dfile.encoding=UTF-8 -classpath /home/java/IdeaProjects/untitled/out/production/untitled:/home/java/IdeaProjects/untitled/lib/emoji-java-5.1.1.jar:/home/java/IdeaProjects/untitled/lib/json-20170516.jar emo
📛
```
<file_sep># go言語のデータベースある
- https://godoc.org/modernc.org/ql
# 参考文献
- https://github.com/astaxie/build-web-application-with-golang/blob/master/ja/preface.md
# go tool
- https://mattn.kaoriya.net/software/lang/go/20120216093718.htm
# 開発環境
- https://qiita.com/masakurapa/items/fb61b5fc07393059fb28
# トラシュー
- https://qiita.com/mumoshu/items/0d2f2a13c6e9fc8da2a4
<file_sep>#!/bin/bash
usage(){
cat <<EOS
Usage:
$0 SHM_SIZE script-env
EOS
exit 0
}
N="$1";shift
REPO="$1";shift
[ -z $N ] && usage
[ -z $REPO ] && usage
while read tgt;do
grep -r -n -P "$N" $HOME/$REPO/$tgt | grep -P env\.md | awk -v FS=':' -v N=$(echo ${N,,} | perl -pe 's/_/-/g' ) '{FILE_NAME=$1;gsub(/md-env.md/,"env-"N".md",FILE_NAME);print "rm -rf "FILE_NAME}'
grep -r -n -P "$N" $HOME/$REPO/$tgt | grep -P env\.md | awk -v FS=':' -v N=$(echo ${N,,} | perl -pe 's/_/-/g' ) -v FILE=$HOME/$REPO/$tgt '{
print "echo \x27"$3"\x27>>"FILE"/env-"N".md"
}'
done < <(ls -l $HOME/$REPO | grep -P '^d' | awk '{print $9}' | grep -v docker-log)
#ファイル存在チェック
#ls -l $HOME/$REPO | grep -P '^d' | awk '{print $9}' | grep -v docker-log | while read tgt;do echo $HOME/$REPO/$tgt/env-${N,,}.md;done | xargs -I@ ls @
<file_sep># 参考文献
db2のセットアップマニュアル
- https://www.ibm.com/support/producthub/db2/docs/content/SSEPGG_11.5.0/com.ibm.db2.luw.qb.server.doc/doc/t0008875.html
コミュニティエディションでて、無償で検証つくれるようになったよー
- https://www.ibmbigdatahub.com/blog/simplifying-db2-downloads-help-clients-and-developers-get-started?_ga=2.116263366.1982163311.1579705509-1850039665.1572074127&cm_mc_uid=89503398108915720741268&cm_mc_sid_50200000=74238601579705509031
- https://www.ibm.com/cloud/blog/announcements/ibm-db2-developer-community-edition
開発マニュアル
- https://www.ibm.com/support/knowledgecenter/ja/ssw_ibm_i_71/rzaha/udftable.htm
以下はメモ
```
root@858be2c9f9b1:/# chmod 777 /home/software
root@8bb04c92ca56:/# cd /home/software
root@8bb04c92ca56:/home/software# gunzip *.gz
root@8bb04c92ca56:/home/software# tar -xvf v11.5_linuxx64_dec.tar
root@8bb04c92ca56:/home/software# rm v11.5_linuxx64_dec.tar
root@8bb04c92ca56:/home/software# chmod 777 server_dec
root@8bb04c92ca56:/home/software# mv server_dec ibm-db2
root@8bb04c92ca56:/home/software# cd ibm-db2
root@8bb04c92ca56:/home/software/ibm-db2# ./db2prereqcheck -v 192.168.127.12
apt-get -y install libaio1 zlib1g-dev libnuma-dev libpam0g-dev file gcc make
root@8bb04c92ca56:/home/software/ibm-db2# dpkg --add-architecture i386
root@8bb04c92ca56:/home/software/ibm-db2# apt-get update
root@8bb04c92ca56:/home/software/ibm-db2# apt install -y binutils libaio1 libstdc++6:i386 libpam0g:i386
root@8bb04c92ca56:/home/software/ibm-db2# ./db2prereqcheck -v 192.168.127.12
==========================================================================
Sun Feb 2 23:38:10 2020
Checking prerequisites for DB2 installation. Version "192.168.127.12". Operating system "Linux"
Validating "Linux distribution " ...
Required minimum "UBUNTU" version: "16.04"
Actual version: "18.04"
Requirement matched.
Validating "kernel level " ...
Required minimum operating system kernel level: "3.10.0".
Actual operating system kernel level: "3.10.0".
Requirement matched.
Validating "C++ Library version " ...
Required minimum C++ library: "libstdc++.so.6"
Standard C++ library is located in the following directory: "/usr/lib/x86_64-linux-gnu/libstdc++.so.6.0.25".
Actual C++ library: "CXXABI_1.3.1"
Requirement matched.
Validating "32 bit version of "libstdc++.so.6" " ...
Found the 32 bit "/usr/lib/i386-linux-gnu/libstdc++.so.6" in the following directory "/usr/lib/i386-linux-gnu".
Requirement matched.
Validating "libaio.so version " ...
DBT3553I The db2prereqcheck utility successfully loaded the libaio.so.1 file.
Requirement matched.
Validating "libnuma.so version " ...
DBT3610I The db2prereqcheck utility successfully loaded the libnuma.so.1 file.
Requirement matched.
Validating "/lib/i386-linux-gnu/libpam.so*" ...
Requirement matched.
DBT3533I The db2prereqcheck utility has confirmed that all installation prerequisites were met.
これたたいて
./db2setup
できたレスポンスファイルを使用していんすこ
root@8bb04c92ca56:/home/software/ibm-db2# cp ./db2/linuxamd64/samples/db2server.rsp .
root@8bb04c92ca56:/home/software/ibm-db2# apt install -y vim
root@8bb04c92ca56:/home/software/ibm-db2# chmod u+w db2server.rsp
root@8bb04c92ca56:/home/software/ibm-db2# ./db2setup -r db2server.rsp
root@b5348375618d:~# ls
db2server.rsp
root@b5348375618d:/home/software/ibm-db2# ./db2setup -r /root/db2server.rsp
```
<file_sep>SHM_SIZE=2gb
GIT_VERSION=2-24-1
PYTHON_VERSION=3-7-4
<file_sep># References
- https://www.mlab.im.dendai.ac.jp/~yamada/ir/MorphologicalAnalyzer/mecab.html
- https://javazuki.com/articles/mecab-install.html
- https://github.com/naoa/docker-termextract/blob/master/Dockerfile
# Quick check
```
$echo みなさんこんにちわ | mecab
みなさん 名詞,代名詞,一般,*,*,*,みなさん,ミナサン,ミナサン
こんにちわ 感動詞,*,*,*,*,*,こんにちわ,コンニチワ,コンニチワ
EOS
```
<file_sep># Running my-installer
- Run my-installer.sh as the root user
# docker install
- Official installation guide
- https://docs.docker.com/install/linux/docker-ce/centos/
- Installation has become even simpler with this script
- https://github.com/docker/docker-install
- Part I want to fold into my-installer.sh
# Building the docker host environment
- Check the OS version
```
$cat /etc/redhat-release
CentOS Linux release 7.7.1908 (Core)
```
# Edit sudoers with visudo so that users in the docker group get root privileges
- Part I want to fold into my-installer.sh
```
[root♥centos (金 10月 04 20:39:17) /home/aine]$visudo
## Allows people in group wheel to run all commands
%wheel ALL=(ALL) ALL
%docker ALL=(ALL) ALL
```
# Repository layout
- script-template
- Holds per-program template files
- Directory layout is the same as script-sketch
- script-sketch
- One directory per program
- Each program directory is named after the single app name from script-env
- File names are
- 5-digit zero-padded sequence number-program name-purpose-non-alias name-alias name
- The alias name gets added to ~/.bashrc, so give it a short, friendly name
- Since it becomes an alias, grant execute permission up front
- script-env
- OS name-version-one or more app/software names with their versions-single editor
- The default editor is vim
- script-repo
- Installer shell scripts for programs
- Launch shell scripts for programs
- Built with automation in mind so environments can be brought up quickly
- script-search
- Provide full-text search over all of the repositories above
- Turn it into a web app
- Full-text search server groonga
- http://blog.createfield.com/entry/2014/04/21/120023
- Full-text search server fess
- https://fess.codelibs.org/ja/
- https://qiita.com/Takumon/items/993609a4fc0fbb70c903
- Full-text search server mroonga
- https://mroonga.org/ja/docs/install.html
# md-doc.md
- Describes docker container operations
# md-dev.md
- Describes the development manual
# md-env.md
- Describes each container's individual environment
# md-trs.md
- Describes troubleshooting done while building the environment
# md-ref.md
- Lists references
# md-man.md
- Holds anything that does not fit into the markdown files above
# Creating periodically executed scripts with crontab
- Decided to manage the number of concurrently running processes per file.
- http://dqn.sakusakutto.jp/2012/06/cron_crontab9.html
- https://zenpou.hatenadiary.org/entry/20080715/1216133151
- https://qiita.com/mazgi/items/15e1fe7e130584343810
- https://qiita.com/onomame/items/71646c5517a39bcd01cc
Backup
```
$crontab -l>~/script_env/docker-build-crontab
```
Edit
```
$vi ~/script_env/docker-build-crontab
```
Apply
```
crontab < ~/script_env/docker-build-crontab
```
Check
```
$crontab -l
```
Check the process
```
$ps aux | grep cron
```
Check the startup log
```
$sudo less /var/log/cron
```
The final push is done manually
If I could guarantee never losing the private key I would like to automate the push,
but I may swap the OS periodically, so manual push is fine for now.
- http://tm.root-n.com/unix:command:git:cron_git_push
Force kill
```
su root
ps aux | grep 'docker build' | awk '{print $2}' | xargs kill
```
<file_sep>GIT_VERSION=2-24-1
PERL_VERSION=5-30-0
PYTHON_VERSION=3-7-4
<file_sep>DBEAVER_VERSION=X-X-X
JAVA_VERSION=11
MAVEN_VERSION=3-6-3
GIT_VERSION=2-24-1
PGROONGA_VERSION=X-X-X
POSTGRES_VERSION=12-X
PYTHON_VERSION=3-7-4
<file_sep>|User ID|User name|Group ID|Group name|Password|
|:-:|:-:|:-:|:-:|:-:|
|1001|apache|1001|apache|apache_pwd|
<file_sep>Ubuntu kept dumping core here, so run this on CentOS instead
```
$monodevelop 1>$HOME/launch-monodevelop.log 2>&1 &
```
<file_sep>#!/usr/bin/env bash
usage(){
cat <<EOS
Usage:
$0 script-env
EOS
exit 0
}
ENV_REPO="$1";shift
[ -z $ENV_REPO ] && usage
#Extract the lines that exist only in the existing Dockerfile compared with the auto-generated one
while read tgt;do
if [ -f $tgt/Dockerfile.sub.done ];then
#If Dockerfile.sub.done exists, do not create Dockerfile.sub
: #echo 1 $tgt
elif [ -f $tgt/Dockerfile.sub.undone ];then
#If Dockerfile.sub.undone exists, the work has been started but not finished, so do not create Dockerfile.sub.
: #echo 1 $tgt
else
#echo 0 $tgt
echo "cd $tgt && comm -23 --nocheck-order <(sort Dockerfile.asis | sed -r '/^$/d') <(sort Dockerfile.auto | sed -r '/^$/d')>Dockerfile.sub"
fi
done < <(find $HOME/$ENV_REPO -mindepth 1 -type d | grep -vP '\.git|docker-log|mnt')
<file_sep>DBEAVER_VERSION=X-X-X
GIT_VERSION=2-24-1
JAVA_VERSION=11
MAVEN_VERSION=3-6-3
POSTGRES_VERSION=12-0
PYTHON_VERSION=3-7-4
<file_sep>GIT_VERSION=2-24-1
PYTHON_VERSION=3-7-4
SWIFT_VERSION=X-X-X
<file_sep>FROM ubuntu:18.04
ARG OS_VERSION
ARG REPO
ARG DEBIAN_FRONTEND=noninteractive
ARG GIT_VERSION
ARG PYTHON_VERSION
ENV LANG=ja_JP.UTF-8
ENV DISPLAY=:0.0
RUN sed -i 's@<EMAIL>/pub/Linux@g' /etc/apt/sources.list && \
apt update && \
apt upgrade -y
RUN apt install -y git
RUN cd /usr/local/src && git clone https://github.com/ukijumotahaneniarukenia/$REPO.git
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-docker-host-user.sh | bash
RUN cd /usr/local/src/$REPO && echo './$OS_VERSION-install-default-user.sh ' | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-repository-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-dev-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-tool-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-network-pkg.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim-system.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim-user.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim_plug.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-python-$PYTHON_VERSION.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-pre-patch-git-$GIT_VERSION.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-git-$GIT_VERSION.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-dotfile.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-locale.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-env.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-ld.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-font-RictyDiminished.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-fcitx_mozc.sh | bash
COPY subrun.sh /root/subrun.sh
#RUN bash /root/subrun.sh
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-aws-workspaces-client.sh | bash
USER kuraine
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim-user.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-install-vim_plug.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-dotfile.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-env.sh | bash
RUN cd /usr/local/src/$REPO && echo ./$OS_VERSION-config-font-RictyDiminished.sh | bash
WORKDIR /home/kuraine
COPY run.sh /etc/init/run.sh
ENTRYPOINT ["/etc/init/run.sh","ubuntu"]
<file_sep>#!/usr/bin/env bash
yum install -y https://packages.groonga.org/centos/groonga-release-latest.noarch.rpm
yum install -y http://repo.mysql.com/mysql80-community-release-el7.rpm
yum install -y --enablerepo=epel mysql80-community-mroonga
#This one did not work
#expect -c "
#spawn yum install -y --enablerepo=epel mysql80-community-mroonga
#expect \"Enter password:\"
#send -- \"\n\"
#expect \"Enter password:\"
#send -- \"\n\"
#"
<file_sep>GIT_VERSION=2-24-1
PYTHON_VERSION=3-7-4
RACKET_VERSION=7-5
| 86ef282c6dd8986a8d78c859bb0da6a6807f677e | [
"Markdown",
"Dockerfile",
"Shell"
] | 141 | Dockerfile | ukijumotahaneniarukenia/script_env | b6fec8845a20e71aa66a9cb509f7199a3c1e8058 | 06c8ebe8940559e29f2d11700c7eacfd634544bf | |
refs/heads/master | <repo_name>erik-rosen/self-driving-car-project-lane-finding<file_sep>/README.md
## Advanced lane finding

**Advanced Lane Finding Project**
The goals / steps of this project are the following:
* Compute the camera calibration matrix and distortion coefficients given a set of chessboard images.
* Apply a distortion correction to raw images.
* Apply a perspective transform to rectify binary image ("birds-eye view").
* Use color transforms, gradients, etc., to create a thresholded binary image that marks lane lines in an image.
* Detect lane pixels and fit to find the lane boundary.
* Determine the curvature of the lane and vehicle position with respect to center.
* Warp the detected lane boundaries back onto the original image.
* Output visual display of the lane boundaries and numerical estimation of lane curvature and vehicle position.
[//]: # (Image References)
[image1]: ./output_images/undistort_output.png "Undistorted"
[image2]: ./output_images/undistort_output2.png "Road Transformed"
[image3]: ./output_images/straight_lines2.jpg "Binary example"
[image4]: ./output_images/straight_lines1.jpg "Birdseye view of road, top right"
[image5]: ./output_images/test1.jpg "Processed image"
[image6]: ./output_images/test2.jpg "Processed image - good performance"
[image7]: ./output_images/test5.jpg "Processed image - poor performance"
[image8]: ./output_images/test6.jpg "Processed image - poor performance"
[image9]: ./output_images/test4.jpg "Processed image - good performance"
[video1]: ./project_video_dog.mp4 "Video"
### Camera Calibration
#### 1. Briefly state how you computed the camera matrix and distortion coefficients. Provide an example of a distortion corrected calibration image.
The code for this step is contained in the first code cell of the IPython notebook located in `./camera_calibration.ipynb`
I start by preparing "object points", which will be the (x, y, z) coordinates of the chessboard corners in the world. I assume the chessboard is fixed on the (x, y) plane at z=0, such that the object points are the same for each calibration image. `objp` will therefore be a replicated array of coordinates, and `objpoints` will be appended with a copy of it every time we detect all chessboard corners in a test image. `imgpoints` will be appended with the (x, y) pixel position of each of the corners in the image plane with each successful chessboard detection.
I load in the images in the `./camera_cal/` directory in a for-loop, and use OpenCV's `findChessboardCorners()` to extract the corners in the chessboard in the image. If a chessboard is found, I apply OpenCV's `cornerSubPix()` function which refines the position of the corner to subpixel accuracy.
I then used the output `objpoints` and `imgpoints` to compute the camera calibration and distortion coefficients using the `cv2.calibrateCamera()` function. I applied this distortion correction to the test image using the `cv2.undistort()` function and obtained this result:
![alt text][image1]
I serialize the computed camera matrix and the distortion coefficients into a pickle file that I can load in when I run the lane-finding pipeline. This saves me from having to run the calibration code (extract corners, compute the camera matrix and distortion coefficients) every time I run the pipeline.
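For reference, a minimal sketch of this calibration step (not the exact notebook code; the 9x6 inner-corner count, the image glob pattern, and the pickle file name are assumptions) looks roughly like this:
```python
import glob, pickle
import cv2
import numpy as np
nx, ny = 9, 6  # assumed inner-corner count of the calibration chessboard
objp = np.zeros((nx * ny, 3), np.float32)
objp[:, :2] = np.mgrid[0:nx, 0:ny].T.reshape(-1, 2)
objpoints, imgpoints = [], []
for fname in glob.glob('./camera_cal/*.jpg'):
    gray = cv2.cvtColor(cv2.imread(fname), cv2.COLOR_BGR2GRAY)
    found, corners = cv2.findChessboardCorners(gray, (nx, ny), None)
    if found:
        # refine the detected corners to subpixel accuracy
        criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
        corners = cv2.cornerSubPix(gray, corners, (11, 11), (-1, -1), criteria)
        objpoints.append(objp)
        imgpoints.append(corners)
ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, gray.shape[::-1], None, None)
pickle.dump({'mtx': mtx, 'dist': dist}, open('calibration.p', 'wb'))
```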
### Pipeline (single images)
#### 1. Provide an example of a distortion-corrected image.
In "./lane_finder.ipynb" I start by loading in the camera matrix and distortion coefficents that we saved earlier in the pickle file. I also define the perspective transfrom (more on this later). All of these are passed to the constructor of my instatiation of a `LaneFinder` object, `l`. The LaneFinder object is defined in `./lanefinder.py` and contains all methods used in the pipeline, the main one being `findLanes()`, defined on line 105. This method takes an input frame, extracts the lane lines from that frame, computes their parameters, and outputs a frame illustrating the intermediary steps in the pipeline, as well as the estimated curvature of the lane lines extracted, and the offset of the car from the center oof the lane.
The first step in the `findLanes()` method is correcting the raw input image from camera distortion using openCV's `undistort()` function, which takes the camera matrix and the distortion coefficients which were passed to the constructor of the `LaneFinder` object. The result is shown below.
![alt text][image2]
#### 2. Describe how (and identify where in your code) you performed a perspective transform and provide an example of a transformed image.
The second step in the `findLanes()` method is to compute a "birdseye" view of the road using OpenCV's `warpPerspective()` method. See line 112 in `lanefinder.py`. The perspective transform matrix used is passed to the constructor of the `LaneFinder` object.
In our case this matrix is created by manually defining a set of points in the undistorted image. Each point was picked such that the top and bottom sides of the resulting shape are horizontal, and each point is on the outside of a lane line on the road. The top side points are positioned such that the lane lines are on the verge of being difficult to distinguish around that distance, and the bottom side points are picked such that the bottom side is at the bottom of the original image.
The destination points where selected such that: 1) the resulting shape was perfectly rectangular, 2) there were no "undefined"/dark sections in the resulting view and 3) the car bonnet was not visible.
This resulted in the following source and destination points:
| Source | Destination |
|:-------------:|:-------------:|
| 578, 460 | 280, 0 |
| 702, 460 | 1000, 0 |
| 1100, 720 | 1000, 748 |
| 180, 720 | 280, 748 |
You can see this in line 23-25 in the first cell of `./lane_finder.ipynb`.
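For reference, computing the transform (and its inverse) from these points is only a couple of OpenCV calls; a rough sketch, with `undistorted` standing in for the distortion-corrected frame, is:
```python
import cv2
import numpy as np
src = np.float32([[578, 460], [702, 460], [1100, 720], [180, 720]])
dst = np.float32([[280, 0], [1000, 0], [1000, 748], [280, 748]])
M = cv2.getPerspectiveTransform(src, dst)   # driver view -> birdseye
Minv = np.linalg.inv(M)                     # birdseye -> driver view
birdseye = cv2.warpPerspective(undistorted, M, (undistorted.shape[1], undistorted.shape[0]), flags=cv2.INTER_LINEAR)
```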
I verified that my perspective transform was working as expected by checking that the resulting lane lines in the transformed image were parallel (see top right for birdseye view image tranformed from the original "driver seat" view):
![alt text][image4]
#### 3. Describe how (and identify where in your code) you used color transforms, gradients or other methods to create a thresholded binary image. Provide an example of a binary image result.
The next step in the `findLanes()` method is to compute a lane segmented binary image. See line 116 of `./lanefinder.py`, which calls the method `laneLineSegmentationLaplace()`.
I experimented with a number of techniques to effectively segment lane lines. The one which performed best can be found in rows 205-222 of `./lanefinder.py`, `laneLineSegmentationLaplace()`.
This method takes the birdseye view image of the road, and creates a weighted image from the S-channel and grayscale version of the input frame. This accentuates both the yellow and the white lane lines in the resulting grayscale image well. See the image in the top left in the figure below for the resulting output.
Since the width of the lane lines is known, I apply a Laplacian filter with a kernel size which best corresponds to the lane line width (i.e. maximizes its response when applied to a lane line). To avoid having to use a very large kernel (computationally expensive), I downsample the input image to a tenth of its original size. This allows me to use a much smaller kernel and get the same effect.
The magnitude of the Laplacian filter output is sensitive to lighting conditions. To mitigate that when thresholding, I linearly scale the output such that the minimum of the output is set to 0, and the maximum of the output is 255. I assume that if a lane line is present, it will likely produce the minimum output in the image. See the second image from the top left in the figure below for the resulting Laplacian filter output.
I then apply a threshold to the scaled Laplacian filter output to produce a binary image (in our case each pixel is either 0 or 255). See the third image from the top left in the figure below for the resulting output.
![alt text][image3]
#### 4. Describe how (and identify where in your code) you identified lane-line pixels and fit their positions with a polynomial?
The next step in the `findLanes()` method is to find the actual lane pixels in the binary image produced in the previous step. See lines 120-127 in `lanefinder.py`.
Depending on whether the lane line was confidently found in the prior frame, we search around the previously best fit polynomial using `search_around_poly()`, if not, we use sliding windows `find_lane_pixels_sliding_window_one_side()` to identify lane pixels from the thresholded image. These two methods are defined in lines 388-452 and 308-386 respectively in `lanefinder.py`. They return the identified lane line pixels and an image marking the effective lane pixel search areas.
The identified laneline pixels are passed to the `updateFit()` method belonging to the Line objects (instantiated as `leftLane` and `rightLane` belonging to the `LaneFinder` object).
This method can be found on lines 35-95 in `lanefinder.py`. This function first fits a second order polynomial to the identified lane pixels.
If the number of identified lane line pixels is too few, if the radius of curvature of the interpolated points is lower than 60 meters, or if the difference from the previous best fit is too great, we reject the fit and mark the line as currently undetected.
If the detection is accepted, we update the best fit coefficients. The best fit coefficients are computed as the average of the last 5 accepted fits.
![alt text][image8]
#### 5. Describe how (and identify where in your code) you calculated the radius of curvature of the lane and the position of the vehicle with respect to center.
After updating the best fit coefficients in `updateFit()`, the radius of curvature and offset of the lane line from the vehicle centre is computed. See lines 87-93 in `lanefinder.py` The position of the vehicle relative to the center of the lane bounded by the lane lines is computed from the offset estimate of the left and right lane line relative to the center of the car on line 142 in `lanefinder.py`.
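For a second order fit x = Ay^2 + By + C (with the coefficients refit in meter units), the radius of curvature used here is the standard expression
$$R = \frac{\left(1 + (2Ay + B)^2\right)^{3/2}}{\lvert 2A \rvert}$$
evaluated at the bottom of the image, i.e. at the y value closest to the car.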
These metrics can all be seen in the image text overlay above, and the code to draw this overlay is defined on lines 137-145.
#### 6. Provide an example image of your result plotted back down onto the road such that the lane area is identified clearly.
As part of `updateFit()` that was run earlier, we store the points of the curve of the best fit second order polynomial in the `Line` objects. The next step in the `findLanes()` method is to draw the lane line curves and the lane polygon on the birdseye view image. This is done on lines 150-154 and 166 in `lanefinder.py`. The result is the top right image seen in the figure below.
On line 155 we use the inverse perspective transform which we computed in the constructor of the LaneFinder object to warp the lane lines and lane overlays to how they look from the "driver view". On line 166, we blend the road-warped color overlay with the undistorted "driver view" image, resulting in the bottom image seen in the figure below.
![alt text][image6]
---
### Pipeline (video)
Here's a [link to my video result](./output_project_video_dog.mp4)
---
### Discussion
#### 1. Briefly discuss any problems / issues you faced in your implementation of this project. Where will your pipeline likely fail? What could you do to make it more robust?
The lane line segmentation of the current pipeline does not perform very well when the contrast between road and lane line marking is small, when the lane line is occluded or missing from the frame, when part of the road is in shadow, when there are speckles on the road, or when the color of the road changes - see below for examples where the pipeline fails or does not perform well:
![alt text][image5]
![alt text][image9]
![alt text][image7]
If I were to take this project further, I would focus on improving the lane line segmentation part of the pipeline, likely exploring training and applying a convolutional neural net for the lane line segmentation task, which is the state of the art at the time of writing: https://paperswithcode.com/paper/learning-lightweight-lane-detection-cnns-by
In order for the lane line finder to be useful for autonomous driving, it will need to be running with a sufficiently high frame rate throughput - the current framerate throughput of the implementation is likely too low for practical application. Improving the performance of the pipeline would be the second thing I would focus on.
<file_sep>/lanefinder.py
import numpy as np
import cv2
# Define a class to receive the characteristics of each line detection
class Line():
def __init__(self, side):
# was the line detected in the last iteration?
self.side = side
self.detected = False
# x values of the last n fits of the line
self.recent_xfitted = []
#points of the best fitted line over the last n iterations
self.bestpts = np.int32( [ np.array([np.transpose(np.vstack(([], [])))])])
#polynomial coefficients averaged over the last n iterations
self.best_fit = np.array([0,0,0], dtype='float')
#polynomial coefficients for the most recent fit
self.current_fit = np.array([0,0,0], dtype='float')
#radius of curvature of the line in some units
self.radius_of_curvature = None
#distance in meters of vehicle center from the line
self.line_base_pos = None
#difference in fit coefficients between last and new fits
self.diffs = np.array([0,0,0], dtype='float')
#x values for detected line pixels
self.allx = None
#y values for detected line pixels
self.ally = None
#number of good fits currently averaged over - will never exceed self.average_over_n_fits
self.number_of_fits = 0
self.average_over_n_fits = 5
# Define conversions in x and y from pixels space to meters
self.ym_per_pix = 30/720 # meters per pixel in y dimension
self.xm_per_pix = 3.7/700 # meters per pixel in x dimension
def updateFit(self, allx, ally):
self.allx = allx
self.ally = ally
# Initial sanity check - if the number of points found are too few, skip
if allx.size<3:
self.detected = False
return None
# Fit a second order polynomial to the points
self.current_fit = np.polyfit(ally,allx,2)
# also fit one that is scaled to meters
current_fit_meters = np.polyfit(ally*self.ym_per_pix, allx*self.xm_per_pix, 2)
y_eval=720*self.ym_per_pix
curverad = (1+(2*current_fit_meters[0]*y_eval+current_fit_meters[1])**2)**1.5 / (2*abs(current_fit_meters[0]))
if self.number_of_fits > 0:
self.diffs = np.absolute(self.current_fit - self.best_fit)
# Sanity checks
if (curverad<60):
self.detected = False
return None
if (np.sum(self.diffs)>1000):
self.detected = False
return None
self.detected = True
if (self.number_of_fits < self.average_over_n_fits):
self.number_of_fits=self.number_of_fits + 1
#Average over n last fits - if there were none, adjust accordingly
weight_current_fit = 1 / self.number_of_fits
weight_of_historical_fit = 1 - weight_current_fit
self.best_fit = [self.current_fit[0]*weight_current_fit + self.best_fit[0]*weight_of_historical_fit,
self.current_fit[1]*weight_current_fit + self.best_fit[1]*weight_of_historical_fit,
self.current_fit[2]*weight_current_fit + self.best_fit[2]*weight_of_historical_fit]
# Generate x values for plotting
ploty = np.linspace(0, 720-1, 720 )
bestx = self.best_fit[0]*ploty**2 + self.best_fit[1]*ploty + self.best_fit[2]
# Transform into points for plotting
if self.side == 'left':
self.bestpts = np.int32( [ np.array([np.transpose(np.vstack((bestx, ploty)))])])
else:
self.bestpts = np.int32( [ np.array( [ np.flipud( np.transpose( np.vstack( [bestx, ploty] ) ) ) ] ) ] )
# Update radius of curvature
best_fit_meters = np.polyfit(ploty*self.ym_per_pix, bestx*self.xm_per_pix, 2)
y_eval=720*self.ym_per_pix
self.radius_of_curvature = (1+(2*best_fit_meters[0]*y_eval+best_fit_meters[1])**2)**1.5 / (2*abs(best_fit_meters[0]))
# Update base offset from centre - TODO: Clean up with parameters
self.line_base_pos = (self.best_fit[0]*720**2 + self.best_fit[1]*720 + self.best_fit[2] - 1280/2) * self.xm_per_pix
return None
class LaneFinder():
def __init__(self,cameraMatrix,distCoeffs,perspectiveTransformMatrix):
self.cameraMatrix = cameraMatrix
self.distCoeffs = distCoeffs
self.perspectiveTransformMatrix = perspectiveTransformMatrix
self.inversePerspectiveTransform = np.linalg.inv(perspectiveTransformMatrix)
self.leftLane = Line('left')
self.rightLane = Line('right')
def findLanes(self, frame):
frameSize = (frame.shape[1],frame.shape[0])
#Undistort image
frame = cv2.undistort(frame, self.cameraMatrix, self.distCoeffs, None, self.cameraMatrix)
#Get the birdseye view:
birdsEyeImage = cv2.warpPerspective(frame, self.perspectiveTransformMatrix, frameSize, flags=cv2.INTER_LINEAR)
#Perform Lane Segmentation:
#grayscale, abs_sobelx, thresh_sobel, extracted_lines = self.laneLineSegmentation(birdsEyeImage)
grayscale, laplacian, extracted_lines = self.laneLineSegmentationLaplace(birdsEyeImage)
#Find lane pixels
#leftx, lefty, rightx, righty, extracted_lane_pixels = self.find_lane_pixels_sliding_window(extracted_lines)
if (self.leftLane.detected):
leftx, lefty, left_img = self.search_around_poly(extracted_lines.copy(),self.leftLane)
else:
leftx, lefty, left_img = self.find_lane_pixels_sliding_window_one_side(extracted_lines.copy(),self.leftLane)
if (self.rightLane.detected):
rightx, righty, right_img = self.search_around_poly(extracted_lines.copy(),self.rightLane)
else:
rightx, righty, right_img = self.find_lane_pixels_sliding_window_one_side(extracted_lines.copy(),self.rightLane)
extracted_lane_pixels = np.hstack((left_img, right_img))
#Update lines
self.leftLane.updateFit(leftx,lefty)
self.rightLane.updateFit(rightx,righty)
font = cv2.FONT_HERSHEY_SIMPLEX
cv2.putText(frame,'L curv: '+ str(self.leftLane.radius_of_curvature) ,(900,30), font, 1, (255, 255, 255), 2, cv2.LINE_AA)
cv2.putText(frame,'R curv: '+ str(self.rightLane.radius_of_curvature) ,(900,60), font, 1, (255, 255, 255), 2, cv2.LINE_AA)
cv2.putText(frame,'L offset: '+ str(self.leftLane.line_base_pos) ,(900,90), font, 1, (255, 255, 255), 2, cv2.LINE_AA)
cv2.putText(frame,'R offset: '+ str(self.rightLane.line_base_pos) ,(900,120), font, 1, (255, 255, 255), 2, cv2.LINE_AA)
if (self.leftLane.number_of_fits>0 and self.rightLane.number_of_fits>0 ):
center_offset = (self.leftLane.line_base_pos+self.rightLane.line_base_pos)*0.5
else:
center_offset = 0
cv2.putText(frame,'Offset in Lane: '+ str(center_offset) ,(900,150), font, 1, (255, 255, 255), 2, cv2.LINE_AA)
#cv2.putText(frame,'L diffs: '+ str(np.sum(self.leftLane.diffs)) ,(900,180), font, 1, (255, 255, 255), 2, cv2.LINE_AA)
#cv2.putText(frame,'R diffs: '+ str(np.sum(self.rightLane.diffs)) ,(900,210), font, 1, (255, 255, 255), 2, cv2.LINE_AA)
#Draw best averaged best fit lanes
poly = np.hstack((self.leftLane.bestpts[0], self.rightLane.bestpts[0]))
color_overlay = np.zeros_like(frame)
cv2.polylines(color_overlay, self.leftLane.bestpts, False, color=[255,0,0], thickness=30, lineType=cv2.LINE_AA)
cv2.polylines(color_overlay, self.rightLane.bestpts, False, color=[0,0,255], thickness=30, lineType=cv2.LINE_AA)
cv2.fillPoly(color_overlay, np.array([poly], dtype=np.int32), (0,255, 0))
warped_color_overlay = cv2.warpPerspective(color_overlay, self.inversePerspectiveTransform, (frame.shape[1],frame.shape[0]), flags=cv2.INTER_NEAREST)
#Stitch together images
processingrow = np.hstack((
cv2.cvtColor(grayscale,cv2.COLOR_GRAY2RGB),
#cv2.cvtColor(abs_sobelx,cv2.COLOR_GRAY2RGB),
#cv2.cvtColor(thresh_sobel,cv2.COLOR_GRAY2RGB),
cv2.cvtColor(laplacian,cv2.COLOR_GRAY2RGB),
cv2.cvtColor(np.uint8(extracted_lines),cv2.COLOR_GRAY2RGB),
extracted_lane_pixels,
cv2.addWeighted(birdsEyeImage, 1, color_overlay, 0.3, 0,-1)
))
processingrow = cv2.resize(processingrow, (frameSize[0],frameSize[1]//4), interpolation = cv2.INTER_AREA)
result = np.vstack((processingrow,cv2.addWeighted(frame, 1, warped_color_overlay, 0.3, 0,-1)))
return result
def laneLineSegmentation(self, birdsEyeImage):
# S-channel is good at finding yellow lines and is largely invariant to lighting conditions
# S-channel isn't great at finding the white lines though, so meld it together with
# a greyscale image to find both white and yellow lines
sChannel = cv2.cvtColor(birdsEyeImage,cv2.COLOR_RGB2HSV)[:,:,1]
gray = cv2.cvtColor(birdsEyeImage,cv2.COLOR_RGB2GRAY)
weighted = cv2.addWeighted(sChannel, 1, gray, 1, 0,-1)
# Extract edges of the vertical lines
sobelx = cv2.Sobel(weighted, cv2.CV_32F, 1, 0, 15)
# Take the absolute value of the x gradients
abs_sobelx = np.absolute(sobelx)
# Threshold gradients
thresh_sobel = np.zeros_like(abs_sobelx)
thresh_sobel[(abs_sobelx > 40)] = 1
# We know that lies are 20-40 pixels wide. By dilating the thresholded edges, we join the edges together
# We then erode the dilated lines by more than that such that any edges that are not within a certain
# distance of another edge will be removed.
#dilation_size = 40
#erosion_size = 70
# Create structure elements
#dilationStructure = cv2.getStructuringElement(cv2.MORPH_RECT, (dilation_size, 1))
#erosionStructure = cv2.getStructuringElement(cv2.MORPH_RECT, (erosion_size, 1))
# Apply morphology operations
#dilated = cv2.dilate(thresh_sobel, dilationStructure)
#eroded = cv2.erode(dilated, erosionStructure)
#extracted_lines = eroded * 255
#return weighted, abs_sobelx, thresh_sobel*255, extracted_lines
return weighted, abs_sobelx, thresh_sobel*255, thresh_sobel*255
def laneLineSegmentationLaplace(self, birdsEyeImage):
size = (birdsEyeImage.shape[1],birdsEyeImage.shape[0])
downsample = cv2.resize(birdsEyeImage, (birdsEyeImage.shape[0]//10,birdsEyeImage.shape[1]//10), interpolation = cv2.INTER_AREA)
sChannel = cv2.cvtColor(downsample,cv2.COLOR_RGB2HSV)[:,:,1]
gray = cv2.cvtColor(downsample,cv2.COLOR_RGB2GRAY)
weighted = cv2.addWeighted(sChannel, 1, gray, 1, 0,-1)
#laplacian = cv2.filter2D(weighted,cv2.CV_32F,kernel)
laplacian = cv2.Laplacian( weighted, ddepth = cv2.CV_32F, ksize=7 )
l_min = np.amin(laplacian)
l_max = np.amax(laplacian)
laplacian = np.uint8((laplacian - l_min) * 255 / (l_max - l_min))
thresh_laplacian = np.zeros_like(laplacian)
thresh_laplacian[(laplacian < 100)] = 255
# TODO: Implement
return cv2.resize(weighted,size,interpolation = cv2.INTER_NEAREST), cv2.resize(laplacian,size,interpolation = cv2.INTER_NEAREST), cv2.resize(thresh_laplacian,(size),interpolation = cv2.INTER_NEAREST)
def find_lane_pixels_sliding_window(self, binary_warped):
# Take a histogram of the bottom half of the image
histogram = np.sum(binary_warped[binary_warped.shape[0]//2:,:], axis=0)
# Create an output image to draw on and visualize the result
out_img = np.dstack((binary_warped, binary_warped, binary_warped))
# Find the peak of the left and right halves of the histogram
# These will be the starting point for the left and right lines
midpoint = np.int(histogram.shape[0]//2)
leftx_base = np.argmax(histogram[:midpoint])
rightx_base = np.argmax(histogram[midpoint:]) + midpoint
# HYPERPARAMETERS
# Choose the number of sliding windows
nwindows = 9
# Set the width of the windows +/- margin
margin = 120
# Set minimum number of pixels found to recenter window
minpix = 30
# Set height of windows - based on nwindows above and image shape
window_height = np.int(binary_warped.shape[0]//nwindows)
# Identify the x and y positions of all nonzero pixels in the image
nonzero = binary_warped.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
# Current positions to be updated later for each window in nwindows
leftx_current = leftx_base
rightx_current = rightx_base
# Create empty lists to receive left and right lane pixel indices
left_lane_inds = []
right_lane_inds = []
# Step through the windows one by one
for window in range(nwindows):
# Identify window boundaries in x and y (and right and left)
win_y_low = binary_warped.shape[0] - (window+1)*window_height
win_y_high = binary_warped.shape[0] - window*window_height
### Find the four below boundaries of the window ###
win_xleft_low = leftx_current - margin # Update this
win_xleft_high = leftx_current + margin # Update this
win_xright_low = rightx_current - margin # Update this
win_xright_high = rightx_current + margin # Update this
# Draw the windows on the visualization image
cv2.rectangle(out_img,(win_xleft_low,win_y_low),
(win_xleft_high,win_y_high),(0,255,0), 2)
cv2.rectangle(out_img,(win_xright_low,win_y_low),
(win_xright_high,win_y_high),(0,255,0), 2)
### Identify the nonzero pixels in x and y within the window ###
good_left_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_xleft_low) & (nonzerox < win_xleft_high)).nonzero()[0]
good_right_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_xright_low) & (nonzerox < win_xright_high)).nonzero()[0]
# Append these indices to the lists
left_lane_inds.append(good_left_inds)
right_lane_inds.append(good_right_inds)
### If you found > minpix pixels, recenter next window ###
### (`right` or `leftx_current`) on their mean position ###
if len(good_left_inds)>minpix:
leftx_current = np.int(np.mean(nonzerox[good_left_inds]))
if len(good_right_inds)>minpix:
rightx_current = np.int(np.mean(nonzerox[good_right_inds]))
# Concatenate the arrays of indices (previously was a list of lists of pixels)
try:
left_lane_inds = np.concatenate(left_lane_inds)
right_lane_inds = np.concatenate(right_lane_inds)
except ValueError:
# Avoids an error if the above is not implemented fully
pass
# Extract left and right line pixel positions
leftx = nonzerox[left_lane_inds]
lefty = nonzeroy[left_lane_inds]
rightx = nonzerox[right_lane_inds]
righty = nonzeroy[right_lane_inds]
return leftx, lefty, rightx, righty, out_img
def find_lane_pixels_sliding_window_one_side(self, binary_warped, line):
# Split the image in half
midpoint = np.int(binary_warped.shape[1]//2)
if(line.side=='left'):
binary_warped= binary_warped[:, :midpoint]
else:
binary_warped= binary_warped[:, midpoint:]
# Take a histogram of the bottom half of the image
histogram = np.sum(binary_warped[binary_warped.shape[0]//2:,:], axis=0)
# Create an output image to draw on and visualize the result
out_img = np.dstack((binary_warped, binary_warped, binary_warped))
# Find the peak of the left and right halves of the histogram
# These will be the starting point for the left and right lines
midpoint = np.int(histogram.shape[0]//2)
x_base = np.argmax(histogram)
# HYPERPARAMETERS
# Choose the number of sliding windows
nwindows = 9
# Set the width of the windows +/- margin
margin = 120
# Set minimum number of pixels found to recenter window
minpix = 30
# Set height of windows - based on nwindows above and image shape
window_height = np.int(binary_warped.shape[0]//nwindows)
# Identify the x and y positions of all nonzero pixels in the image
nonzero = binary_warped.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
# Current positions to be updated later for each window in nwindows
x_current = x_base
# Create empty lists to receive left and right lane pixel indices
lane_inds = []
# Step through the windows one by one
for window in range(nwindows):
# Identify window boundaries in x and y (and right and left)
win_y_low = binary_warped.shape[0] - (window+1)*window_height
win_y_high = binary_warped.shape[0] - window*window_height
### Find the four below boundaries of the window ###
win_x_low = x_current - margin # Update this
win_x_high = x_current + margin # Update this
# Draw the windows on the visualization image
cv2.rectangle(out_img,(win_x_low,win_y_low),
(win_x_high,win_y_high),(0,255,0), 2)
### Identify the nonzero pixels in x and y within the window ###
good_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_x_low) & (nonzerox < win_x_high)).nonzero()[0]
# Append these indices to the lists
lane_inds.append(good_inds)
### If you found > minpix pixels, recenter next window ###
### (`right` or `leftx_current`) on their mean position ###
if len(good_inds)>minpix:
x_current = np.int(np.mean(nonzerox[good_inds]))
# Concatenate the arrays of indices (previously was a list of lists of pixels)
try:
lane_inds = np.concatenate(lane_inds)
except ValueError:
# Avoids an error if the above is not implemented fully
pass
# Extract left and right line pixel positions
if(line.side=='right'):
x = nonzerox[lane_inds] + 640
else:
x = nonzerox[lane_inds]
y = nonzeroy[lane_inds]
return x, y, out_img
def search_around_poly(self, binary_warped, line):
# Split the image in half
midpoint = np.int(binary_warped.shape[1]//2)
if(line.side=='left'):
binary_warped = binary_warped[:, :midpoint]
else:
binary_warped = binary_warped[:, midpoint:]
# HYPERPARAMETER
# Choose the width of the margin around the previous polynomial to search
# The quiz grader expects 100 here, but feel free to tune on your own!
margin = 100
# Grab activated pixels
nonzero = binary_warped.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
### TO-DO: Set the area of search based on activated x-values ###
### within the +/- margin of our polynomial function ###
### Hint: consider the window areas for the similarly named variables ###
### in the previous quiz, but change the windows to our new search area ###
if (line.side=='right'):
fit_x = line.best_fit[0]*(nonzeroy**2) + line.best_fit[1]*nonzeroy + line.best_fit[2] - midpoint
else:
fit_x = line.best_fit[0]*(nonzeroy**2) + line.best_fit[1]*nonzeroy + line.best_fit[2]
lane_inds = ((nonzerox > (fit_x - margin)) & (nonzerox < (fit_x + margin)))
# Again, extract left and right line pixel positions
x = nonzerox[lane_inds]
y = nonzeroy[lane_inds]
# Fit new polynomials
#fitx, right_fitx, ploty = fit_poly(binary_warped.shape, leftx, lefty, rightx, righty)
## Visualization ##
# Create an image to draw on and an image to show the selection window
out_img = np.dstack((binary_warped, binary_warped, binary_warped))*255
window_img = np.zeros_like(out_img)
# Color in left and right line pixels
out_img[nonzeroy[lane_inds], nonzerox[lane_inds]] = [255, 0, 0]
ploty = np.linspace(0, binary_warped.shape[0]-1, binary_warped.shape[0])
fitx = line.best_fit[0] * ploty**2 + line.best_fit[1] * ploty + line.best_fit[2]
if (line.side=='right'):
fitx = fitx - midpoint
# Generate a polygon to illustrate the search window area
# And recast the x and y points into usable format for cv2.fillPoly()
line_window1 = np.array([np.transpose(np.vstack([fitx-margin, ploty]))])
line_window2 = np.array([np.flipud(np.transpose(np.vstack([fitx+margin,
ploty])))])
line_pts = np.hstack((line_window1, line_window2))
# Draw the lane onto the warped blank image
cv2.fillPoly(window_img, np.int_([line_pts]), (0,255, 0))
result = cv2.addWeighted(out_img, 1, window_img, 0.3, 0)
if(line.side=='right'):
x = x + midpoint
return x, y, result | f234656513477e82241998057298f6a98cefa247 | [
"Markdown",
"Python"
] | 2 | Markdown | erik-rosen/self-driving-car-project-lane-finding | b0665c3851b9c5ff86414bd9f3dbe585ef7df152 | ba9a3d7bf6aaa7ae82e119cff9e205d468cb630a | |
refs/heads/master | <file_sep>tc qdisc add dev ens4 root netem loss 40% && sleep 10
<file_sep>tc qdisc del dev ens4 root netem loss 40%
| 8af2d5a5ce443740606a924f088a3778ebc42a29 | [
"Shell"
] | 2 | Shell | KellyGriffin/PlayYAMLS | 432fa4a608a92f93dba01bfe10f104dd2c497379 | 43f2b5d7043651dd45ab66d5cfb750fa5f63d9ad | |
refs/heads/master | <repo_name>mick60316/HID_Game_Controller<file_sep>/README.md
# Arduino
1. 需要將藍芽HID修改為GamePad
2. 利用G-Sensor模擬類比搖桿
<file_sep>/HID_Gamepadv2/HID_Gamepadv2.ino
#include <SoftwareSerial.h>
#include <Wire.h>
#include <Timer.h>
Timer timer;
SoftwareSerial BTSerial(A2, A3); // RX | TX
int I2C_Address = 0xA7 >> 1; // ADXL345 I2C address
int X0, X1, Y0, Y1, Z1, Z0;
float X, Y, Z;
long cont = 0;
byte buffer_move [] = {0xFD, 0x06, 127, 0, 0, 0, 0x00, 0x00,
};//data = {0xFD,0x06,x1,y1,x2,y2,btnByte1,btnByte1}
int fps = 10 ;
void setup()
{
Serial.begin(115200);
timer.every(50, TimerCounter);//us
BTSerial.begin(115200);
Wire.begin(); //初始化 I2C
  setReg(0x2D, 0xA); // (power on, set the output data rate to 100 Hz)
buffer_move[2] = 0;
BTSerial.write(buffer_move , sizeof(buffer_move));
}
void loop()
{
timer.update();
  X0 = getData(0x32); // read X-axis low byte
  X1 = getData(0x33); // read X-axis high byte
  X = ((X1 << 8) + X0) / 256.0;
  Y0 = getData(0x34); // read Y-axis low byte
  Y1 = getData(0x35); // read Y-axis high byte
  Y = ((Y1 << 8) + Y0) / 256.0;
  Z0 = getData(0x36); // read Z-axis low byte
  Z1 = getData(0x37); // read Z-axis high byte
  Z = ((Z1 << 8) + Z0) / 256.0;
X = X * 10;
Z = Z * 10;
Serial.print(X);
Serial.print(" ");
Serial.println(X * 12.7);
float angleValue = X * 12.7;
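  // Map the tilt-derived angle onto the gamepad X axis: values within +/-15 count as a
  // dead zone (stick centered, drive straight); larger tilts are written to the X-axis byte.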
  if (angleValue < 15 && angleValue > -15) // Go Forward
{
buffer_move[2] = 0;
}
else // Turn left or right
{
buffer_move[2] = angleValue;
}
if (Z > 1)
{
buffer_move[6] = 8;
}
else
{
buffer_move[6] = 0;
}
setMsgToAndroid();
delay(17);
}
void setMsgToAndroid ()
{
BTSerial.write(buffer_move , sizeof(buffer_move));
}
void setReg(int reg, int data) {
Wire.beginTransmission(I2C_Address);
  Wire.write(reg); // select the register
  Wire.write(data); // write the data
Wire.endTransmission();
}
int getData(int reg) {
Wire.beginTransmission(I2C_Address);
Wire.write(reg);
Wire.endTransmission();
Wire.requestFrom(I2C_Address, 1);
  if (Wire.available() >= 1) {
    return Wire.read();
  }
  return -1; // no byte received
}
void TimerCounter()
{
cont++;
// Serial.println(cont);
}
| ef5a3e737936b15432287e1ee461174ecf91e87f | [
"Markdown",
"C++"
] | 2 | Markdown | mick60316/HID_Game_Controller | 0e9188be023683e5771f9b16522748ad691e0e8a | cdb472a27921f44ff7e5d06d3a7d35a5ca959625 | |
refs/heads/master | <file_sep>/***********************************************************
Starter code for Assignment 3
This code was originally written by <NAME> for
CSC418, SPRING 2005
implements scene_object.h
***********************************************************/
#include <cmath>
#include <iostream>
#include "scene_object.h"
#include "bmp_io.h"
bool UnitSquare::intersect( Ray3D& ray, const Matrix4x4& worldToModel,
const Matrix4x4& modelToWorld ) {
// TODO: implement intersection code for UnitSquare, which is
// defined on the xy-plane, with vertices (0.5, 0.5, 0),
// (-0.5, 0.5, 0), (-0.5, -0.5, 0), (0.5, -0.5, 0), and normal
// (0, 0, 1).
//
// Your goal here is to fill ray.intersection with correct values
// should an intersection occur. This includes intersection.point,
// intersection.normal, intersection.none, intersection.t_value.
//
// HINT: Remember to first transform the ray into object space
// to simplify the intersection test.
Vector3D normal(0,0,1);
Point3D new_origin = worldToModel * ray.origin;
Vector3D new_dir = worldToModel * ray.dir;
//backface cull; modify this if necessary
if(new_origin.m_data[2] * new_dir.m_data[2] > 0) return false;
double scale_factor = -new_origin.m_data[2]/new_dir.m_data[2];
Vector3D scaled_dir = scale_factor*new_dir;
Point3D POI = new_origin + scaled_dir;
if(std::abs(POI.m_data[0]) > 0.5 || std::abs(POI.m_data[1]) > 0.5) return false; //make sure it's not outside the bounds
if(ray.intersection.none == true || distance(ray.intersection.point, ray.origin) > distance(modelToWorld * POI, ray.origin)) {
ray.intersection.point = modelToWorld * POI;
ray.intersection.untransformed = POI;
ray.intersection.none = false;
ray.intersection.normal = transNorm(worldToModel,normal);
ray.intersection.normal.normalize();
return true;
}
else return false;
}
bool UnitSphere::intersect( Ray3D& ray, const Matrix4x4& worldToModel,
const Matrix4x4& modelToWorld ) {
// TODO: implement intersection code for UnitSphere, which is centred
// on the origin.
//
// Your goal here is to fill ray.intersection with correct values
// should an intersection occur. This includes intersection.point,
// intersection.normal, intersection.none, intersection.t_value.
//
// HINT: Remember to first transform the ray into object space
// to simplify the intersection test.
//not sure if this is right
Point3D original_origin = ray.origin;
ray.origin = worldToModel * ray.origin;
ray.dir = worldToModel * ray.dir;
Point3D origin(0,0,0);
Vector3D o_vec = ray.origin - origin;
Vector3D d_vec = ray.dir;
double d;
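	// Substituting the ray p(t) = o + t*d into the unit sphere |p|^2 = 1 gives the quadratic
	// (d.d)t^2 + 2(o.d)t + (o.o - 1) = 0; d1 and d2 below are its two roots from the quadratic formula.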
//Equation of the intersection of a line and a sphere, we don't need both intersections, just the nearest one
double d1 = (-(d_vec.dot(o_vec)) - std::sqrt( std::pow(d_vec.dot(o_vec),2) - d_vec.dot(d_vec) * ( o_vec.dot(o_vec) - 1)))/(d_vec.dot(d_vec));
double d2 = (-(d_vec.dot(o_vec)) + std::sqrt( std::pow(d_vec.dot(o_vec),2) - d_vec.dot(d_vec) * ( o_vec.dot(o_vec) - 1)))/(d_vec.dot(d_vec));
//This is true if d is NaN
if(d1!=d1 || d2!=d2)
{
ray.dir = modelToWorld * ray.dir;
ray.origin = original_origin;
return false;
}
//We want the smaller intersection
if(d1 < d2)
d = d1;
else
d = d2;
//Intersection is behind
if(d < 0)
{
ray.dir = modelToWorld * ray.dir;
ray.origin = original_origin;
return false;
}
Point3D temp = ray.origin + d * ray.dir;
if(ray.intersection.none == true || distance(ray.intersection.point, original_origin) > distance(modelToWorld * temp, original_origin)) {
ray.intersection.point = modelToWorld * temp;
ray.intersection.untransformed = temp;
ray.intersection.none = false;
ray.intersection.normal = transNorm(worldToModel, temp-origin);
ray.intersection.normal.normalize();
//restore
ray.dir = modelToWorld * ray.dir;
ray.origin = original_origin;
return true;
}
ray.dir = modelToWorld * ray.dir;
ray.origin = original_origin;
return false;
}
bool UnitCylinder::intersect( Ray3D& ray, const Matrix4x4& worldToModel,
const Matrix4x4& modelToWorld ) {
//UnitCylinder should have r=1, centered on the z-axis, going from z=-0.5 to z=0.5
Point3D new_origin = worldToModel * ray.origin;
Vector3D new_dir = worldToModel * ray.dir;
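	// Substituting the ray into the infinite cylinder x^2 + y^2 = 1 (z ignored) gives the quadratic
	// a*t^2 + b*t + c = 0 in the ray parameter t, with coefficients built from the x/y components below.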
double c = std::pow(new_origin.m_data[0], 2) + std::pow(new_origin.m_data[1], 2) - 1;
double b = 2*(new_origin.m_data[0]*new_dir.m_data[0] + new_origin.m_data[1]*new_dir.m_data[1]);
double a = std::pow(new_dir.m_data[0], 2) + std::pow(new_dir.m_data[1], 2);
if(a == 0) return false;
double discriminant = std::pow(b, 2) - 4*a*c;
if(discriminant < 0) return false;
//two roots
double scale_factor1 = (-b + std::sqrt(discriminant))/(2*a);
double scale_factor2 = (-b - std::sqrt(discriminant))/(2*a);
double scale_factor;
//chosen root must be positive because it is a ray and not a line, and it must be the closest one
if(scale_factor1 < 0 && scale_factor2 < 0) return false;
else if(scale_factor1 < 0) scale_factor = scale_factor2;
else if(scale_factor2 < 0) scale_factor = scale_factor1;
else scale_factor = (scale_factor1 < scale_factor2)? scale_factor1 : scale_factor2; //both are positive, so choose the smaller one
Vector3D scaled_dir = scale_factor*new_dir;
Point3D POI = new_origin + scaled_dir;
Vector3D normal(POI.m_data[0],POI.m_data[1],0); //
normal.normalize();
if(std::abs(POI.m_data[2]) > 0.5) return false; //make sure it's not outside the bounds
//std::cout << "This: " << (distance(modelToWorld * POI, ray.origin)) << std::endl;
if(ray.intersection.none == true || distance(ray.intersection.point, ray.origin) > distance(modelToWorld * POI, ray.origin)) {
//std::cout << "a: " << a << ", b: " << b << ", c: " << c << std::endl;
ray.intersection.point = modelToWorld * POI;
ray.intersection.none = false;
ray.intersection.normal = transNorm(worldToModel,normal);
ray.intersection.normal.normalize();
return true;
}
else return false;
}
bool UnitCircle::intersect( Ray3D& ray, const Matrix4x4& worldToModel,
const Matrix4x4& modelToWorld ) {
Vector3D normal(0,0,1);
Point3D new_origin = worldToModel * ray.origin;
Vector3D new_dir = worldToModel * ray.dir;
//backface cull; modify this if necessary
if(new_origin.m_data[2] * new_dir.m_data[2] > 0) return false;
double scale_factor = -new_origin.m_data[2]/new_dir.m_data[2];
Vector3D scaled_dir = scale_factor*new_dir;
Point3D POI = new_origin + scaled_dir;
if(std::sqrt(pow(POI.m_data[0],2)+pow(POI.m_data[1],2)) > 1) return false; //make sure it's not outside the bounds
if(normal.dot(new_dir) > 0)
return false;
if(ray.intersection.none == true || distance(ray.intersection.point, ray.origin) > distance(modelToWorld * POI, ray.origin)) {
ray.intersection.point = modelToWorld * POI;
ray.intersection.untransformed = POI;
ray.intersection.none = false;
ray.intersection.normal = transNorm(worldToModel,normal);
ray.intersection.normal.normalize();
return true;
}
else return false;
}
//intersection point is in object space; x and y values between -0.5 and 0.5
Colour UnitSquare::get_color( Point3D intersection ) {
unsigned long int width_index = (intersection.m_data[0] + 0.5)*this->i_width;
unsigned long int height_index = (intersection.m_data[1] + 0.5)*this->i_height;
unsigned long int array_index = width_index + i_width*height_index;
return Colour ((int)(rarray[0][array_index]), (int)(garray[0][array_index]), (int)(barray[0][array_index]));
}
Colour UnitSphere::get_color( Point3D intersection ) {
//placeholder
return Colour(1,0,0);
}
Colour UnitCylinder::get_color( Point3D intersection ) {
//placeholder
return Colour(1,0,0);
}
Colour UnitCircle::get_color( Point3D intersection ) {
//placeholder
return Colour(1,0,0);
}
bool SceneObject::t_map(char *filename) {
this->rarray = new unsigned char *[5000]; this->garray = new unsigned char *[5000]; this->barray = new unsigned char *[5000];
if(!bmp_read(filename, &(this->i_width), &(this->i_height), this->rarray, this->garray, this->barray)) this->t_mapped = true;
return this->t_mapped;
}
<file_sep>CC = g++
CFLAGS = -g -O2
LIBS = -lm
raytracer: raytracer.o util.o light_source.o scene_object.o bmp_io.o
$(CC) $(CFLAGS) -o raytracer \
raytracer.o util.o light_source.o scene_object.o bmp_io.o $(LIBS)
clean:
-rm -f core *.o
-rm raytracer
run:
make && ./raytracer && feh view1.bmp view2.bmp view3.bmp
gitclean:
git clean -f && git reset --hard HEAD && git pull
<file_sep>/***********************************************************
Starter code for Assignment 3
This code was originally written by <NAME> for
CSC418, SPRING 2005
implements light_source.h
***********************************************************/
#include <cmath>
#include "light_source.h"
//turn lighting components on or off
#define AMB 1
#define DIFF 1
#define SPEC 1
void PointLight::shade( Ray3D& ray, bool shouldshade, float phong_factor ) {
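	// Standard Phong shading: col += k_a*I_a + k_d*I_d*max(0, N.L) + k_s*I_s*max(0, R.V)^n.
	// Each term can be toggled with the AMB/DIFF/SPEC defines above; the diffuse and specular
	// terms are scaled by phong_factor so shadows cast by transparent occluders are softened.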
Vector3D light_to_intersection = ray.intersection.point - this->_pos;
light_to_intersection.normalize();
double intersection_dot_normal = -light_to_intersection.dot(ray.intersection.normal);
Vector3D light_dir = _pos - ray.intersection.point;
light_dir.normalize();
Vector3D reflect_dir = 2 * (light_dir.dot(ray.intersection.normal)) * ray.intersection.normal - light_dir;
reflect_dir.normalize();
Vector3D view_dir = -ray.dir;
view_dir.normalize();
#if AMB
ray.col = ray.col + ray.intersection.mat->ambient*this->_col_ambient;
#endif
if(shouldshade)
{
#if DIFF
if(intersection_dot_normal > 0)
ray.col = ray.col + phong_factor*intersection_dot_normal*ray.intersection.mat->diffuse*this->_col_diffuse;
#endif
#if SPEC
double specular_component = view_dir.dot(reflect_dir);
if(specular_component > 0)
{
double specular_component_full = std::pow(specular_component,ray.intersection.mat->specular_exp);
ray.col = ray.col + phong_factor*specular_component_full*ray.intersection.mat->specular*this->_col_specular;
}
#endif
}
ray.reflect_dir = ray.dir - 2*(ray.dir.dot(ray.intersection.normal)) * ray.intersection.normal;
ray.reflect_dir.normalize();
//ray.bounce = true;
ray.col.clamp();
}
<file_sep>/***********************************************************
Starter code for Assignment 3
This code was originally written by <NAME> for
CSC418, SPRING 2005
Implementations of functions in raytracer.h,
and the main function which specifies the
scene to be rendered.
***********************************************************/
#include "raytracer.h"
#include "bmp_io.h"
#include <cmath>
#include <iostream>
#include <cstdlib>
#define AA 1
#define REFL 1
#define MAX_REFLECT_BOUNCES 4
#define MAX_REFRAC_BOUNCES 4
#define MAX_GLOSSINESS_RAYS 8
#define NUM_FRAMES 1
int frames_rendered = 1;
Raytracer::Raytracer() : _lightSource(NULL) {
_root = new SceneDagNode();
}
Raytracer::~Raytracer() {
delete _root;
}
SceneDagNode* Raytracer::addObject( SceneDagNode* parent,
SceneObject* obj, Material* mat ) {
SceneDagNode* node = new SceneDagNode( obj, mat );
node->parent = parent;
node->next = NULL;
node->child = NULL;
// Add the object to the parent's child list, this means
// whatever transformation applied to the parent will also
// be applied to the child.
if (parent->child == NULL) {
parent->child = node;
}
else {
parent = parent->child;
while (parent->next != NULL) {
parent = parent->next;
}
parent->next = node;
}
	return node;
}
LightListNode* Raytracer::addLightSource( LightSource* light ) {
LightListNode* tmp = _lightSource;
_lightSource = new LightListNode( light, tmp );
return _lightSource;
}
void Raytracer::rotate( SceneDagNode* node, char axis, double angle ) {
Matrix4x4 rotation;
double toRadian = 2*M_PI/360.0;
int i;
for (i = 0; i < 2; i++) {
switch(axis) {
case 'x':
rotation[0][0] = 1;
rotation[1][1] = cos(angle*toRadian);
rotation[1][2] = -sin(angle*toRadian);
rotation[2][1] = sin(angle*toRadian);
rotation[2][2] = cos(angle*toRadian);
rotation[3][3] = 1;
break;
case 'y':
rotation[0][0] = cos(angle*toRadian);
rotation[0][2] = sin(angle*toRadian);
rotation[1][1] = 1;
rotation[2][0] = -sin(angle*toRadian);
rotation[2][2] = cos(angle*toRadian);
rotation[3][3] = 1;
break;
case 'z':
rotation[0][0] = cos(angle*toRadian);
rotation[0][1] = -sin(angle*toRadian);
rotation[1][0] = sin(angle*toRadian);
rotation[1][1] = cos(angle*toRadian);
rotation[2][2] = 1;
rotation[3][3] = 1;
break;
}
if (i == 0) {
node->trans = node->trans*rotation;
angle = -angle;
}
else {
node->invtrans = rotation*node->invtrans;
}
}
}
//Cheap pseudo-random helper: returns an integer in [-5000, 4999]
int randomest_number()
{
	//std::rand() is not a great generator, but it is simple and good enough for jittering glossy reflection rays
int rand = (std::rand()) % 10000;
return rand-5000;
}
void Raytracer::translate( SceneDagNode* node, Vector3D trans ) {
Matrix4x4 translation;
translation[0][3] = trans[0];
translation[1][3] = trans[1];
translation[2][3] = trans[2];
node->trans = node->trans*translation;
translation[0][3] = -trans[0];
translation[1][3] = -trans[1];
translation[2][3] = -trans[2];
node->invtrans = translation*node->invtrans;
}
void Raytracer::scale( SceneDagNode* node, Point3D origin, double factor[3] ) {
Matrix4x4 scale;
scale[0][0] = factor[0];
scale[0][3] = origin[0] - factor[0] * origin[0];
scale[1][1] = factor[1];
scale[1][3] = origin[1] - factor[1] * origin[1];
scale[2][2] = factor[2];
scale[2][3] = origin[2] - factor[2] * origin[2];
node->trans = node->trans*scale;
scale[0][0] = 1/factor[0];
scale[0][3] = origin[0] - 1/factor[0] * origin[0];
scale[1][1] = 1/factor[1];
scale[1][3] = origin[1] - 1/factor[1] * origin[1];
scale[2][2] = 1/factor[2];
scale[2][3] = origin[2] - 1/factor[2] * origin[2];
node->invtrans = scale*node->invtrans;
}
Matrix4x4 Raytracer::initInvViewMatrix( Point3D eye, Vector3D view,
Vector3D up ) {
Matrix4x4 mat;
Vector3D w;
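	// Build an orthonormal camera basis: make up perpendicular to view (Gram-Schmidt),
	// take w = view x up, then pack (w, up, -view) and the eye position into the columns
	// of the resulting view-to-world matrix.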
view.normalize();
up = up - up.dot(view)*view;
up.normalize();
w = view.cross(up);
mat[0][0] = w[0];
mat[1][0] = w[1];
mat[2][0] = w[2];
mat[0][1] = up[0];
mat[1][1] = up[1];
mat[2][1] = up[2];
mat[0][2] = -view[0];
mat[1][2] = -view[1];
mat[2][2] = -view[2];
mat[0][3] = eye[0];
mat[1][3] = eye[1];
mat[2][3] = eye[2];
return mat;
}
void Raytracer::traverseScene( SceneDagNode* node, Ray3D& ray ) {
SceneDagNode *childPtr;
// Applies transformation of the current node to the global
// transformation matrices.
_modelToWorld = _modelToWorld*node->trans;
_worldToModel = node->invtrans*_worldToModel;
if (node->obj) {
// Perform intersection.
if (node->obj->intersect(ray, _worldToModel, _modelToWorld)) {
ray.intersection.mat = node->mat;
if(node->obj->t_mapped)
{
//replace the colors of the material with those found in the texture map
Colour map_colour = node->obj->get_color(ray.intersection.untransformed);
				ray.intersection.mat->diffuse = map_colour;
//std::cout << ray.intersection.mat->ambient << "\n\n";
}
}
}
// Traverse the children.
childPtr = node->child;
while (childPtr != NULL) {
traverseScene(childPtr, ray);
childPtr = childPtr->next;
}
// Removes transformation of the current node from the global
// transformation matrices.
_worldToModel = node->trans*_worldToModel;
_modelToWorld = _modelToWorld*node->invtrans;
}
void Raytracer::computeShading( Ray3D& ray ) {
LightListNode* curLight = _lightSource;
for (;;) {
if (curLight == NULL) break;
// Implement shadows here if needed.
Point3D light_pos = curLight->light->get_position();
Point3D intersection_point = ray.intersection.point;
Vector3D shadow_dir = light_pos - intersection_point;
shadow_dir.normalize();
Ray3D shadow_ray(intersection_point + .1*shadow_dir,shadow_dir);
shadow_ray.dir.normalize();
traverseScene(_root, shadow_ray);
Point3D shadow_intersection_point = shadow_ray.intersection.point;
//Disables specular/diffuse lighting when false
bool use_phong = false;
float phong_factor = 1.0;
//Checks if the ray pointing towards the light hits another object, or a object far away
if(shadow_ray.intersection.none || distance(intersection_point,light_pos) < distance(shadow_intersection_point,intersection_point))
use_phong = true;
if(!shadow_ray.intersection.none && shadow_ray.intersection.mat->transparency)
{
use_phong = true;
phong_factor = shadow_ray.intersection.mat->transparency;
}
curLight->light->shade(ray,use_phong,phong_factor);
curLight = curLight->next;
}
}
void Raytracer::initPixelBuffer() {
int numbytes = _scrWidth * _scrHeight * sizeof(unsigned char);
_rbuffer = new unsigned char[numbytes];
_gbuffer = new unsigned char[numbytes];
_bbuffer = new unsigned char[numbytes];
for (int i = 0; i < _scrHeight; i++) {
for (int j = 0; j < _scrWidth; j++) {
_rbuffer[i*_scrWidth+j] = 0;
_gbuffer[i*_scrWidth+j] = 0;
_bbuffer[i*_scrWidth+j] = 0;
}
}
}
void Raytracer::flushPixelBuffer( char *file_name ) {
bmp_write( file_name, _scrWidth, _scrHeight, _rbuffer, _gbuffer, _bbuffer );
delete _rbuffer;
delete _gbuffer;
delete _bbuffer;
}
Colour Raytracer::shadeRay( Ray3D& ray ) {
Colour col(0.0, 0.0, 0.0);
traverseScene(_root, ray);
// Don't bother shading if the ray didn't hit
// anything.
if (!ray.intersection.none) {
computeShading(ray);
col = 1.1*ray.col;
//For the glossiness
float random = 0.0;
int i;
for ( i = 0; i < MAX_GLOSSINESS_RAYS; i++)
{
if(ray.intersection.mat->glossiness)
{
if( i == 0)
col = ((1-ray.intersection.mat->reflectiveness))*col;
random = ray.intersection.mat->glossiness * randomest_number()/50000.0;
}
else //don't shoot any glossiness rays because the material isn't glossy
i = MAX_GLOSSINESS_RAYS;
//Could probably be done better
Vector3D reflection_dir= ray.reflect_dir + ( Vector3D(random,random,random));
reflection_dir.normalize();
Ray3D reflection((ray.intersection.point + (.01*reflection_dir)), reflection_dir);
reflection.reflect_bounce = ray.reflect_bounce + 1; //make sure we finish sometime
if(reflection.reflect_bounce <= MAX_REFLECT_BOUNCES) {
Colour colorReflect = 1.1*ray.intersection.mat->reflectiveness*shadeRay(reflection);
colorReflect.clamp();
if(!ray.intersection.mat->glossiness)
col = ((1.-ray.intersection.mat->reflectiveness)*col + colorReflect);
else
{
col[0] += (colorReflect)[0]/MAX_GLOSSINESS_RAYS;
col[1] += (colorReflect)[1]/MAX_GLOSSINESS_RAYS;
col[2] += (colorReflect)[2]/MAX_GLOSSINESS_RAYS;
}
}
col.clamp();
}
if(ray.intersection.mat->refrac_ratio)
{
double n_mat_in;
double n_mat_out;
//snells law
//We are going out of the material
if ( ray.intersection.normal.dot(ray.dir) > 0)
{
n_mat_in = 1;
n_mat_out = ray.intersection.mat->refrac_ratio;
}
else //We are going in the material
{
n_mat_in = ray.intersection.mat->refrac_ratio;
n_mat_out = 1;
}
double cosT1 = -ray.intersection.normal.dot(ray.dir);
double sinT2 = n_mat_in / n_mat_out * std::sqrt(1 - std::pow(cosT1,2));
double cosT2 = std::sqrt(1 - std::pow(sinT2,2));
double T2 = std::asin(sinT2);
Vector3D refrac_dir = (n_mat_in / n_mat_out) * ray.dir + ((n_mat_in / n_mat_out)*cosT1 - cosT2) * ray.intersection.normal;
refrac_dir.normalize();
Ray3D refraction(ray.intersection.point + (.01*refrac_dir), refrac_dir);
refraction.refrac_bounce = ray.refrac_bounce + 1;
refraction.dir.normalize();
if(ray.intersection.mat->transparency && refraction.refrac_bounce <= MAX_REFRAC_BOUNCES)
{
Colour colorRefrac = shadeRay(refraction);
colorRefrac.clamp();
col = (1 - ray.intersection.mat->transparency) * col + ray.intersection.mat->transparency * colorRefrac;
}
}
col.clamp();
}
// You'll want to call shadeRay recursively (with a different ray,
// of course) here to implement reflection/refraction effects.
return col;
}
void Raytracer::render( int width, int height, Point3D eye, Vector3D view,
Vector3D up, double fov, char* fileName ) {
Matrix4x4 viewToWorld;
_scrWidth = width;
_scrHeight = height;
double factor = (double(height)/2)/tan(fov*M_PI/360.0);
if(frames_rendered == 1)
initPixelBuffer();
viewToWorld = initInvViewMatrix(eye, view, up);
//Sample 4 times per pixel for Anti Aliasing at (.25,.25),(.25,.75),(.75,.25),(.75,.75)
enum Sampleplace { TopLeft=0, TopRight, BottomRight, BottomLeft };
Sampleplace place = TopLeft;
//continue sampling
bool continueloop;
// Construct a ray for each pixel.
for (int i = 0; i < _scrHeight; i++) {
for (int j = 0; j < _scrWidth; j++) {
Colour col(0.0, 0.0, 0.0);
continueloop = true;
while(continueloop)
{
// Sets up ray origin and direction in view space,
// image plane is at z = -1.
Point3D origin(0, 0, 0);
Point3D imagePlane;
short y = 0; //vertical sub-pixel offset: 0 samples at .25, 1 samples at .75
short x = 0; //horizontal sub-pixel offset: 0 samples at .25, 1 samples at .75
if(place == TopLeft || place == TopRight)
y = 1;
if(place == TopRight || place == BottomRight)
x = 1;
/*
* couldn't get depth of field working
double rand_x = randomest_number()%5;
double rand_y = randomest_number()%5;
double DOF_distance = distance(DOF_point,eye);
std::cout << rand_x << " " << rand_y << std::endl;
*/
imagePlane[0] = (-double(width)/2 + 0.25 + 0.5*x + j)/factor;
imagePlane[1] = (-double(height)/2 + 0.25 + 0.5*y + i)/factor;
imagePlane[2] = -1;
//Shoot the ray
Ray3D ray(origin, imagePlane - origin);
ray.dir = viewToWorld * ray.dir;
ray.origin = viewToWorld * ray.origin;
ray.dir.normalize();
#if AA
//Temp result of the ray colour result
Colour col_temp = shadeRay(ray);
col[0] += col_temp[0]/4;
col[1] += col_temp[1]/4;
col[2] += col_temp[2]/4;
switch(place)
{
case TopLeft:
place = TopRight;
break;
case TopRight:
place = BottomRight;
break;
case BottomRight:
place = BottomLeft;
break;
case BottomLeft:
place = TopLeft;
continueloop=false;
//Save result
_rbuffer[i*width+j] += int(col[0]*255)/NUM_FRAMES;
_gbuffer[i*width+j] += int(col[1]*255)/NUM_FRAMES;
_bbuffer[i*width+j] += int(col[2]*255)/NUM_FRAMES;
break;
default:
std::cout << "what?" << std::endl;
break;
}
#else
col = shadeRay(ray);
continueloop = false;
_rbuffer[i*width+j] += int(col[0]*255)/NUM_FRAMES;
_gbuffer[i*width+j] += int(col[1]*255)/NUM_FRAMES;
_bbuffer[i*width+j] += int(col[2]*255)/NUM_FRAMES;
#endif
}
}
}
if(frames_rendered == NUM_FRAMES)
{
std::cout << "writing to file" << std::endl;
flushPixelBuffer(fileName);
}
frames_rendered++;
}
int main(int argc, char* argv[])
{
//For the glossy reflection
std::srand(time(NULL));
// Build your scene and setup your camera here, by calling
// functions from Raytracer. The code here sets up an example
// scene and renders it from two different view points, DO NOT
// change this if you're just implementing part one of the
// assignment.
Raytracer raytracer;
int width = 320;
int height = 240;
if (argc == 3) {
width = atoi(argv[1]);
height = atoi(argv[2]);
}
// Camera parameters.
Point3D eye(0, 0, 1);
Vector3D view(0, 0, -1);
Vector3D up(0, 1, 0);
double fov = 60;
// Defines a material for shading.
Material glass( Colour(0.3, 0.3, 0.3), Colour(0.9, 0.9, 0.9),
Colour(0.628281, 0.655802, 0.666065),
51.2, 0.85, 1.6, 0.9, 0.0 );
Material jade( Colour(0.2, 0.2, 0.2), Colour(0.54, 0.89, 0.63),
Colour(0.316228, 0.316228, 0.316228),
12.8, 0.0, 1.0, 0.15, 0.0 );
Material mirror( Colour(.5,.5,.5), Colour(0.5,0.5,0.5),
Colour(1.0,1.0,1.0),
10.0,0.0,1.0, .8, 0.0);
Material red( Colour(.6, .1, .1), Colour(0.7, 0.1, 0.1),
Colour(0.7, 0.6, 0.6),
12.8, 0.0, 1.0, 0.5, 1.0 );
Material blue( Colour(.5, .5, .9), Colour(0.2, 0.2, 0.8),
Colour(0.6, 0.6, 0.7),
12.8, 0.0, 1.0, 0.15, 0.3 );
Material teal( Colour(.3, .5, .5), Colour(0.24, 0.69, 0.63),
Colour(0.416228, 0.416228, 0.416228),
12.8, 0.0, 1.0, 0.15, 0.0 );
// Defines a point light source.
raytracer.addLightSource( new PointLight(Point3D(0, 0, 5),
Colour(0.9, 0.9, 0.9) ) );
// Add a unit square into the scene with material mat.
SceneDagNode* sphere = raytracer.addObject( new UnitSphere(), &glass );
SceneDagNode* plane = raytracer.addObject( new UnitSquare(), &jade );
SceneDagNode* plane2 = raytracer.addObject( new UnitSquare(), &blue );
SceneDagNode* plane3 = raytracer.addObject( new UnitSquare(), &mirror );
SceneDagNode* circle = raytracer.addObject( new UnitCircle(), &red );
SceneDagNode* circle2 = raytracer.addObject( new UnitCircle(), &red);
SceneDagNode* cylinder = raytracer.addObject( new UnitCylinder(), &red);
plane->obj->t_map("simple_texture.bmp");
// Apply some transformations to the unit square.
double factor1[3] = { 1.5, 1.5, 1.5 };
double factor2[3] = { 6.0, 6.0, 6.0 };
raytracer.translate(sphere, Vector3D(-.5, -.5, -4));
raytracer.rotate(cylinder, 'y', 45);
raytracer.rotate(circle, 'y', 45);
raytracer.rotate(circle2, 'y', 45);
raytracer.translate(cylinder, Vector3D(5, 1, -5.5));
raytracer.translate(circle, Vector3D(5, 1, -5));
raytracer.translate(circle2, Vector3D(5, 1, -6));
raytracer.rotate(circle2, 'y', 180);
raytracer.rotate(sphere, 'x', 45);
raytracer.rotate(sphere, 'z', 45);
raytracer.scale(sphere, Point3D(0, 0, 0), factor1);
raytracer.translate(plane, Vector3D(0, 0, -8.9));
raytracer.scale(plane, Point3D(0, 0, 0), factor2);
raytracer.translate(plane2, Vector3D(0, -3, -6));
raytracer.rotate(plane2, 'x', 90);
raytracer.scale(plane2, Point3D(0, 0, 0), factor2);
raytracer.translate(plane3, Vector3D(-3, 0, -6));
raytracer.rotate(plane3, 'y', 90);
raytracer.scale(plane3, Point3D(0, 0, 0), factor2);
// Render the scene, feel free to make the image smaller for
// testing purposes.
while(frames_rendered <= NUM_FRAMES)
{
std::cout << "rendering frame" << std::endl;
raytracer.render(width, height, eye, view, up, fov, "view1.bmp");
raytracer.translate(cylinder, Vector3D(0, .15, 0));
raytracer.translate(circle, Vector3D(0, .15, 0));
raytracer.translate(circle2, Vector3D(0, .15, 0));
}
raytracer.translate(cylinder, Vector3D(0, -.15*NUM_FRAMES, 0));
raytracer.translate(circle, Vector3D(0, -.15*NUM_FRAMES, 0));
raytracer.translate(circle2, Vector3D(0, -.15*NUM_FRAMES, 0));
Point3D eye2(4, 2, 1);
Vector3D view2(-4, -2, -6);
frames_rendered = 1;
while(frames_rendered <= NUM_FRAMES)
{// Render it from a different point of view.
std::cout << "Rendering second frame\n";
raytracer.render(width, height, eye2, view2, up, fov, "view2.bmp");
raytracer.translate(cylinder, Vector3D(0, .15, 0));
raytracer.translate(circle, Vector3D(0, .15, 0));
raytracer.translate(circle2, Vector3D(0, .15, 0));
}
frames_rendered = 1;
raytracer.translate(cylinder, Vector3D(0, -.15*NUM_FRAMES, 0));
raytracer.translate(circle, Vector3D(0, -.15*NUM_FRAMES, 0));
raytracer.translate(circle2, Vector3D(0, -.15*NUM_FRAMES, 0));
Point3D eye3(-1, 6, -2.5);
Vector3D view3(0, -1.5, -1);
while(frames_rendered <= NUM_FRAMES)
{
std::cout << "rendering third frame" << std::endl;
raytracer.render(width, height, eye3, view3, up, fov, "view3.bmp");
raytracer.translate(cylinder, Vector3D(0, .15, 0));
raytracer.translate(circle, Vector3D(0, .15, 0));
raytracer.translate(circle2, Vector3D(0, .15, 0));
}
return 0;
}
| c94a32fa7d26a996803c99ddf878698819b3bbf6 | [
"Makefile",
"C++"
] | 4 | C++ | eisenjar/Graphics_Raytracer | bdc51da89a91e448a61d99dcd78ad9f2477a8ce3 | 8ec97f3ab607ca53fc1fd0ce96abb5b60fc89635 | |
refs/heads/master | <file_sep>package org.fulib.yaml;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* A container for arbitrary attributes.
* Simple attributes can be set with {@link #put(String, Object)} and get with {@link #get(String)}.
* Collection attributes can be created with {@link #with(String, Object)}.
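 * <p>
 * A minimal usage sketch (ids, property names and values are chosen for illustration only):
 *
 * <pre><code>
 * YamlObject alice = new YamlObject("alice", "Person");
 * alice.put("name", "Alice");            // simple attribute
 * Object name = alice.get("name");       // "Alice"
 * alice.with("friends", "bob");          // collection attribute
 * </code></pre>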
*/
public class YamlObject
{
// =============== Constants ===============
public static final String ID = ".id";
public static final String TYPE = "type";
// =============== Fields ===============
private LinkedHashMap<String, Object> map = new LinkedHashMap<>();
// =============== Constructors ===============
public YamlObject()
{
// empty
}
/**
* @param id
* the {@link #setId(String) id}
*
* @since 1.2
*/
public YamlObject(String id)
{
this.setId(id);
}
/**
* @param id
* the {@link #setId(String) id}
* @param type
* the {@link #setType(String) type}
*/
public YamlObject(String id, String type)
{
this(id);
this.setType(type);
}
// =============== Properties ===============
/**
* @return the map of property names to property values
*
* @deprecated since 1.2; use {@link #getProperties()} instead
*/
@Deprecated
public LinkedHashMap<String, Object> getMap()
{
return this.map;
}
/**
* @return the map of property names to property values
*
* @since 1.2
*/
public Map<String, Object> getProperties()
{
return this.map;
}
/**
* Convenience getter for {@link #get(String) get}({@link #ID}).
* In particular, roughly implemented as
*
* <pre><code>
* final Object id = this.get(ID);
* return id != null ? id.toString() : null;
* </code></pre>
*
* @return the ID
*
* @since 1.2
*/
public String getId()
{
final Object id = this.map.get(ID);
return id != null ? id.toString() : null;
}
/**
* Convenience setter for {@link #put(String, Object) put}({@link #ID}, {@code id}).
*
* @param id
* the ID
*
* @since 1.2
*/
public void setId(String id)
{
this.map.put(ID, id);
}
/**
* Convenience getter for {@link #get(String) get}({@link #TYPE}).
* In particular, roughly implemented as
*
* <pre><code>
* final Object type = this.get(TYPE);
* return type != null ? type.toString() : null;
* </code></pre>
*
* @return the type
*
* @since 1.2
*/
public String getType()
{
final Object type = this.map.get(TYPE);
return type != null ? type.toString() : null;
}
/**
* Convenience setter for {@link #put(String, Object) put}({@link #TYPE}, {@code type}).
*
* @param type
* the type
*
* @since 1.2
*/
public void setType(String type)
{
this.map.put(TYPE, type);
}
// =============== Methods ===============
/**
* @param property
* the property name
*
* @return the attribute value for the given property name, or {@code null} if not specified
*/
public Object get(String property)
{
return this.map.get(property);
}
/**
* Sets the attribute value for the given property name.
*
* @param property
* the property name
* @param value
* the attribute value
*
* @return this instance, to allow method chaining
*/
public YamlObject put(String property, Object value)
{
this.map.put(property, value);
return this;
}
/**
* Adds the item to the collection attribute with the given property name.
* In particular, the implementation is as follows:
*
* <ol>
 * <li>If the property is not set, create a new list containing only the {@code item} and set it as the
 * attribute value</li>
* <li>If the property is already set and the value is a {@link List}, add the {@code item} to that list</li>
* <li>If the property is already set and the value is not a {@link List},
* create a new list with the old value and the {@code item} and set that as the new value</li>
* </ol>
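 *
 * <p>
 * An illustrative sketch of the three cases (property names and values chosen arbitrarily):
 *
 * <pre><code>
 * YamlObject obj = new YamlObject();
 * obj.with("items", "a");   // case 1: "items" was unset, get("items") is now the list [a]
 * obj.with("items", "b");   // case 2: the value was a list, get("items") is now [a, b]
 * obj.put("other", "x");
 * obj.with("other", "y");   // case 3: the value was not a list, get("other") becomes the list [x, y]
 * </code></pre>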
*
* @param property
* the property name
* @param item
* the item to add or set
*
* @return this instance, to allow method chaining
*/
public YamlObject with(String property, Object item)
{
this.map.compute(property, (k, oldValue) -> {
if (oldValue == null) // not yet present
{
final List<Object> list = new ArrayList<>();
list.add(item);
return list;
}
final List<Object> list;
if (oldValue instanceof List) // old value was a list
{
list = (List<Object>) oldValue;
}
else // old value was an object
{
list = new ArrayList<>();
list.add(oldValue);
}
list.add(item);
return list;
});
return this;
}
@Override
public String toString()
{
final String id = this.getId();
return id != null ? id : super.toString();
}
}
<file_sep>plugins {
id 'java'
id 'maven-publish'
// https://plugins.gradle.org/plugin/com.palantir.revapi
id 'com.palantir.revapi' version '1.4.4'
// https://plugins.gradle.org/plugin/com.bmuschko.nexus
id 'com.bmuschko.nexus' version '2.3.1'
// https://plugins.gradle.org/plugin/io.codearte.nexus-staging
id 'io.codearte.nexus-staging' version '0.22.0'
}
group = 'org.fulib'
version = 'git describe --tags'.execute().text[1..-2] // strip v and \n
description = 'Yaml serialization for fulib object models. Used for storage and communication.'
modifyPom {
project {
name = project.name
description = project.description
url = 'https://github.com/fujaba/fulibYaml'
inceptionYear = '2018'
scm {
url = 'https://github.com/fujaba/fulibYaml'
}
licenses {
license {
name = 'MIT License'
url = 'http://www.opensource.org/licenses/mit-license.php'
}
}
developers {
developer {
id = 'zuendorf'
name = '<NAME>'
}
developer {
id = 'digitalhoax'
name = '<NAME>'
}
developer {
id = 'eicke123'
name = '<NAME>'
}
developer {
id = 'clashsoft'
name = '<NAME>'
}
}
}
}
sourceCompatibility = 1.8
// --------------- Dependencies ---------------
repositories {
mavenLocal()
mavenCentral()
}
dependencies {
// https://mvnrepository.com/artifact/org.hamcrest/hamcrest-library
testImplementation group: 'org.hamcrest', name: 'hamcrest-library', version: '2.2'
// https://mvnrepository.com/artifact/org.junit.jupiter/junit-jupiter-api
testImplementation group: 'org.junit.jupiter', name: 'junit-jupiter-api', version: '5.7.1'
// https://mvnrepository.com/artifact/org.junit.jupiter/junit-jupiter-engine
testRuntimeOnly group: 'org.junit.jupiter', name: 'junit-jupiter-engine', version: '5.7.1'
}
// --------------- Misc. Settings ---------------
test {
useJUnitPlatform()
}
revapi {
oldVersion = '1.4.0'
}
<file_sep>package org.fulib.yaml.testmodel.subpackage;
public class Room
{
private String id;
public String getId()
{
return this.id;
}
public Room setId(String id)
{
this.id = id;
return this;
}
}
<file_sep>package org.fulib.yaml;
import java.beans.PropertyChangeEvent;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.LinkedHashMap;
/**
 * Encodes {@link PropertyChangeEvent}s as YAML event records and applies such records back onto an object model.
 */
public class EventYamler
{
public static final String TIME = "time";
public static final String SOURCE = "source";
public static final String SOURCE_TYPE = "sourceType";
public static final String PROPERTY = "property";
public static final String OLD_VALUE = "oldValue";
public static final String OLD_VALUE_TYPE = OLD_VALUE + "Type";
public static final String NEW_VALUE = "newValue";
public static final String NEW_VALUE_TYPE = NEW_VALUE + "Type";
public static final String HISTORY_KEY = "historyKey";
private YamlIdMap yamlIdMap;
public EventYamler(String packageName)
{
this(new YamlIdMap(packageName));
}
/**
 * @param idMap
 *    the {@link YamlIdMap} used to assign object keys
 *
 * @since 1.2
 */
public EventYamler(YamlIdMap idMap)
{
this.yamlIdMap = idMap;
}
/**
 * @return the underlying {@link YamlIdMap}
 *
 * @since 1.2
 */
public YamlIdMap getYamlIdMap()
{
return this.yamlIdMap;
}
public EventYamler setYamlIdMap(YamlIdMap yamlIdMap)
{
this.yamlIdMap = yamlIdMap;
return this;
}
public String encode(PropertyChangeEvent e)
{
Object source = e.getSource();
StringBuilder buf = new StringBuilder("- ");
long timeMillis = System.currentTimeMillis();
Date date = new Date(timeMillis);
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS");
String timeString = dateFormat.format(date);
buf.append(TIME + ": ").append(timeString).append("\n");
String sourceKey = this.yamlIdMap.putObject(source);
buf.append(" " + SOURCE + ": ").append(sourceKey).append("\n");
String className = source.getClass().getSimpleName();
buf.append(" " + SOURCE_TYPE + ": ").append(className).append("\n");
String prop = e.getPropertyName();
buf.append(" " + PROPERTY + ": ").append(prop).append("\n");
String historyKey = sourceKey + "/" + prop;
Object oldValue = e.getOldValue();
if (oldValue != null)
{
Class<?> valueClass = oldValue.getClass();
if (valueClass == String.class)
{
String encapsulated = YamlGenerator.encapsulate((String) oldValue);
buf.append(" " + OLD_VALUE + ": ").append(encapsulated).append("\n");
}
else if (valueClass.getName().startsWith("java.lang."))
{
buf.append(" " + OLD_VALUE + ": ").append(oldValue).append("\n");
}
else
{
String valueKey = this.yamlIdMap.putObject(oldValue);
buf.append(" " + OLD_VALUE + ": ").append(valueKey).append("\n");
historyKey += "/" + valueKey;
className = oldValue.getClass().getSimpleName();
buf.append(" " + OLD_VALUE_TYPE + ": ").append(className).append("\n");
}
}
Object newValue = e.getNewValue();
if (newValue != null)
{
Class<?> valueClass = newValue.getClass();
if (valueClass == String.class)
{
String encapsulated = YamlGenerator.encapsulate((String) newValue);
buf.append(" " + NEW_VALUE + ": ").append(encapsulated).append("\n");
}
else if (valueClass.getName().startsWith("java.lang."))
{
buf.append(" " + NEW_VALUE + ": ").append(newValue).append("\n");
}
else
{
String valueKey = this.yamlIdMap.putObject(newValue);
buf.append(" " + NEW_VALUE + ": ").append(valueKey).append("\n");
// use the reflector of the event source; className may have been reassigned to the old value's type above
Reflector reflector = this.yamlIdMap.getReflector(source.getClass().getSimpleName());
Object attrValue = reflector.getValue(source, prop);
if (attrValue != null && Collection.class.isAssignableFrom(attrValue.getClass()))
{
historyKey += "/" + valueKey;
}
className = newValue.getClass().getSimpleName();
buf.append(" " + NEW_VALUE_TYPE + ": ").append(className).append("\n");
}
}
buf.append(" " + HISTORY_KEY + ": ").append(historyKey).append("\n");
buf.append("\n");
return buf.toString();
}
public Object decode(Object rootObject, String content)
{
Yamler yamler = new Yamler();
ArrayList<LinkedHashMap<String, String>> list = yamler.decodeList(content);
String firstKey = null;
for (LinkedHashMap<String, String> map : list)
{
// execute change
String sourceKey = map.get(SOURCE);
if (firstKey == null)
{
firstKey = sourceKey;
Object oldObject = this.yamlIdMap.getObject(firstKey);
if (oldObject == null)
{
this.yamlIdMap.putNameObject(firstKey, rootObject);
}
}
Object sourceObject = this.yamlIdMap.getObject(sourceKey);
String className = map.get(SOURCE_TYPE);
Reflector reflector = this.yamlIdMap.getReflector(className);
if (sourceObject == null)
{
sourceObject = reflector.newInstance();
this.yamlIdMap.putNameObject(sourceKey, sourceObject);
}
String property = map.get(PROPERTY);
String newValue = map.get(NEW_VALUE);
String newValueType = map.get(NEW_VALUE_TYPE);
if (newValueType == null)
{
reflector.setValue(sourceObject, property, newValue);
}
else
{
Object newValueObject = this.yamlIdMap.getObject(newValue);
if (newValueObject == null)
{
Reflector newValueReflector = this.yamlIdMap.getReflector(newValueType);
newValueObject = newValueReflector.newInstance();
this.yamlIdMap.putNameObject(newValue, newValueObject);
}
reflector.setValue(sourceObject, property, newValueObject);
}
}
return this.yamlIdMap.getObject(firstKey);
}
}
<file_sep># fulibYaml v1.0.0
# fulibYaml v1.0.1
# fulibYaml v1.0.2
# fulibYaml v1.0.3
# fulibYaml v1.1.0
* Bumped version number.
# fulibYaml v1.2.0
## New Features
+ Added the `ReflectorMap.canReflect` and `.discoverObjects` methods.
+ Added the `Reflector.getOwnProperties` and `.getAllProperties` methods.
+ Added the `YamlGenerator` class, a more streamlined API for converting events to YAML.
+ Added the `IdMap` class.
## Bugfixes
* Fixed an exception when `YamlIdMap` discovers objects of unknown types. #15
## Improvements
* `YamlIdMap` no longer appends auto-incremented numbers to IDs if not necessary. #16
## General
* Transitioned many APIs from concrete implementation types like `ArrayList` or `LinkedHashMap` to their respective
interfaces like `List` or `Map`.
> In places where this could not be done in-place, new APIs were added and the old ones deprecated.
> See the respective Javadocs for migration info.
* Deprecated some misplaced or accidentally public APIs.
> Check for deprecation warnings and see the respective Javadocs for migration info.
* General code cleanup and minor optimizations.
# fulibYaml v1.2.1
## Bugfixes
* The `ReflectorMap.discoverObjects` methods now use breadth-first search.
> This restores the previous order in which `YamlIdMap`s were serialized.
# fulibYaml v1.3.0
## New Features
+ Added support for enums in property values. #12
+ Added support for `byte`, `short`, `char` and corresponding wrapper types in property values.
+ Added the `YamlGenerator.quote` methods.
## Improvements
* The `Reflector.getValue` and `.setValue` methods no longer silently ignore exceptions within the invoked methods.
* The `Reflector.getValue` and `.setValue` methods now cache target methods to improve performance.
* Changed the algorithm used by `ReflectorMap.setValue`.
> It now tries to invoke all *public* `set` and `with` methods in the order in which they are in the bytecode.
> Coercions (varargs, string to primitives) are performed as needed.
> Note that it does not perform full overload resolution, so results may differ from calling the appropriate setter normally.
* The `Reflector.setValue` method now supports the `with` overloads generated by Fulib v1.2.
* The `YamlIdMap.encode` method can now handle collections of values.
## Bugfixes
* Fixed the `YamlGenerator.encapsulate(String, Appendable)` method appending the value twice.
# fulibYaml v1.4.0
## New Features
+ Added the `Reflector.removeValue` method for removing links and multi-attribute values using `without` methods. #18 #20
+ `YamlIdMap` now also serializes properties from superclasses and interfaces. #24
## Bugfixes
* Fixed an exception when serializing an object with an empty ID. #21 #22
* `Reflector.getOwnProperties` no longer returns properties from superclasses and interfaces. #23
# fulibYaml v1.5.0
## New Features
+ Added the `Yaml` class with a new simple API. #25
## Improvements
* `ReflectorMap` can now resolve fully qualified class names. #26
* `YamlIdMap` now uses fully qualified class names for encoding. #26
<file_sep>package org.fulib.yaml.testmodel;
public class Person
{
private String name;
private Day favoriteDay;
public String getName()
{
return this.name;
}
public void setName(String name)
{
this.name = name;
}
public Day getFavoriteDay()
{
return this.favoriteDay;
}
public void setFavoriteDay(Day favoriteDay)
{
this.favoriteDay = favoriteDay;
}
}
<file_sep>package org.fulib.yaml.testmodel;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
public class Offer
{
public static final String PROPERTY_id = "id";
private String id;
public String getId()
{
return id;
}
public Offer setId(String value)
{
if (value == null ? this.id != null : ! value.equals(this.id))
{
String oldValue = this.id;
this.id = value;
firePropertyChange("id", oldValue, value);
}
return this;
}
public static final String PROPERTY_price = "price";
private double price;
public double getPrice()
{
return price;
}
public Offer setPrice(double value)
{
if (value != this.price)
{
double oldValue = this.price;
this.price = value;
firePropertyChange("price", oldValue, value);
}
return this;
}
public static final String PROPERTY_startTime = "startTime";
private String startTime;
public String getStartTime()
{
return startTime;
}
public Offer setStartTime(String value)
{
if (value == null ? this.startTime != null : ! value.equals(this.startTime))
{
String oldValue = this.startTime;
this.startTime = value;
firePropertyChange("startTime", oldValue, value);
}
return this;
}
public static final String PROPERTY_endTime = "endTime";
private String endTime;
public String getEndTime()
{
return endTime;
}
public Offer setEndTime(String value)
{
if (value == null ? this.endTime != null : ! value.equals(this.endTime))
{
String oldValue = this.endTime;
this.endTime = value;
firePropertyChange("endTime", oldValue, value);
}
return this;
}
protected PropertyChangeSupport listeners = null;
public boolean firePropertyChange(String propertyName, Object oldValue, Object newValue)
{
if (listeners != null)
{
listeners.firePropertyChange(propertyName, oldValue, newValue);
return true;
}
return false;
}
public boolean addPropertyChangeListener(PropertyChangeListener listener)
{
if (listeners == null)
{
listeners = new PropertyChangeSupport(this);
}
listeners.addPropertyChangeListener(listener);
return true;
}
public boolean addPropertyChangeListener(String propertyName, PropertyChangeListener listener)
{
if (listeners == null)
{
listeners = new PropertyChangeSupport(this);
}
listeners.addPropertyChangeListener(propertyName, listener);
return true;
}
public boolean removePropertyChangeListener(PropertyChangeListener listener)
{
if (listeners != null)
{
listeners.removePropertyChangeListener(listener);
}
return true;
}
public boolean removePropertyChangeListener(String propertyName,PropertyChangeListener listener)
{
if (listeners != null)
{
listeners.removePropertyChangeListener(propertyName, listener);
}
return true;
}
@Override
public String toString()
{
StringBuilder result = new StringBuilder();
result.append(" ").append(this.getId());
result.append(" ").append(this.getStartTime());
result.append(" ").append(this.getEndTime());
return result.substring(1);
}
public static final String PROPERTY_product = "product";
private Product product = null;
public Product getProduct()
{
return this.product;
}
public Offer setProduct(Product value)
{
if (this.product != value)
{
Product oldValue = this.product;
if (this.product != null)
{
this.product = null;
oldValue.withoutOffers(this);
}
this.product = value;
if (value != null)
{
value.withOffers(this);
}
firePropertyChange("product", oldValue, value);
}
return this;
}
public void removeYou()
{
this.setProduct(null);
this.withoutOrders(this.getOrders().clone());
}
public static final java.util.ArrayList<OrderPosition> EMPTY_orders = new java.util.ArrayList<OrderPosition>()
{ @Override public boolean add(OrderPosition value){ throw new UnsupportedOperationException("No direct add! Use xy.withOrders(obj)"); }};
public static final String PROPERTY_orders = "orders";
private java.util.ArrayList<OrderPosition> orders = null;
public java.util.ArrayList<OrderPosition> getOrders()
{
if (this.orders == null)
{
return EMPTY_orders;
}
return this.orders;
}
public Offer withOrders(Object... value)
{
if(value==null) return this;
for (Object item : value)
{
if (item == null) continue;
if (item instanceof java.util.Collection)
{
for (Object i : (java.util.Collection) item)
{
this.withOrders(i);
}
}
else if (item instanceof OrderPosition)
{
if (this.orders == null)
{
this.orders = new java.util.ArrayList<OrderPosition>();
}
if ( ! this.orders.contains(item))
{
this.orders.add((OrderPosition)item);
((OrderPosition)item).setOffer(this);
firePropertyChange("orders", null, item);
}
}
else throw new IllegalArgumentException();
}
return this;
}
public Offer withoutOrders(Object... value)
{
if (this.orders == null || value==null) return this;
for (Object item : value)
{
if (item == null) continue;
if (item instanceof java.util.Collection)
{
for (Object i : (java.util.Collection) item)
{
this.withoutOrders(i);
}
}
else if (item instanceof OrderPosition)
{
if (this.orders.contains(item))
{
this.orders.remove((OrderPosition)item);
((OrderPosition)item).setOffer(null);
firePropertyChange("orders", item, null);
}
}
}
return this;
}
}
<file_sep>package org.fulib.yaml;
import java.util.Arrays;
import java.util.Map;
/**
 * Simple YAML encoding and decoding of object models.
*
* @since 1.5
*/
public class Yaml
{
private Yaml()
{
// no instances
}
/**
 * Encodes the given objects and all objects reachable from them as YAML.
 * The package of each root object is used to discover the model classes via reflection.
 *
 * @param objects
 *    the root objects to encode
 *
 * @return the YAML representation
 */
public static String encode(Object... objects)
{
final String[] packageNames = Arrays
.stream(objects)
.map(Object::getClass)
.map(Class::getPackage)
.map(Package::getName)
.toArray(String[]::new);
return new YamlIdMap(packageNames).encode(objects);
}
/**
 * Decodes the objects contained in the given YAML string.
 *
 * @param yaml
 *    the YAML string to decode
 *
 * @return a map from object ids to the decoded objects
 */
public static Map<String, Object> decode(String yaml)
{
final YamlIdMap idMap = new YamlIdMap();
idMap.decode(yaml);
return idMap.getObjIdMap();
}
}
<file_sep>package org.fulib.yaml;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Stores the YAML event history of an {@link EventSource} in a file and loads it back from there.
 */
public class EventFiler
{
// =============== Fields ===============
private final EventSource eventSource;
private String historyFileName = null;
// =============== Constructors ===============
public EventFiler(EventSource eventSource)
{
this.eventSource = eventSource;
}
// =============== Properties ===============
/**
* @since 1.2
*/
public EventSource getEventSource()
{
return this.eventSource;
}
/**
* @since 1.2
*/
public String getHistoryFileName()
{
return this.historyFileName;
}
public EventFiler setHistoryFileName(String historyFileName)
{
this.historyFileName = historyFileName;
return this;
}
// =============== Methods ===============
public String loadHistory()
{
final Path historyFile = Paths.get(this.historyFileName);
if (!Files.exists(historyFile))
{
return null;
}
try
{
return new String(Files.readAllBytes(historyFile));
}
catch (IOException e)
{
return null;
}
}
public boolean storeHistory()
{
final Path historyFile = Paths.get(this.historyFileName);
createDirs(historyFile);
try (final Writer writer = Files.newBufferedWriter(historyFile, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING))
{
this.eventSource.encodeYaml(writer);
}
catch (IOException e)
{
Logger.getGlobal().log(Level.SEVERE, "could not write to historyFile " + historyFile, e);
return false;
}
return true;
}
public EventFiler startEventLogging()
{
this.eventSource.addEventListener(this::storeEvent);
return this;
}
/**
* Appends the event to the history file.
*
* @param event
* the event
*
* @deprecated since 1.2; use {@link #storeEvent(Map)}
*/
@Deprecated
public void storeEvent(LinkedHashMap<String, String> event)
{
this.storeEvent((Map<String, String>) event);
}
/**
* Appends the event to the history file.
*
* @param event
* the event
*
* @since 1.2
*/
public void storeEvent(Map<String, String> event)
{
final Path historyFile = Paths.get(this.historyFileName);
createDirs(historyFile);
try (final Writer writer = Files.newBufferedWriter(historyFile, StandardOpenOption.CREATE, StandardOpenOption.APPEND))
{
YamlGenerator.serialize(event, writer);
}
catch (IOException e)
{
Logger.getGlobal().log(Level.SEVERE, "could not write to historyFile " + historyFile, e);
}
}
private static void createDirs(Path historyFile)
{
final Path parentDir = historyFile.getParent();
if (parentDir == null)
{
return; // plain file name without a directory component; nothing to create
}
try
{
Files.createDirectories(parentDir);
}
catch (IOException e)
{
Logger.getGlobal().log(Level.SEVERE, "could not create directories for historyFile " + historyFile, e);
}
}
}
<file_sep>package org.fulib.yaml;
import java.io.IOException;
import java.util.Map;
import java.util.regex.Pattern;
/**
* This class provides static utility methods for generating Yaml strings.
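 * <p>
 * For example, {@link #serialize(Map)} turns a single event map into one YAML object
 * (keys and values below are chosen for illustration only):
 *
 * <pre><code>
 * Map&lt;String, String&gt; event = new LinkedHashMap&lt;&gt;();
 * event.put("source", "alice");
 * event.put("property", "name");
 * YamlGenerator.serialize(event);
 * // yields (followed by a blank line):
 * // - source: alice
 * //   property: name
 * </code></pre>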
*
* @since 1.2
*/
public class YamlGenerator
{
// =============== Constants ===============
private static final Pattern SIMPLE_VALUE_PATTERN = Pattern.compile("[a-zA-Z0-9_.]+");
// =============== Static Methods ===============
/**
* Encodes the events as a list of YAML objects.
*
* @param events
* the events
*
* @return the encoded YAML object list
*/
public static String serialize(Iterable<? extends Map<String, String>> events)
{
final StringBuilder builder = new StringBuilder();
try
{
serialize(events, builder);
}
catch (IOException e)
{
// cannot happen
}
return builder.toString();
}
/**
* Encodes the events as a list of YAML objects into the writer.
*
* @param events
* the events
* @param writer
* the writer
*
* @throws IOException
* when appending to the writer produces an error
*/
public static void serialize(Iterable<? extends Map<String, String>> events, Appendable writer) throws IOException
{
for (final Map<String, String> event : events)
{
serialize(event, writer);
}
}
/**
* Encodes the event as a YAML object.
*
* @param event
* the event
*
* @return the encoded YAML object
*/
public static String serialize(Map<String, String> event)
{
final StringBuilder writer = new StringBuilder();
try
{
serialize(event, writer);
}
catch (IOException e)
{
// cannot happen
}
return writer.toString();
}
/**
* Encodes the event as a YAML object into the writer.
*
* @param event
* the event
* @param writer
* the writer
*
* @throws IOException
* when appending to the writer produces an error
*/
public static void serialize(Map<String, String> event, Appendable writer) throws IOException
{
String prefix = "- ";
for (Map.Entry<String, String> keyValuePair : event.entrySet())
{
writer.append(prefix);
writer.append(keyValuePair.getKey());
writer.append(": ");
encapsulate(keyValuePair.getValue(), writer);
writer.append('\n');
prefix = " ";
}
writer.append('\n');
}
/**
* Encapsulates a YAML value by enclosing it in quotes ("), if necessary.
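 * <p>
 * For example (illustrative values): {@code encapsulate("alice")} returns {@code alice} unchanged,
 * while {@code encapsulate("hello world")} returns {@code "hello world"} including the quotes.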
*
* @param value
* the YAML value to encapsulate
*
* @return the encapsulated YAML value
*/
public static String encapsulate(String value)
{
if (SIMPLE_VALUE_PATTERN.matcher(value).matches())
{
return value;
}
return quote(value);
}
/**
* Encapsulates a YAML value by enclosing it in quotes ("), if necessary, and appends the result to the writer.
*
* @param value
* the YAML value to encapsulate
* @param writer
* the writer
*
* @throws IOException
* when appending to the writer produces an error
*/
public static void encapsulate(String value, Appendable writer) throws IOException
{
if (SIMPLE_VALUE_PATTERN.matcher(value).matches())
{
writer.append(value);
return;
}
quote(value, writer);
}
/**
* Encapsulates a YAML value by enclosing it in quotes (").
*
* @param value
* the YAML value to quote
 *
 * @return the quoted YAML value
 *
 * @since 1.3
*/
public static String quote(String value)
{
// replace " with \"
return "\"" + value.replace("\"", "\\\"") + "\"";
}
/**
* Encapsulates a YAML value by enclosing it in quotes ("), and appends the result to the writer.
*
* @param value
* the YAML value to quote
* @param writer
* the writer
*
* @throws IOException
* when appending to the writer produces an error
*
* @since 1.3
*/
public static void quote(String value, Appendable writer) throws IOException
{
writer.append('"');
writer.append(value.replace("\"", "\\\""));
writer.append('"');
}
}
<file_sep>package org.fulib.yaml.testmodel;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
public class Customer
{
public static final String PROPERTY_id = "id";
private String id;
public String getId()
{
return id;
}
public Customer setId(String value)
{
if (value == null ? this.id != null : ! value.equals(this.id))
{
String oldValue = this.id;
this.id = value;
firePropertyChange("id", oldValue, value);
}
return this;
}
protected PropertyChangeSupport listeners = null;
public boolean firePropertyChange(String propertyName, Object oldValue, Object newValue)
{
if (listeners != null)
{
listeners.firePropertyChange(propertyName, oldValue, newValue);
return true;
}
return false;
}
public boolean addPropertyChangeListener(PropertyChangeListener listener)
{
if (listeners == null)
{
listeners = new PropertyChangeSupport(this);
}
listeners.addPropertyChangeListener(listener);
return true;
}
public boolean addPropertyChangeListener(String propertyName, PropertyChangeListener listener)
{
if (listeners == null)
{
listeners = new PropertyChangeSupport(this);
}
listeners.addPropertyChangeListener(propertyName, listener);
return true;
}
public boolean removePropertyChangeListener(PropertyChangeListener listener)
{
if (listeners != null)
{
listeners.removePropertyChangeListener(listener);
}
return true;
}
public boolean removePropertyChangeListener(String propertyName,PropertyChangeListener listener)
{
if (listeners != null)
{
listeners.removePropertyChangeListener(propertyName, listener);
}
return true;
}
@Override
public String toString()
{
StringBuilder result = new StringBuilder();
result.append(" ").append(this.getId());
result.append(" ").append(this.getName());
result.append(" ").append(this.getAddress());
return result.substring(1);
}
public static final String PROPERTY_name = "name";
private String name;
public String getName()
{
return name;
}
public Customer setName(String value)
{
if (value == null ? this.name != null : ! value.equals(this.name))
{
String oldValue = this.name;
this.name = value;
firePropertyChange("name", oldValue, value);
}
return this;
}
public static final String PROPERTY_address = "address";
private String address;
public String getAddress()
{
return address;
}
public Customer setAddress(String value)
{
if (value == null ? this.address != null : ! value.equals(this.address))
{
String oldValue = this.address;
this.address = value;
firePropertyChange("address", oldValue, value);
}
return this;
}
public static final java.util.ArrayList<Product> EMPTY_products = new java.util.ArrayList<Product>()
{ @Override public boolean add(Product value){ throw new UnsupportedOperationException("No direct add! Use xy.withProducts(obj)"); }};
public static final String PROPERTY_products = "products";
private java.util.ArrayList<Product> products = null;
public java.util.ArrayList<Product> getProducts()
{
if (this.products == null)
{
return EMPTY_products;
}
return this.products;
}
public Customer withProducts(Object... value)
{
if(value==null) return this;
for (Object item : value)
{
if (item == null) continue;
if (item instanceof java.util.Collection)
{
for (Object i : (java.util.Collection) item)
{
this.withProducts(i);
}
}
else if (item instanceof Product)
{
if (this.products == null)
{
this.products = new java.util.ArrayList<Product>();
}
if ( ! this.products.contains(item))
{
this.products.add((Product)item);
((Product)item).withCustomers(this);
firePropertyChange("products", null, item);
}
}
else throw new IllegalArgumentException();
}
return this;
}
public Customer withoutProducts(Object... value)
{
if (this.products == null || value==null) return this;
for (Object item : value)
{
if (item == null) continue;
if (item instanceof java.util.Collection)
{
for (Object i : (java.util.Collection) item)
{
this.withoutProducts(i);
}
}
else if (item instanceof Product)
{
if (this.products.contains(item))
{
this.products.remove((Product)item);
((Product)item).withoutCustomers(this);
firePropertyChange("products", item, null);
}
}
}
return this;
}
public void removeYou()
{
this.withoutOrders(this.getOrders().clone());
this.withoutProducts(this.getProducts().clone());
}
public static final java.util.ArrayList<Order> EMPTY_orders = new java.util.ArrayList<Order>()
{ @Override public boolean add(Order value){ throw new UnsupportedOperationException("No direct add! Use xy.withOrders(obj)"); }};
public static final String PROPERTY_orders = "orders";
private java.util.ArrayList<Order> orders = null;
public java.util.ArrayList<Order> getOrders()
{
if (this.orders == null)
{
return EMPTY_orders;
}
return this.orders;
}
public Customer withOrders(Object... value)
{
if(value==null) return this;
for (Object item : value)
{
if (item == null) continue;
if (item instanceof java.util.Collection)
{
for (Object i : (java.util.Collection) item)
{
this.withOrders(i);
}
}
else if (item instanceof Order)
{
if (this.orders == null)
{
this.orders = new java.util.ArrayList<Order>();
}
if ( ! this.orders.contains(item))
{
this.orders.add((Order)item);
((Order)item).setCustomer(this);
firePropertyChange("orders", null, item);
}
}
else throw new IllegalArgumentException();
}
return this;
}
public Customer withoutOrders(Object... value)
{
if (this.orders == null || value==null) return this;
for (Object item : value)
{
if (item == null) continue;
if (item instanceof java.util.Collection)
{
for (Object i : (java.util.Collection) item)
{
this.withoutOrders(i);
}
}
else if (item instanceof Order)
{
if (this.orders.contains(item))
{
this.orders.remove((Order)item);
((Order)item).setCustomer(null);
firePropertyChange("orders", item, null);
}
}
}
return this;
}
}
<file_sep>package org.fulib.yaml;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
/**
 * A {@link PropertyChangeListener} that attaches itself to a root object and, transitively, to every object
 * reachable from it, and forwards all property change events to a single element listener.
 * Newly discovered objects additionally report their current property values as initial change events.
 */
public class ModelListener implements PropertyChangeListener
{
private final PropertyChangeListener elementListener;
private final ReflectorMap creatorMap;
private final Set<Object> supervisedObjects = new HashSet<>();
private boolean closed = false;
public ModelListener(Object root, PropertyChangeListener elementListener)
{
this.elementListener = elementListener;
String packageName = root.getClass().getPackage().getName();
this.creatorMap = new ReflectorMap(packageName);
this.subscribeTo(root);
}
public void removeYou()
{
this.closed = true;
for (Object obj : this.supervisedObjects)
{
this.callChangeListenerMethod(obj, "removePropertyChangeListener");
}
}
private void subscribeTo(Object newObject)
{
if (this.supervisedObjects.contains(newObject))
{
return;
}
this.callChangeListenerMethod(newObject, "addPropertyChangeListener");
this.supervisedObjects.add(newObject);
this.fireInitialPropertyChanges(newObject);
}
private void fireInitialPropertyChanges(Object newObject)
{
if (!this.creatorMap.canReflect(newObject))
{
return; // don't know structure of newObject, probably a String
}
Reflector reflector = this.creatorMap.getReflector(newObject);
for (String prop : reflector.getAllProperties())
{
Object propertyValue = reflector.getValue(newObject, prop);
if (propertyValue instanceof Collection)
{
for (Object obj : (Collection<?>) propertyValue)
{
this.propertyChange(new PropertyChangeEvent(newObject, prop, null, obj));
}
}
else
{
this.propertyChange(new PropertyChangeEvent(newObject, prop, null, propertyValue));
}
}
}
private void callChangeListenerMethod(Object receiver, String methodName)
{
Class<?> clazz = receiver.getClass();
try
{
Method addPropertyChangeListener = clazz.getMethod(methodName, PropertyChangeListener.class);
addPropertyChangeListener.invoke(receiver, this);
}
catch (Exception e)
{
// just skip it
}
}
@Override
public void propertyChange(PropertyChangeEvent evt)
{
if (this.closed)
{
return;
}
final Object newValue = evt.getNewValue();
if (newValue != null)
{
this.subscribeTo(newValue);
}
this.elementListener.propertyChange(evt);
}
}
<file_sep>package org.fulib.yaml.testmodel;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
public class Product
{
public static final String PROPERTY_description = "description";
private String description;
public String getDescription()
{
return description;
}
public Product setDescription(String value)
{
if (value == null ? this.description != null : ! value.equals(this.description))
{
String oldValue = this.description;
this.description = value;
firePropertyChange("description", oldValue, value);
}
return this;
}
protected PropertyChangeSupport listeners = null;
public boolean firePropertyChange(String propertyName, Object oldValue, Object newValue)
{
if (listeners != null)
{
listeners.firePropertyChange(propertyName, oldValue, newValue);
return true;
}
return false;
}
public boolean addPropertyChangeListener(PropertyChangeListener listener)
{
if (listeners == null)
{
listeners = new PropertyChangeSupport(this);
}
listeners.addPropertyChangeListener(listener);
return true;
}
public boolean addPropertyChangeListener(String propertyName, PropertyChangeListener listener)
{
if (listeners == null)
{
listeners = new PropertyChangeSupport(this);
}
listeners.addPropertyChangeListener(propertyName, listener);
return true;
}
public boolean removePropertyChangeListener(PropertyChangeListener listener)
{
if (listeners != null)
{
listeners.removePropertyChangeListener(listener);
}
return true;
}
public boolean removePropertyChangeListener(String propertyName,PropertyChangeListener listener)
{
if (listeners != null)
{
listeners.removePropertyChangeListener(propertyName, listener);
}
return true;
}
@Override
public String toString()
{
StringBuilder result = new StringBuilder();
result.append(" ").append(this.getId());
result.append(" ").append(this.getDescription());
return result.substring(1);
}
public static final String PROPERTY_id = "id";
private String id;
public String getId()
{
return id;
}
public Product setId(String value)
{
if (value == null ? this.id != null : ! value.equals(this.id))
{
String oldValue = this.id;
this.id = value;
firePropertyChange("id", oldValue, value);
}
return this;
}
public static final java.util.ArrayList<Offer> EMPTY_offers = new java.util.ArrayList<Offer>()
{ @Override public boolean add(Offer value){ throw new UnsupportedOperationException("No direct add! Use xy.withOffers(obj)"); }};
public static final String PROPERTY_offers = "offers";
private java.util.ArrayList<Offer> offers = null;
public java.util.ArrayList<Offer> getOffers()
{
if (this.offers == null)
{
return EMPTY_offers;
}
return this.offers;
}
public Product withOffers(Object... value)
{
if(value==null) return this;
for (Object item : value)
{
if (item == null) continue;
if (item instanceof java.util.Collection)
{
for (Object i : (java.util.Collection) item)
{
this.withOffers(i);
}
}
else if (item instanceof Offer)
{
if (this.offers == null)
{
this.offers = new java.util.ArrayList<Offer>();
}
if ( ! this.offers.contains(item))
{
this.offers.add((Offer)item);
((Offer)item).setProduct(this);
firePropertyChange("offers", null, item);
}
}
else throw new IllegalArgumentException();
}
return this;
}
public Product withoutOffers(Object... value)
{
if (this.offers == null || value==null) return this;
for (Object item : value)
{
if (item == null) continue;
if (item instanceof java.util.Collection)
{
for (Object i : (java.util.Collection) item)
{
this.withoutOffers(i);
}
}
else if (item instanceof Offer)
{
if (this.offers.contains(item))
{
this.offers.remove((Offer)item);
((Offer)item).setProduct(null);
firePropertyChange("offers", item, null);
}
}
}
return this;
}
public void removeYou()
{
this.withoutOffers(this.getOffers().clone());
this.withoutCustomers(this.getCustomers().clone());
}
public static final java.util.ArrayList<Customer> EMPTY_customers = new java.util.ArrayList<Customer>()
{ @Override public boolean add(Customer value){ throw new UnsupportedOperationException("No direct add! Use xy.withCustomers(obj)"); }};
public static final String PROPERTY_customers = "customers";
private java.util.ArrayList<Customer> customers = null;
public java.util.ArrayList<Customer> getCustomers()
{
if (this.customers == null)
{
return EMPTY_customers;
}
return this.customers;
}
public Product withCustomers(Object... value)
{
if(value==null) return this;
for (Object item : value)
{
if (item == null) continue;
if (item instanceof java.util.Collection)
{
for (Object i : (java.util.Collection) item)
{
this.withCustomers(i);
}
}
else if (item instanceof Customer)
{
if (this.customers == null)
{
this.customers = new java.util.ArrayList<Customer>();
}
if ( ! this.customers.contains(item))
{
this.customers.add((Customer)item);
((Customer)item).withProducts(this);
firePropertyChange("customers", null, item);
}
}
else throw new IllegalArgumentException();
}
return this;
}
public Product withoutCustomers(Object... value)
{
if (this.customers == null || value==null) return this;
for (Object item : value)
{
if (item == null) continue;
if (item instanceof java.util.Collection)
{
for (Object i : (java.util.Collection) item)
{
this.withoutCustomers(i);
}
}
else if (item instanceof Customer)
{
if (this.customers.contains(item))
{
this.customers.remove((Customer)item);
((Customer)item).withoutProducts(this);
firePropertyChange("customers", item, null);
}
}
}
return this;
}
}
<file_sep>package org.fulib.yaml;
import java.util.Objects;
/**
* This class provides static utility methods for working with strings in general, as used by various implementations
* in this library.
*/
public class StrUtil
{
/**
* Transforms the first character of the input string to uppercase, leaving the remaining characters as-is.
*
* @param string
* the input string
*
* @return the input string with the first character in uppercase
*/
public static String cap(String string)
{
final StringBuilder builder = new StringBuilder(string);
builder.setCharAt(0, Character.toUpperCase(builder.charAt(0)));
return builder.toString();
}
/**
* Checks if the two strings are equal, handling nulls correctly.
*
* @param word1
* the first string
* @param word2
* the second string
*
* @return true if the two strings are equal or both null, false otherwise
*
* @deprecated since 1.2; use {@link Objects#equals(Object, Object)} instead
*/
@Deprecated
public static boolean stringEquals(String word1, String word2)
{
return Objects.equals(word1, word2);
}
/**
* Transforms the first character of the input string to lowercase, leaving the remaining characters as-is.
*
* @param string
* the input string
*
* @return the input string with the first character in lowercase
*/
public static String downFirstChar(String string)
{
final StringBuilder builder = new StringBuilder(string);
builder.setCharAt(0, Character.toLowerCase(builder.charAt(0)));
return builder.toString();
}
}
<file_sep>rootProject.name = 'fulibYaml'
<file_sep># fulibYaml
[](https://github.com/fujaba/fulibYaml/actions)
[](https://javadoc.io/doc/org.fulib/fulibYaml)
Yaml serialization for [fulib](https://github.com/fujaba/fulib) object models. Used for storage and communication.
## Installation
`build.gradle`:
```groovy
repositories {
// ...
mavenCentral()
}
```
```groovy
dependencies {
// https://mvnrepository.com/artifact/org.fulib/fulibYaml
implementation group: 'org.fulib', name: 'fulibYaml', version: '1.5.0'
}
```
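## Usage
A minimal round-trip sketch; the `Student` class and its `setName` method are illustrative placeholders from your own model, not part of this library:
```java
import java.util.Map;
import org.fulib.yaml.Yaml;
// assumes a plain Java bean (e.g. a fulib-generated class) in your own package
Student alice = new Student();
alice.setName("Alice");
String yaml = Yaml.encode(alice);                // serializes alice and every object reachable from it
Map<String, Object> objects = Yaml.decode(yaml); // maps generated object ids to the decoded objects
```
For more control over packages, object ids and property change events, see `YamlIdMap` and the [Javadocs](https://javadoc.io/doc/org.fulib/fulibYaml).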
## License
[MIT](LICENSE.md)
<file_sep>package org.fulib.yaml;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.util.*;
import java.util.Map.Entry;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Encodes object models as YAML text and decodes such text back into objects, assigning an id to every object it
 * encounters.
 */
public class YamlIdMap extends IdMap
{
// =============== Constants ===============
private static final String REMOVE = "remove";
// =============== Fields ===============
private String yaml;
private boolean decodingPropertyChange;
private Yamler yamler = new Yamler();
private HashMap<String, String> attrTimeStamps = new HashMap<>();
private String yamlChangeText;
// =============== Constructors ===============
/**
* @param packageName
* the names of the package in which model classes reside
*
* @since 1.2
*/
public YamlIdMap(String packageName)
{
this(new ReflectorMap(packageName));
}
/**
* @param packageNames
* the names of the packages in which model classes reside
*/
public YamlIdMap(String... packageNames)
{
this(new ReflectorMap(packageNames));
}
/**
* @param packageNames
* the names of the packages in which model classes reside
*
* @since 1.2
*/
public YamlIdMap(Collection<String> packageNames)
{
this(new ReflectorMap(packageNames));
}
/**
 * @param reflectorMap
 *    the reflector map used to access model objects
 *
 * @since 1.2
 */
public YamlIdMap(ReflectorMap reflectorMap)
{
super(reflectorMap);
}
// =============== Properties ===============
/**
* @deprecated since 1.2; use {@link #getObject(String)} instead
*/
@Deprecated
public LinkedHashMap<String, Object> getObjIdMap()
{
return this.objIdMap;
}
/**
* @deprecated since 1.2; use {@link #getId(Object)} instead
*/
@Deprecated
public LinkedHashMap<Object, String> getIdObjMap()
{
return this.idObjMap;
}
/**
* @deprecated since 1.2; use {@link #getAttributeTimeStamp(String)} instead
*/
@Deprecated
public HashMap<String, String> getAttrTimeStamps()
{
return this.attrTimeStamps;
}
/**
* @deprecated since 1.2; use {@link #setUserId(String)} instead
*/
@Deprecated
public YamlIdMap withUserId(String userId)
{
this.userId = userId;
return this;
}
public boolean isDecodingPropertyChange()
{
return this.decodingPropertyChange;
}
public void setDecodingPropertyChange(boolean decodingPropertyChange)
{
this.decodingPropertyChange = decodingPropertyChange;
}
// =============== Methods ===============
// --------------- CSV ---------------
public Object decodeCSV(String fileName)
{
try
{
byte[] bytes = Files.readAllBytes(Paths.get(fileName));
String csvText = new String(bytes);
String yamlText = this.convertCsv2Yaml(csvText);
// System.out.println(yamlText);
return this.decode(yamlText);
}
catch (IOException e)
{
Logger.getGlobal().log(Level.SEVERE, e.getMessage(), e);
}
return null;
}
private String convertCsv2Yaml(String csvText)
{
String[] split = csvText.split(";");
for (int i = 0; i < split.length; i++)
{
String token = split[i];
if (token.startsWith("\"") && token.endsWith("\""))
{
// already done
continue;
}
if (token.startsWith("\"") && !token.endsWith("\""))
{
// there is a semicolon within " ; " , recombine it
int j = i;
String nextToken;
while (true)
{
j++;
nextToken = split[j];
split[j] = "";
token = token + ";" + nextToken;
if (nextToken.endsWith("\""))
{
split[i] = token;
i = j;
break;
}
}
continue;
}
if (token.trim().length() == 0)
{
continue;
}
Pattern pattern = Pattern.compile("\\s");
Matcher matcher = pattern.matcher(token.trim());
boolean found = matcher.find();
if (found)
{
token = YamlGenerator.encapsulate(token);
split[i] = token;
}
}
StringBuilder buf = new StringBuilder();
for (String str : split)
{
buf.append(str).append(" ");
}
return buf.toString();
}
// --------------- Decoding ---------------
public Object decode(String yaml, Object root)
{
this.putObject(root);
Object decodedRoot = this.decode(yaml);
if (decodedRoot != root)
{
throw new RuntimeException("Object passed as root does not match the first object in the yaml string.\n"
+ "Ensure that the type of the passed root and the first object in the yaml string match. \n"
+ "Ensure that the key of the passed root and the key of the first object in the yaml string match. \n"
+ "You get the key of the passed root object via 'String key = getOrCreateKey(root);'\n");
}
return root;
}
public Object decode(String yaml)
{
this.decodingPropertyChange = false;
this.yamlChangeText = null;
this.yaml = yaml;
Object root;
this.yamler = new Yamler(yaml);
if (!"-".equals(this.yamler.getCurrentToken()))
{
return this.yamler.decode(yaml);
}
root = this.parseObjectIds();
this.yamler = new Yamler(yaml);
this.parseObjectAttrs();
// reset property change decoding
this.setDecodingPropertyChange(false);
this.yamlChangeText = null;
return root;
}
// --------------- Parsing ---------------
private void parseObjectAttrs()
{
while (this.yamler.getCurrentToken() != null)
{
if (!"-".equals(this.yamler.getCurrentToken()))
{
this.yamler.printError("'-' expected");
this.yamler.nextToken();
continue;
}
String key = this.yamler.nextToken();
if (key.endsWith(":"))
{
// usual
this.parseUsualObjectAttrs();
}
else
{
this.parseObjectTableAttrs();
}
}
}
private void parseObjectTableAttrs()
{
// skip column names
String className = this.yamler.getCurrentToken();
Reflector creator = this.reflectorMap.getReflector(className);
this.yamler.nextToken();
ArrayList<String> colNameList = new ArrayList<>();
while (this.yamler.getCurrentToken() != null && this.yamler.getLookAheadToken() != null
&& this.yamler.getLookAheadToken().endsWith(":"))
{
String colName = this.yamler.stripColon(this.yamler.getCurrentToken());
colNameList.add(colName);
this.yamler.nextToken();
}
while (this.yamler.getCurrentToken() != null && !"-".equals(this.yamler.getCurrentToken()))
{
String objectId = this.yamler.stripColon(this.yamler.getCurrentToken());
this.yamler.nextToken();
Object obj = this.objIdMap.get(objectId);
// column values
int colNum = 0;
while (this.yamler.getCurrentToken() != null && !this.yamler.getCurrentToken().endsWith(":") && !"-".equals(
this.yamler.getCurrentToken()))
{
String attrName = colNameList.get(colNum);
if (this.yamler.getCurrentToken().startsWith("["))
{
String value = this.yamler.getCurrentToken().substring(1);
if ("".equals(value.trim()))
{
value = this.yamler.nextToken();
}
this.setValue(creator, obj, attrName, value);
while (this.yamler.getCurrentToken() != null && !this.yamler.getCurrentToken().endsWith("]"))
{
this.yamler.nextToken();
value = this.yamler.getCurrentToken();
if (this.yamler.getCurrentToken().endsWith("]"))
{
value = this.yamler.getCurrentToken().substring(0, this.yamler.getCurrentToken().length() - 1);
}
if (!"".equals(value.trim()))
{
this.setValue(creator, obj, attrName, value);
}
}
}
else
{
this.setValue(creator, obj, attrName, this.yamler.getCurrentToken());
}
colNum++;
this.yamler.nextToken();
}
}
}
private void parseUsualObjectAttrs()
{
String objectId = this.yamler.stripColon(this.yamler.getCurrentToken());
String className = this.yamler.nextToken();
this.yamler.nextToken();
if (className.endsWith(".remove"))
{
this.objIdMap.remove(objectId);
// skip time stamp, if necessary
while (this.yamler.getCurrentToken() != null && !"-".equals(this.yamler.getCurrentToken()))
{
this.yamler.nextToken();
}
return;
}
if (".Map".equals(className))
{
YamlObject yamlObj = (YamlObject) this.objIdMap.get(objectId);
Map<String, Object> map = yamlObj.getProperties();
while (this.yamler.getCurrentToken() != null && !"-".equals(this.yamler.getCurrentToken()))
{
String attrName = this.yamler.stripColon(this.yamler.getCurrentToken());
this.yamler.nextToken();
if (map == null)
{
// no object created by parseObjectIds. Object has been removed.
// ignore attr changes
while (this.yamler.getCurrentToken() != null && !this.yamler.getCurrentToken().endsWith(":")
&& !"-".equals(this.yamler.getCurrentToken()))
{
this.yamler.nextToken();
}
continue;
}
// many values
ArrayList<Object> previousValue = null;
while (this.yamler.getCurrentToken() != null && !this.yamler.getCurrentToken().endsWith(":") && !"-".equals(
this.yamler.getCurrentToken()))
{
String attrValue = this.yamler.getCurrentToken();
Object target = this.objIdMap.get(attrValue);
if (target != null)
{
if (previousValue != null)
{
previousValue.add(target);
map.put(attrName, previousValue);
}
else
{
map.put(attrName, target);
previousValue = new ArrayList<>();
previousValue.add(target);
}
}
else
{
if (previousValue != null)
{
previousValue.add(attrValue);
map.put(attrName, previousValue);
}
else
{
map.put(attrName, attrValue);
previousValue = new ArrayList<>();
previousValue.add(attrValue);
}
}
this.yamler.nextToken();
}
}
}
else
{
Reflector reflector = this.reflectorMap.getReflector(className);
Object obj = this.objIdMap.get(objectId);
// read attributes
while (this.yamler.getCurrentToken() != null && !"-".equals(this.yamler.getCurrentToken()))
{
String attrName = this.yamler.stripColon(this.yamler.getCurrentToken());
this.yamler.nextToken();
if (obj == null)
{
// no object created by parseObjectIds. Object has been removed.
// ignore attr changes
while (this.yamler.getCurrentToken() != null && !this.yamler.getCurrentToken().endsWith(":")
&& !"-".equals(this.yamler.getCurrentToken()))
{
this.yamler.nextToken();
}
continue;
}
// many values
while (this.yamler.getCurrentToken() != null && !this.yamler.getCurrentToken().endsWith(":") && !"-".equals(
this.yamler.getCurrentToken()))
{
String attrValue = this.yamler.getCurrentToken();
if (this.yamler.getLookAheadToken() != null && this.yamler.getLookAheadToken().endsWith(".time:"))
{
String propWithTime = this.yamler.nextToken();
String newTimeStamp = this.yamler.nextToken();
String oldTimeStamp = this.attrTimeStamps.get(objectId + "." + attrName);
if (oldTimeStamp == null || oldTimeStamp.compareTo(newTimeStamp) <= 0)
{
this.setDecodingPropertyChange(true);
if (this.yamlChangeText == null)
{
this.yamlChangeText = this.yaml;
}
this.setValue(reflector, obj, attrName, attrValue);
this.attrTimeStamps.put(objectId + "." + attrName, newTimeStamp);
}
}
else
{
this.setValue(reflector, obj, attrName, attrValue);
}
this.yamler.nextToken();
}
}
}
}
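   // Sets a single attribute via reflection. If setting the plain string value fails or
   // has no effect (returns null), the value is interpreted as an object id and resolved
   // through objIdMap. A trailing ".remove" on the attribute name removes the value from
   // a collection or resets a to-one reference.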
private void setValue(Reflector reflector, Object obj, String attrName, String attrValue)
{
String type = "new";
if (attrName.endsWith(".remove"))
{
attrName = attrName.substring(0, attrName.length() - ".remove".length());
if (reflector.getValue(obj, attrName) instanceof Collection)
{
type = REMOVE;
}
else
{
attrValue = null;
}
}
try
{
Object setResult = reflector.setValue(obj, attrName, attrValue, type);
if (setResult == null)
{
Object targetObj = this.objIdMap.get(attrValue);
if (targetObj != null)
{
reflector.setValue(obj, attrName, targetObj, type);
}
}
}
catch (Exception e)
{
         // the value may be an object id referencing another node; try to resolve it via the id map
Object targetObj = this.objIdMap.get(attrValue);
if (targetObj != null)
{
reflector.setValue(obj, attrName, targetObj, type);
}
}
}
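   // First pass over the yaml text: creates an (empty) instance for every object id and
   // registers it in objIdMap/idObjMap; attribute values are skipped here. Returns the
   // object belonging to the first id, which becomes the root.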
private Object parseObjectIds()
{
Object root = null;
while (this.yamler.getCurrentToken() != null)
{
if (!"-".equals(this.yamler.getCurrentToken()))
{
this.yamler.printError("'-' expected");
this.yamler.nextToken();
continue;
}
String key = this.yamler.nextToken();
if (key.endsWith(":"))
{
// usual
Object now = this.parseUsualObjectId();
if (root == null)
{
root = now;
}
}
else
{
Object now = this.parseObjectTableIds();
if (root == null)
{
root = now;
}
}
}
return root;
}
private Object parseUsualObjectId()
{
String objectId = this.yamler.stripColon(this.yamler.getCurrentToken());
int pos = objectId.lastIndexOf('.');
String numPart = objectId.substring(pos + 2);
int objectNum;
try
{
objectNum = Integer.parseInt(numPart);
}
catch (NumberFormatException e)
{
objectNum = this.objIdMap.size() + 1;
}
if (objectNum > this.maxUsedIdNum)
{
this.maxUsedIdNum = objectNum;
}
String className = this.yamler.nextToken();
Object obj = this.objIdMap.get(objectId);
String userId = null;
// skip attributes
while (this.yamler.getCurrentToken() != null && !"-".equals(this.yamler.getCurrentToken()))
{
String token = this.yamler.nextToken();
if (token != null && token.endsWith(".time:"))
{
token = this.yamler.nextToken();
userId = token.substring(token.lastIndexOf('.') + 1);
}
}
boolean foreignChange = false;
if (userId != null)
{
int dotIndex = objectId.indexOf('.');
if (dotIndex > 0)
{
String ownerId = objectId.substring(0, dotIndex);
foreignChange = !userId.equals(ownerId);
}
}
if (obj == null && !className.endsWith(".remove") && !foreignChange)
{
if (".Map".equals(className))
{
obj = new YamlObject(objectId);
}
else
{
Reflector reflector = this.reflectorMap.getReflector(className);
obj = reflector.newInstance();
}
this.objIdMap.put(objectId, obj);
this.idObjMap.put(obj, objectId);
}
return obj;
}
private Object parseObjectTableIds()
{
Object root = null;
// skip column names
String className = this.yamler.getCurrentToken();
Reflector reflector = this.reflectorMap.getReflector(className);
while (!"".equals(this.yamler.getCurrentToken()) && this.yamler.getLookAheadToken().endsWith(":"))
{
this.yamler.nextToken();
}
while (!"".equals(this.yamler.getCurrentToken()) && !"-".equals(this.yamler.getCurrentToken()))
{
String objectId = this.yamler.stripColon(this.yamler.getCurrentToken());
this.yamler.nextToken();
Object obj = reflector.newInstance();
this.objIdMap.put(objectId, obj);
this.idObjMap.put(obj, objectId);
if (root == null)
{
root = obj;
}
// skip column values
while (!"".equals(this.yamler.getCurrentToken()) && !this.yamler.getCurrentToken().endsWith(":")
&& !"-".equals(this.yamler.getCurrentToken()))
{
this.yamler.nextToken();
}
}
return root;
}
// --------------- Object Access ---------------
/**
* Puts the {@code object} in this IdMap with the specified {@code id}.
* The call
*
* <pre><code>
* idMap.putNameObject("foo", bar);
* </code></pre>
* <p>
* is equivalent to
*
* <pre><code>
* idMap.putObject("foo", bar);
* idMap.discoverObjects(bar);
* </code></pre>
* <p>
* and the latter should be used for clarity.
*
* @param id
* the id
* @param object
* the object
*
* @return this instance, to allow method chaining
*
* @deprecated since 1.2; use {@link #putObject(String, Object)} and {@link #discoverObjects(Object)} instead
*/
@Deprecated
public YamlIdMap putNameObject(String id, Object object)
{
this.putObject(id, object);
this.discoverObjects(object);
return this;
}
// --------------- Keys ---------------
/**
* @deprecated since 1.2; use {@link #putObject(Object)} instead
*/
@Deprecated
public String getOrCreateKey(Object obj)
{
return this.putObject(obj);
}
// --------------- Object Collection ---------------
/**
* Discovers all objects reachable from the {@code roots} and within the packages specified in the constructor.
*
* @param roots
* the root objects
*
* @return a set of all discovered objects
*
* @see #discoverObjects(Object...)
*
* @deprecated since 1.2; use {@link #discoverObjects(Object...)} instead (unless the resulting set is needed)
*/
@Deprecated
public LinkedHashSet<Object> collectObjects(Object... roots)
{
final LinkedHashSet<Object> collectedObjects = new LinkedHashSet<>();
this.reflectorMap.discoverObjects(roots, collectedObjects);
for (final Object collectedObject : collectedObjects)
{
this.putObject(collectedObject);
}
return collectedObjects;
}
// --------------- Encoding ---------------
/**
* Encodes this IdMap to a Yaml string.
* This method is merely a shorthand for calling {@link #discoverObjects(Object...)} and {@link #encode()}.
* I.e.,
*
* <pre><code>
* String yaml = idMap.encode(foo, bar, baz);
* </code></pre>
* <p>
* is equivalent to
*
* <pre><code>
* idMap.discoverObjects(foo, bar, baz);
* String yaml = idMap.encode();
* </code></pre>
*
* @param roots
* the root objects
*
* @return this IdMap encoded as a Yaml string
*/
public String encode(Object... roots)
{
Objects.requireNonNull(roots);
this.collectObjects(roots);
return this.encode();
}
/**
* Encodes this IdMap to a Yaml string.
*
* @return this IdMap encoded as a Yaml string
*
* @since 1.2
*/
public String encode()
{
StringBuilder buf = new StringBuilder();
for (Entry<String, Object> entry : this.objIdMap.entrySet())
{
Object obj = entry.getValue();
if (obj instanceof Enum)
{
continue;
}
String key = entry.getKey();
String className = obj.getClass().getName();
buf.append("- ").append(key).append(": \t").append(className).append("\n");
// attrs
Reflector creator = this.getReflector(obj);
for (String prop : creator.getAllProperties())
{
Object value = creator.getValue(obj, prop);
if (value == null)
{
continue;
}
if (value instanceof Collection)
{
if (((Collection<?>) value).isEmpty())
{
continue;
}
buf.append(" ").append(prop).append(':');
for (Object item : (Collection<?>) value)
{
buf.append(" \t");
this.encodeValue(buf, item);
}
buf.append('\n');
}
else if (value instanceof Map)
{
}
else
{
buf.append(" ").append(prop).append(": \t");
this.encodeValue(buf, value);
buf.append('\n');
// add time stamp?
if (this.userId != null)
{
String timeKey = key + "." + prop;
String timeStamp = this.attrTimeStamps.get(timeKey);
if (timeStamp != null)
{
buf.append(" ").append(prop).append(".time: \t").append(timeStamp).append("\n");
}
}
}
}
buf.append("\n");
}
return buf.toString();
}
private void encodeValue(StringBuilder buf, Object value)
{
if (value instanceof Enum)
{
final Enum<?> enumValue = (Enum<?>) value;
// <enumClass>.<constantName>
buf.append(enumValue.getDeclaringClass().getName()).append('.').append(enumValue.name());
return;
}
final String valueKey = this.idObjMap.get(value);
if (valueKey != null)
{
buf.append(valueKey);
}
else if (value instanceof String)
{
try
{
YamlGenerator.encapsulate((String) value, buf);
}
catch (IOException ignored)
{
}
}
else
{
buf.append(value);
}
}
/**
* @deprecated since 1.2; unused
*/
@Deprecated
public void encodeAttrValue(StringBuilder buf, Object obj, String propertyName, Object value)
{
// already known?
String key = this.putObject(obj);
String className = obj.getClass().getSimpleName();
buf.append("- ").append(key).append(": \t").append(className).append("\n");
Class<?> valueClass = value.getClass();
if (valueClass.getName().startsWith("java.lang.") || valueClass == String.class)
{
buf.append(" ")
.append(propertyName)
.append(": \t")
.append(YamlGenerator.encapsulate(value.toString()))
.append("\n");
if (this.userId != null)
{
String now = "" + LocalDateTime.now() + "." + this.userId;
buf.append(" ").append(propertyName).append(".time: \t").append(now).append("\n");
this.attrTimeStamps.put(key + "." + propertyName, now);
}
}
else
{
// value is an object
String valueKey = this.putObject(value);
buf.append(" ").append(propertyName).append(": \t").append(valueKey).append("\n");
if (this.userId != null)
{
// add timestamp only for to-one assocs
Reflector reflector = this.reflectorMap.getReflector(obj);
String fieldName = propertyName;
if (propertyName.endsWith(".remove"))
{
fieldName = propertyName.substring(0, propertyName.lastIndexOf('.'));
}
Object fieldValue = reflector.getValue(obj, fieldName);
String now = LocalDateTime.now() + "." + this.userId;
if (fieldValue instanceof Collection)
{
buf.append(" ")
.append(propertyName)
.append('.')
.append(valueKey)
.append(".time: \t")
.append(now)
.append("\n");
this.attrTimeStamps.put(key + "." + propertyName + "." + valueKey, now);
}
else
{
buf.append(" ").append(propertyName).append(".time: \t").append(now).append("\n");
this.attrTimeStamps.put(key + "." + propertyName, now);
}
}
if (!propertyName.endsWith(".remove"))
{
buf.append("- ").append(valueKey).append(": \t").append(valueClass.getSimpleName()).append("\n");
}
}
}
// --------------- Yaml Change ---------------
public String getYamlChange()
{
String result = this.yamlChangeText;
this.yamlChangeText = "";
return result;
}
// --------------- Time Stamps ---------------
/**
* @deprecated since 1.2; unused
*/
@Deprecated
public String getLastTimeStamps()
{
LinkedHashMap<String, String> user2TimeStampMap = this.getLastTimeStampMap();
StringBuilder buf = new StringBuilder();
for (Entry<String, String> e : user2TimeStampMap.entrySet())
{
buf.append(e.getValue()).append(" ");
}
return buf.toString();
}
/**
* @since 1.2
*/
public String getAttributeTimeStamp(String attribute)
{
return this.attrTimeStamps.get(attribute);
}
/**
* @deprecated since 1.2; unused
*/
@Deprecated
public LinkedHashMap<String, String> getLastTimeStampMap()
{
LinkedHashMap<String, String> user2TimeStampMap = new LinkedHashMap<>();
for (Entry<String, String> e : this.attrTimeStamps.entrySet())
{
String timeStamp = e.getValue();
int pos = timeStamp.lastIndexOf('.');
String userName = timeStamp.substring(pos + 1);
String oldTimeStamp = user2TimeStampMap.get(userName);
if (oldTimeStamp == null || oldTimeStamp.compareTo(timeStamp) < 0)
{
user2TimeStampMap.put(userName, timeStamp);
}
}
return user2TimeStampMap;
}
/**
* @deprecated since 1.2; unused
*/
@Deprecated
public LinkedHashMap<String, String> getLastTimeStampMap(String lastTimeStamps)
{
LinkedHashMap<String, String> user2TimeStampMap = new LinkedHashMap<>();
String[] split = lastTimeStamps.split("\\s+");
for (String s : split)
{
int pos = s.lastIndexOf('.');
String user = s.substring(pos + 1);
user2TimeStampMap.put(user, s);
}
return user2TimeStampMap;
}
}
<file_sep>package org.fulib.yaml;
import java.io.IOException;
import java.io.Writer;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.function.BiPredicate;
import java.util.function.Consumer;
import java.util.function.Function;
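/**
 * An in-memory event log in which every event is a map of string keys and values.
 * Events are ordered by a strictly increasing timestamp stored under
 * {@link #EVENT_TIMESTAMP} and are deduplicated by their {@link #EVENT_KEY}:
 * appending an event whose key is already known replaces the older event.
 * Listeners registered via {@link #addEventListener(Consumer)} are notified about every
 * appended event, and the whole log can be serialized to yaml via {@link #encodeYaml()}.
 */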
public class EventSource
{
// =============== Constants ===============
public static final String EVENT_KEY = ".eventKey";
public static final String EVENT_TIMESTAMP = ".eventTimestamp";
public static final String EVENT_TYPE = "eventType";
// =============== Fields ===============
private final Yamler yamler = new Yamler();
private final List<Consumer<? super Map<String, String>>> eventListeners = new ArrayList<>();
private final Map<String, Long> keyToTimeStampMap = new HashMap<>();
private final SortedMap<Long, Map<String, String>> timeStampToEventMap = new TreeMap<>();
private long lastEventTime;
private long oldEventTimeStamp = 0;
public DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS");
// =============== Properties ===============
public long getLastEventTime()
{
return this.lastEventTime;
}
// =============== Methods ===============
// --------------- Bulk Retrieval ---------------
/**
* Gets all events after or at the specified timestamp.
*
* @param since
* the timestamp
*
* @return all events after or at the specified timestamp
*
* @deprecated since 1.2; use {@link #getEvents(long)} instead
*/
@Deprecated
public SortedMap<Long, LinkedHashMap<String, String>> pull(long since)
{
return this.pull(since, (Function<Map.Entry<Long, LinkedHashMap<String, String>>, Boolean>) null);
}
/**
* Gets all events after or at the specified timestamp that have any one of the relevant event types.
*
* @param since
* the timestamp
* @param relevantEventTypes
* the relevant event types
*
* @return all events after or at the specified timestamp that have any one of the relevant event types
*
* @deprecated since 1.2; use {@link #getEvents(long, String...)} instead
*/
@Deprecated
public SortedMap<Long, LinkedHashMap<String, String>> pull(long since, String... relevantEventTypes)
{
final Set<String> eventTypes = new HashSet<>(Arrays.asList(relevantEventTypes));
return this.pull(since, e -> eventTypes.contains(e.getValue().get(EVENT_KEY)));
}
/**
* Gets all events after or at the specified timestamp that fulfill the given predicate.
*
* @param since
* the timestamp
* @param filterOp
* the predicate on timestamp and event
*
* @return all events after or at the specified timestamp that fulfill the given predicate
*
* @deprecated since 1.2; use {@link #getEvents(long, BiPredicate)} instead
*/
@Deprecated
public SortedMap<Long, LinkedHashMap<String, String>> pull(long since,
Function<Map.Entry<Long, LinkedHashMap<String, String>>, Boolean> filterOp)
{
final SortedMap<Long, Map<String, String>> tailMap = this.timeStampToEventMap.tailMap(since);
final TreeMap<Long, LinkedHashMap<String, String>> resultMap = new TreeMap<>();
for (Map.Entry<Long, Map<String, String>> entry : tailMap.entrySet())
{
final LinkedHashMap<String, String> linkedEvent = makeLinked(entry.getValue());
if (filterOp == null || filterOp.apply(new AbstractMap.SimpleEntry<>(entry.getKey(), linkedEvent)))
{
resultMap.put(entry.getKey(), linkedEvent);
}
}
return resultMap;
}
/**
* Gets all events, sorted by timestamp.
*
* @return all events, sorted by timestamp
*
* @since 1.2
*/
public SortedMap<Long, Map<String, String>> getEvents()
{
return Collections.unmodifiableSortedMap(this.timeStampToEventMap);
}
/**
* Gets all events after or at the specified timestamp.
*
* @param since
* the timestamp
*
* @return all events after or at the specified timestamp
*
* @since 1.2
*/
public SortedMap<Long, Map<String, String>> getEvents(long since)
{
return Collections.unmodifiableSortedMap(this.timeStampToEventMap.tailMap(since));
}
/**
* Gets all events after or at the specified timestamp that have any one of the relevant event types.
*
* @param since
* the timestamp
* @param relevantEventTypes
* the relevant event types
*
* @return all events after or at the specified timestamp that have any one of the relevant event types
*
* @since 1.2
*/
public SortedMap<Long, Map<String, String>> getEvents(long since, String... relevantEventTypes)
{
final Set<String> eventTypes = new HashSet<>(Arrays.asList(relevantEventTypes));
return this.getEvents(since, (k, v) -> eventTypes.contains(v.get(EVENT_KEY)));
}
/**
* Gets all events after or at the specified timestamp that fulfill the given predicate.
*
* @param since
* the timestamp
* @param filterOp
* the predicate on timestamp and event
*
* @return all events after or at the specified timestamp that fulfill the given predicate
*
* @since 1.2
*/
public SortedMap<Long, Map<String, String>> getEvents(long since,
BiPredicate<? super Long, ? super Map<String, String>> filterOp)
{
final SortedMap<Long, Map<String, String>> events = this.timeStampToEventMap.tailMap(since);
if (filterOp == null)
{
return events;
}
final SortedMap<Long, Map<String, String>> result = new TreeMap<>();
for (final Map.Entry<Long, Map<String, String>> entry : events.entrySet())
{
final Long key = entry.getKey();
final Map<String, String> value = entry.getValue();
if (filterOp.test(key, value))
{
result.put(key, value);
}
}
return result;
}
// --------------- Single Events ---------------
/**
* Gets the newest event with the given key, or null if not found.
*
* @param eventKey
* the event key
*
* @return the newest event with the given key
*
* @deprecated since 1.2; use {@link #getNewestEvent(String)} instead
*/
@Deprecated
public LinkedHashMap<String, String> getEvent(String eventKey)
{
return makeLinked(this.getNewestEvent(eventKey));
}
/**
* Gets the newest event with the given key, or null if not found.
*
* @param eventKey
* the event key
*
* @return the newest event with the given key
*
* @since 1.2
*/
public Map<String, String> getNewestEvent(String eventKey)
{
final Long timeStamp = this.keyToTimeStampMap.get(eventKey);
return timeStamp != null ? this.timeStampToEventMap.get(timeStamp) : null;
}
/**
* Checks whether the given event was already overwritten, i.e. whether a newer event with the same key exists.
*
* @param event
* the event
*
* @return true if the event was overwritten, false otherwise.
*
* @deprecated since 1.2; use {@link #isOverwritten(Map)} instead
*/
@Deprecated
public boolean isOverwritten(LinkedHashMap<String, String> event)
{
return this.isOverwritten((Map<String, String>) event);
}
/**
* Checks whether the given event was already overwritten, i.e. whether a newer event with the same key exists.
*
* @param event
* the event
*
* @return true if the event was overwritten, false otherwise.
*
* @since 1.2
*/
public boolean isOverwritten(Map<String, String> event)
{
String eventKey = event.get(EVENT_KEY);
String eventTimeTxt = event.get(EVENT_TIMESTAMP);
Long storedTime = this.keyToTimeStampMap.get(eventKey);
if (storedTime == null)
{
return false;
}
String storedTimeTxt = this.dateFormat.format(storedTime);
return storedTimeTxt.compareTo(eventTimeTxt) >= 0;
}
// --------------- Modification ---------------
public void addEventListener(Consumer<? super Map<String, String>> listener)
{
this.eventListeners.add(listener);
}
/**
* Sets the timestamp to use for the next event added with {@link #append(Map)}.
*
* @param oldEventTimeStamp
* the old timestamp, as a string
*
* @return this instance, to allow method chaining
*
* @deprecated since 1.2; parse the string yourself and use {@link #setOldEventTimeStamp(long)}
*/
@Deprecated
public EventSource setOldEventTimeStamp(String oldEventTimeStamp)
{
if (oldEventTimeStamp == null)
{
return this; //========================
}
long oldTimeStamp = 0;
try
{
oldTimeStamp = this.dateFormat.parse(oldEventTimeStamp).getTime();
}
catch (ParseException e)
{
e.printStackTrace();
}
return this.setOldEventTimeStamp(oldTimeStamp);
}
/**
* Sets the timestamp to use for the next event added with {@link #append(Map)}.
*
* @param oldEventTimeStamp
* the old timestamp, as a string
*
* @return this instance, to allow method chaining
*
* @since 1.2
*/
public EventSource setOldEventTimeStamp(long oldEventTimeStamp)
{
this.oldEventTimeStamp = oldEventTimeStamp;
return this;
}
/**
* Adds the given event to this event source.
*
* @param event
* the event
*
* @return this instance, to allow method chaining
*
* @deprecated since 1.2; use {@link #append(Map)} instead
*/
@Deprecated
public EventSource append(LinkedHashMap<String, String> event)
{
return this.append((Map<String, String>) event);
}
/**
* Adds the given event to this event source.
*
* @param event
* the event
*
* @return this instance, to allow method chaining
*
* @since 1.2
*/
public EventSource append(Map<String, String> event)
{
String timestampString;
this.setOldEventTimeStamp(event.get(EVENT_TIMESTAMP));
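      // pick the timestamp for this event: reuse the event's own (old) timestamp if it is
      // newer than anything appended so far, otherwise take the current time, falling back
      // to lastEventTime + 1 so that timestamps stay strictly increasing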
if (this.oldEventTimeStamp > this.lastEventTime)
{
this.lastEventTime = this.oldEventTimeStamp;
}
else
{
long currentTime = System.currentTimeMillis();
if (currentTime > this.lastEventTime)
{
this.lastEventTime = currentTime;
}
else
{
this.lastEventTime++;
}
}
timestampString = this.dateFormat.format(this.lastEventTime);
this.oldEventTimeStamp = 0;
event.put(EVENT_TIMESTAMP, timestampString);
String key = event.get(EVENT_KEY);
if (key != null)
{
Long oldNum = this.keyToTimeStampMap.get(key);
if (oldNum != null)
{
this.timeStampToEventMap.remove(oldNum);
}
}
this.keyToTimeStampMap.put(key, this.lastEventTime);
this.timeStampToEventMap.put(this.lastEventTime, event);
for (Consumer<? super Map<String, String>> listener : this.eventListeners)
{
listener.accept(event);
}
return this;
}
/**
* Parses the string as a YAML object list and adds each object as an event via {@link #append(Map)}.
*
* @param yaml
* the YAML object list
*
* @return this instance, to allow method chaining
*
* @deprecated since 1.2; parse the YAML yourself and use {@link #append(Map)}
*/
@Deprecated
public EventSource append(String yaml)
{
if (yaml == null)
{
return this; //===========================================
}
ArrayList<LinkedHashMap<String, String>> list = this.yamler.decodeList(yaml);
for (LinkedHashMap<String, String> event : list)
{
this.append(event);
}
return this;
}
// --------------- Conversion ---------------
public String encodeYaml()
{
return YamlGenerator.serialize(this.timeStampToEventMap.values());
}
public void encodeYaml(Writer writer) throws IOException
{
YamlGenerator.serialize(this.timeStampToEventMap.values(), writer);
}
// =============== Static Methods ===============
@Deprecated // only used by legacy methods
private static LinkedHashMap<String, String> makeLinked(Map<String, String> event)
{
return event instanceof LinkedHashMap ? (LinkedHashMap<String, String>) event : new LinkedHashMap<>(event);
}
/**
* Encodes the events as a list of YAML objects.
*
* @param events
* the events
*
* @return the encoded YAML object list
*
* @deprecated since 1.2; use {@link YamlGenerator#serialize(Iterable) YamlGenerator.encodeYaml}{@code (events.values())} instead
*/
@Deprecated
public static String encodeYaml(SortedMap<Long, ? extends Map<String, String>> events)
{
return YamlGenerator.serialize(events.values());
}
/**
* Encodes the events as a list of YAML objects.
*
* @param events
* the events
*
* @return the encoded YAML object list
*
* @deprecated since 1.2; use {@link YamlGenerator#serialize(Iterable)} instead
*/
@Deprecated
public static String encodeYaml(List<? extends Map<String, String>> events)
{
return YamlGenerator.serialize(events);
}
/**
* Encodes the event as a YAML object.
*
* @param event
* the event
*
* @return the encoded YAML object
*
* @deprecated since 1.2; use {@link YamlGenerator#serialize(Map)} instead
*/
@Deprecated
public static String encodeYaml(LinkedHashMap<String, String> event)
{
return YamlGenerator.serialize(event);
}
}
<file_sep>package org.fulib.yaml;
import java.util.Set;
/**
* A reflector specialized for {@link YamlObject} instances.
*/
public class YamlObjectReflector extends Reflector
{
// =============== Fields ===============
private YamlObject yamlObject;
// =============== Constructors ===============
/**
* @param newObject
* the yaml object (must be a {@link YamlObject} instance)
*
* @deprecated since 1.2; use {@link #YamlObjectReflector(YamlObject)} instead
*/
@Deprecated
public YamlObjectReflector(Object newObject)
{
this((YamlObject) newObject);
}
/**
* @param yamlObject
* the yaml object
*
* @since 1.2
*/
public YamlObjectReflector(YamlObject yamlObject)
{
this.yamlObject = yamlObject;
}
// =============== Properties ===============
@Override
public Set<String> getOwnProperties()
{
return this.yamlObject.getProperties().keySet();
}
@Override
public Set<String> getAllProperties()
{
return this.getOwnProperties();
}
// =============== Methods ===============
@Override
public Object newInstance()
{
return new YamlObject();
}
@Override
public Object getValue(Object object, String attribute)
{
return this.yamlObject.get(attribute);
}
@Override
public Object setValue(Object object, String attribute, Object value)
{
return this.yamlObject.put(attribute, value);
}
@Override
public void removeObject(Object object)
{
}
}
<file_sep>package org.fulib.yaml;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.StringTokenizer;
import java.util.function.BiConsumer;
import java.util.function.Supplier;
/**
* Yamler reads simple key value pairs in YAML syntax.
* <p>
* Example:
* <pre><code>
* String yaml = "msgType: newPlayer\n"
* + "login: albert\n"
* + "colors: blue red \n";
*
* Yamler yamler = new Yamler();
* Map<String, String> map = yamler.decode(yaml);
* // {msgType=newPlayer, login=albert, colors=blue red}
* </code></pre>
*/
public class Yamler
{
// =============== Constants ===============
private static final int LEADING_CONTEXT_CHARS = 10;
private static final int TRAILING_CONTEXT_CHARS = 20;
// =============== Fields ===============
private String yaml;
private StringTokenizer tokenizer;
private String lookAheadToken;
private String currentToken;
private int currentPos;
private int lookAheadPos;
// =============== Constructors ===============
public Yamler()
{
// empty
}
public Yamler(String yaml)
{
this.setInput(yaml);
}
// =============== Properties ===============
private void setInput(String yaml)
{
this.yaml = yaml;
this.tokenizer = new StringTokenizer(yaml);
this.lookAheadToken = null;
this.nextToken();
this.nextToken();
}
public String getCurrentToken()
{
return this.currentToken;
}
public int getCurrentPos()
{
return this.currentPos;
}
public String getLookAheadToken()
{
return this.lookAheadToken;
}
public int getLookAheadPos()
{
return this.lookAheadPos;
}
// =============== Methods ===============
public LinkedHashMap<String, String> decode(String yaml)
{
LinkedHashMap<String, String> result = new LinkedHashMap<>();
this.decode(yaml, result::put);
return result;
}
/**
* @since 1.2
*/
public void decode(String yaml, BiConsumer<String, String> consumer)
{
this.setInput(yaml);
while (this.currentToken != null && this.currentToken.endsWith(":"))
{
String key = this.stripColon(this.currentToken);
this.nextToken();
String value = "";
int valueStart = this.currentPos;
// many values
while (this.currentToken != null && !this.currentToken.endsWith(":"))
{
value = yaml.substring(valueStart, this.currentPos + this.currentToken.length());
this.nextToken();
}
consumer.accept(key, value);
}
}
public ArrayList<LinkedHashMap<String, String>> decodeList(String yaml)
{
ArrayList<LinkedHashMap<String, String>> result = new ArrayList<>();
this.decodeList(yaml, () -> {
LinkedHashMap<String, String> map = new LinkedHashMap<>();
result.add(map);
return map::put;
});
return result;
}
/**
* @since 1.2
*/
public void decodeList(String yaml, Supplier<? extends BiConsumer<String, String>> supplier)
{
this.setInput(yaml);
while ("-".equals(this.currentToken))
{
final BiConsumer<String, String> consumer = supplier.get();
this.nextToken();
while (this.currentToken != null && this.currentToken.endsWith(":"))
{
String key = this.stripColon(this.currentToken);
this.nextToken();
String value = this.currentToken;
this.nextToken();
consumer.accept(key, value);
}
}
}
public String nextToken()
{
this.currentToken = this.lookAheadToken;
this.currentPos = this.lookAheadPos;
if (this.tokenizer.hasMoreTokens())
{
this.lookAheadToken = this.tokenizer.nextToken();
int currentLength = 0;
if (this.currentToken != null)
{
currentLength = this.currentToken.length();
}
this.lookAheadPos = this.yaml.indexOf(this.lookAheadToken, this.lookAheadPos + currentLength);
// lookAheadPos = scanner.match().start();
}
else
{
this.lookAheadToken = null;
}
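      // a look-ahead token starting with '"' opens a quoted string: keep consuming tokens
      // until the closing (unescaped) quote and unescape \" within the stored content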
if (this.lookAheadToken != null && this.lookAheadToken.startsWith("\""))
{
// get up to end of string
int stringStartPos = this.lookAheadPos + 1;
String subToken = this.lookAheadToken;
//MatchResult match = scanner.match();
int subTokenEnd = this.lookAheadPos + subToken.length();
while (subTokenEnd < stringStartPos + 1
|| (!subToken.endsWith("\"") || subToken.endsWith("\\\"")) && this.tokenizer.hasMoreTokens())
{
subToken = this.tokenizer.nextToken();
subTokenEnd = this.yaml.indexOf(subToken, subTokenEnd) + subToken.length();
}
this.lookAheadToken = this.yaml.substring(stringStartPos, subTokenEnd - 1).replace("\\\"", "\"");
}
return this.currentToken;
}
public String stripColon(String key)
{
String id = key;
if (key.endsWith(":"))
{
id = key.substring(0, key.length() - 1);
}
else
{
this.printError("key does not end with ':' " + key);
}
return id;
}
/**
* Encapsulates a YAML value by enclosing it in quotes ("), if necessary.
*
* @param value
* the YAML value to encapsulate
*
* @return the encapsulated YAML value
*
* @deprecated since 1.2; use {@link YamlGenerator#encapsulate(String)} instead
*/
@Deprecated
public static String encapsulate(String value)
{
return YamlGenerator.encapsulate(value);
}
void printError(String msg)
{
int startPos = this.currentPos - LEADING_CONTEXT_CHARS;
if (startPos < 0)
{
startPos = 0;
}
int endPos = this.currentPos + TRAILING_CONTEXT_CHARS;
if (endPos >= this.yaml.length())
{
endPos = this.yaml.length();
}
final String info =
this.yaml.substring(startPos, this.currentPos) + "<--" + msg + "-->" + this.yaml.substring(this.currentPos,
endPos);
System.err.println(info);
}
}
| 1fa0c0a63aeb101ccb0a647e4a8d3d9423ccd853 | [
"Markdown",
"Java",
"Gradle"
] | 20 | Java | fujaba/fulibYaml | d0330918d2d1e9de8c473f3b0107adb13dcb72a9 | 3cdd63f0530fd5c1250f40b6b2168f9273d95b26 | |
refs/heads/master | <repo_name>PIUisUndefined/tripteaze<file_sep>/database-mongo/index.js
var mongoose = require('mongoose');
let uri;
if (!process.env.MONGODB_URI) {
  const config = require('../config.js');
uri = config.mongo;
} else {
console.log('PROCESS PICKED UP', process.env.MONGODB_URI)
uri = process.env.MONGODB_URI;
}
//URI is stored either on heroku or local config file
let Schema = mongoose.Schema;
mongoose.connect(uri);
var db = mongoose.connection;
db.on('error', function() {
console.log(uri);
console.log('mongoose connection error');
});
db.once('open', function() {
console.log('mongoose connected successfully');
});
function toLower (v) {
return v.toLowerCase();
}
var userSchema = Schema({
id: Schema.Types.ObjectId,
name: {type: String, set: toLower, index: true, required: [true, "can't be blank"]},
  password: String
});
var tripSchema = Schema({
id: Schema.Types.ObjectId,
city: String,
tripFromDate: Date,
tripToDate: Date,
isPublic: {type: Boolean, default: false},
isArchived: {type: Boolean, default: false},
//need to make sure each trip has a reference user
user: {type: Schema.Types.ObjectId, ref: 'User'}
});
var restaurantSchema = Schema({
id: {type: Number, index: true},
name: String,
url: String,
address: String,
zip: Number,
logo: String,
//latitude and longitude coordinates are placed in 'location' property
location: [{type: Number}],
price: Number,
//need to make sure each restaurant or event has a reference trip
trip: {type: Schema.Types.ObjectId, ref: 'Trip'}
});
var eventSchema = Schema({
id: {type: Number, index: true},
name: String,
description: String,
url: String,
start_time: String,
end_time: String,
is_free: Boolean,
organizer_id: Number,
venue_id: Number,
category_id: Number,
logo: String,
//need to make sure each restaurant or event has a reference trip
trip: {type: Schema.Types.ObjectId, ref: 'Trip'}
});
var User = mongoose.model('User', userSchema);
var Trip = mongoose.model('Trip', tripSchema);
var Restaurant = mongoose.model('Restaurant', restaurantSchema);
var Event = mongoose.model('Event', eventSchema);
let addNewTrip = (username, city, fromDate, toDate, callback) => {
User.findOne({name: username}, function (err, user) {
    if (err) {
      callback(err);
      return;
    }
Trip.create({
id: new mongoose.Types.ObjectId(),
city: city,
user: user.id,
// Dates need to be in YYYY-MM-DD format
tripFromDate: fromDate,
tripToDate: toDate
}, (err, data) => {
if(err) {
callback(err);
} else {
callback(null, data);
}
});
});
};
let addRestaurantToTrip = (food, username, city, callback) => {
//first find corresponding user
User.findOne({name: username}, function (err, user) {
if(err) {
console.log('error: ', err);
callback(err);
} else {
Trip.findOne({user: user.id, city: city}, function (err, trip) {
if(err) {
console.log('error', err);
callback(err);
} else {
Restaurant.findOneAndUpdate({ id: food.restaurant.id},
{$set: {
id: food.restaurant.id,
name: food.restaurant.name,
url: food.restaurant.url,
logo: food.restaurant.featured_image,
address: food.restaurant.location.address,
zip: food.restaurant.location.zipcode,
location: [food.restaurant.location.latitude, food.restaurant.location.longitude],
price: food.restaurant.price_range,
trip: trip.id
}
}, {upsert: true}, function(err) {
if(err) {
console.log('error: ', err);
callback(err);
} else {
callback();
}
}
);
}
//then add restaurant to database based on trip ID
});
}
//then find corresponding trip based on city for selected user
});
};
let addEventToTrip = (event, username, city, callback) => {
//first find corresponding user
User.findOne({name: username}, function (err, user) {
    if (err) {
      console.log('error: ', err);
      callback(err);
      return;
    }
//then find corresponding trip based on city for selected user
Trip.findOne({user: user.id, city: city}, function (err, trip) {
      if (err) {
        console.log('error', err);
        callback(err);
        return;
      }
//then add event to database based on trip ID
//need to look at eventbrite API for structure
Event.findOneAndUpdate({id: event.id},
{$set: {
name: event.name.text,
description: event.description.text,
id: event.id,
url: event.url,
start_time: event.start.local,
end_time: event.end.local,
is_free: event.is_free,
organizer_id: event.organizer_id,
venue_id: event.venue_id,
category_id: event.category_id,
logo: event.logo.url,
trip: trip.id
}
}, {upsert: true}, function(err) {
if(err) {
console.log('error: ', err);
callback(err);
} else {
callback();
}
}
);
});
});
};
//for signup page-takes in username and password and adds user info to database
//need to make sure usernames are unique in db
let addNewUser = (name, password) => {
User.findOneAndUpdate({name: name},
{$set: {
id: new mongoose.Types.ObjectId(),
name: name,
      password: password
}
}, {upsert: true},
function(err) {
if(err) {
console.log('error: ', err);
}
}
);
};
// checks if username already exists in the database and
// returns that user
let userExists = (username, cb) => {
// checks database based on input username
User.find({
name: username
}, (err, existingUser) => {
if (err) {
console.error('error in userExists: ', err);
} else {
// callback on the existing user if it exists
cb(existingUser);
}
})
};
//for login page-take in username and retrieve password from db
//on server side, bcrypt will be used to compare user input password to stored db password
//if they match user will be logged in, otherwise error message
let retrieveUserPassword = (username, callback) => {
User.find({name: username}, function(err, user) {
// If the user exists in the database
if (user.length > 0) {
// Then run the callback on that user's password
callback(null, user[0].password);
} else {
// Should probably send an alert or something...
console.log('user does not exist');
callback('user does not exist');
}
});
};
//for user page-display all existing trips for user after being logged in
let showUserTrips = (username, callback) => {
//first find corresponding user
User.findOne({name: username}, function (err, user) {
if(err || user === null) {
console.log('error: ', err);
callback(err);
} else {
//then find all trips for selected user
Trip.find({user: user.id}, function (err, trips) {
if(err) {
callback(err, null);
} else {
callback(null, trips);
}
});
}
});
};
let showTripEvents = (username, city, callback) => {
//first find corresponding user
User.findOne({name: username}, function (err, user) {
if(err || user === null) {
console.log('error: ', err);
callback(err);
} else {
//then find trip based on selected user and city
Trip.findOne({user: user.id, city: city}, function (err, trip) {
if(err || trip === null) {
console.log('error', err);
callback(err);
} else {
if(err) {
callback(err, null);
} else {
getTripEvents(trip.id, callback);
}
}
});
}
});
};
let showTripRestaurants = (username, city, callback) => {
//first find corresponding user
User.findOne({ name: username }, function (err, user) {
if (err || user === null) {
console.log('error: ', err);
callback(err);
} else {
//then find trip based on selected user and city
Trip.findOne({ user: user.id, city: city }, function (err, trip) {
if (err || trip === null) {
console.log('error', err);
callback(err);
} else {
if (err) {
callback(err, null);
} else {
getTripRestaurants(trip.id, callback);
}
}
});
}
});
};
//allows user to update whether trip is public, archived, and/or if the trip dates changed
//assumes username and city are known to obtain corresponding trip and update
let modifyTripDetails = (makePublic, makeArchived, username, fromDate, toDate, city, callback) => {
//first find corresponding user
User.findOne({name: username}, function (err, user) {
    if (err) {
      callback(err);
      console.log('error: ', err);
      return;
    }
//then find corresponding trip based on city for selected user
Trip.findOne({user: user.id, city: city}, function (err, trip) {
      if (err) {
        callback(err);
        console.log('error', err);
        return;
      }
//makePublic = makePublic || trip.isPublic;
makeArchived = makeArchived || trip.isArchived;
      const newFromDate = fromDate || trip.tripFromDate;
      const newToDate = toDate || trip.tripToDate;
Trip.update({id: trip.id},
{$set:
{
isPublic: makePublic,
isArchived: makeArchived,
tripFromDate: newFromDate,
tripToDate: newToDate
}
}, function (err) {
if (err) {
callback(err);
console.log('error: ', err);
} else {
callback();
}
}
);
});
});
};
const getTripEvents = (tripID, callback) => {
Event.find({ trip: tripID }, function (err, events) {
if (err) {
callback(err, null);
} else {
callback(null, events);
}
});
}
const getTripRestaurants = (tripID, callback) => {
Restaurant.find({ trip: tripID }, function (err, eatin) {
if (err) {
callback(err, null);
} else {
callback(null, eatin);
}
});
}
//removal function assumes we know the ID of the restaurant, event,
//or trip that we are wanting to remove from the database
let remove = (modelType, ID, callback) => {
if(modelType === 'restaurant') {
Restaurant.remove( {id: ID}, function (err) {
if(err) {
console.log('error: ',err);
callback(err)
} else {
callback()
}
});
} else if (modelType === 'event') {
Event.remove( {id: ID}, function (err) {
      if (err) {
        console.log('error: ', err);
        callback(err);
      } else {
        callback();
      }
});
} else if (modelType === 'trip') {
Trip.remove( {id: ID}, function (err) {
      if (err) {
        console.log('error: ', err);
        callback(err);
      } else {
        callback();
      }
});
} else {
console.log('must specify correct model type to remove');
    callback('must specify correct model type to remove');
}
};
//for home page-displays all existing public trips
let showAllPublicTrips = (callback) => {
Trip.find({isPublic: true}, function(err, trips) {
if(err) {
callback(err, null);
} else {
callback(null, trips);
}
});
};
module.exports.addNewTrip = addNewTrip;
module.exports.addRestaurantToTrip = addRestaurantToTrip;
module.exports.addEventToTrip = addEventToTrip;
module.exports.addNewUser = addNewUser;
module.exports.retrieveUserPassword = retrieveUserPassword;
module.exports.showUserTrips = showUserTrips;
module.exports.modifyTripDetails = modifyTripDetails;
module.exports.remove = remove;
module.exports.showAllPublicTrips = showAllPublicTrips;
module.exports.userExists = userExists;
module.exports.showTripEvents = showTripEvents;
module.exports.showTripRestaurants = showTripRestaurants;
module.exports.getTripRestaurants = getTripRestaurants;
module.exports.getTripEvents = getTripEvents;
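// Example usage (callback style; the names and dates below are illustrative only):
//
//   addNewUser('alice', hashedPassword);
//   addNewTrip('alice', 'Austin', '2018-05-01', '2018-05-05', (err, trip) => { /* ... */ });
//   showUserTrips('alice', (err, trips) => { /* ... */ });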
// {restaurant: {
// R: { res_id: 16608481 },
// apikey: '<KEY>',
// id: '16608481',
// name: '<NAME>',
// url: 'https://www.zomato.com/kingston-se-sa/old-woolstore-kingston-se?utm_source=api_basic_user&utm_medium=api&utm_campaign=v2.1',
// location: {
// address: '11 Hansen St Kingston Sa, Kingston SE',
// locality: 'Kingston SE',
// city: 'Kingston SE',
// city_id: 1814,
// latitude: '-36.8287500000',
// longitude: '139.8503820000',
// zipcode: '5275',
// country_id: 14,
// locality_verbose: 'Kingston SE, Kingston SE'
// },
// switch_to_order_menu: 0,
// cuisines: 'Others',
// average_cost_for_two: 30,
// price_range: 3,
// currency: '$',
// offers: [],
// thumb: '',
// user_rating: {
// aggregate_rating: '2.8',
// rating_text: 'Average',
// rating_color: 'FFBA00',
// votes: '4'
// }
// }
// }<file_sep>/react-client/src/index.jsx
import React from 'react';
import ReactDOM from 'react-dom';
import $ from 'jquery'; //replace with axios
import { Provider } from 'react-redux';
import { createStore, applyMiddleware } from 'redux';
import thunk from 'redux-thunk';
import lightBaseTheme from 'material-ui/styles/baseThemes/lightBaseTheme';
import MuiThemeProvider from 'material-ui/styles/MuiThemeProvider';
import getMuiTheme from 'material-ui/styles/getMuiTheme';
import reducer from './reducers/index.js';
import Home from './components/homePage.jsx';
import User from './components/userPage.jsx';
import Search from './components/searchPage.jsx';
import {
BrowserRouter as Router,
Route,
Switch
} from 'react-router-dom';
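// Root wires together the Redux store, the Material-UI theme and the client-side routes:
// '/' is the public home page, '/trips' shows the logged-in user's trips, '/plan' is the search page.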
const Root = ({ store }) => (
<Provider store={store}>
    <MuiThemeProvider muiTheme={getMuiTheme(lightBaseTheme)}>
<Router>
<Switch>
<Route exact path="/" component={Home} />
<Route path = "/trips" component={User} />
<Route path="/plan" component={Search} />
</Switch>
</Router>
</MuiThemeProvider>
</Provider>
)
let store = createStore(reducer, applyMiddleware(thunk))
ReactDOM.render( <Root store={store} />
, document.getElementById('app'));
<file_sep>/react-client/src/components/restaurants.jsx
import React from 'react';
import { GridList, GridTile } from 'material-ui/GridList';
import IconButton from 'material-ui/IconButton';
import AddBorder from 'material-ui/svg-icons/content/add-circle-outline';
import Snackbar from 'material-ui/Snackbar';
//these are search results for restaurants
const Eatin = (props) => {
const styles = {
root: {
display: 'flex',
flexWrap: 'wrap',
justifyContent: 'space-around',
// width: '50%'
},
gridList: {
width: '100%',
height: 800,
overflowY: 'auto',
},
anchor: {
color: 'white'
}
};
if (props.restaurants.length !== 0) {
return (
<div style={styles.root}>
<GridList
cellHeight={180}
style={styles.gridList}
>
{props.restaurants.map((food) => {
if (food.restaurant.featured_image) {
return (
<GridTile
key={food.restaurant.id}
title= {<a style = {styles.anchor} href = {food.restaurant.url} target = "_blank" >{food.restaurant.name}</a>}
subtitle= {`Cost for Two $${food.restaurant.average_cost_for_two}`}
actionIcon={<IconButton onClick={() => props.addFoodToTrip(food, props.user, props.city)}><AddBorder color="white" /></IconButton>}
>
<Snackbar open={props.foodSnackbar} message={'Restaurant has been added to your trip!'} autoHideDuration={3000} onRequestClose={props.onRequestClose}/>
<img src={food.restaurant.featured_image} alt='' />
</GridTile>
);
} else {
return(
<GridTile
key={food.restaurant.id}
title={food.restaurant.name}
subtitle={`Cost for Two $${food.restaurant.average_cost_for_two}`}
actionIcon={<IconButton onClick={() => props.addFoodToTrip(food, props.user, props.city)}><AddBorder color="white" /></IconButton>}
>
<Snackbar open={props.foodSnackbar} message={'Restaurant has been added to your trip!'} autoHideDuration={3000} onRequestClose={props.onRequestClose}/>
<img src='' />
</GridTile>
)
}
})}
</GridList>
</div>
);
} else {
return null;
}
}
export default Eatin;<file_sep>/react-client/src/reducers/index.js
const initialState = {
loading: false,
username: '',
password: '',
authenticated: false,
currentError: '', //is never actually used rn
publicTrips: [],
userTrips: [],
city: '',
tripFromDate: '',
tripToDate: '',
isPublic: false,
minToDate: {}, // so users cannot set a "to" date before the "from" date
deleteSnackbar: false,
publicSnackbar: false,
privateSnackbar: false,
eventQuery: '',
eventResults: [],
foodQuery: '',
foodResults: [],
eventSnackbar: false,
foodSnackbar: false,
activeTrip: {
status: false,
index: 0 //location of trip in 'userTrips'
}
};
//you should probably look up combine reducers to make this easier to look at
const reducer = function (state = initialState, action) { //if state is undefined, state becomes inital state
switch (action.type) {
case 'TOGGLE_LOADING' :
return Object.assign({}, state, {loading: !state.loading});
case 'SHOW_USER_TRIPS' :
return Object.assign({}, state, {userTrips: action.payload});
case 'SHOW_PUBLIC_TRIPS' :
return Object.assign({}, state, {publicTrips: action.payload});
case 'RESET_USER_TRIPS' :
return Object.assign({}, state, {userTrips: []});
case 'ACTIVATE_EVENT_SNACKBAR' :
return Object.assign({}, state, { eventSnackbar: true });
case 'DEACTIVATE_EVENT_SNACKBAR' :
return Object.assign({}, state, { eventSnackbar: false });
case 'ACTIVATE_FOOD_SNACKBAR' :
return Object.assign({}, state, { foodSnackbar: true });
case 'DEACTIVATE_FOOD_SNACKBAR' :
return Object.assign({}, state, { foodSnackbar: false });
case 'ACTIVATE_DELETE_SNACKBAR' :
return Object.assign({}, state, { deleteSnackbar: true });
case 'DEACTIVATE_DELETE_SNACKBAR' :
return Object.assign({}, state, { deleteSnackbar: false });
case 'ACTIVATE_PUBLIC_SNACKBAR' :
return Object.assign({}, state, { publicSnackbar: true });
case 'DEACTIVATE_PUBLIC_SNACKBAR' :
return Object.assign({}, state, { publicSnackbar: false });
case 'ACTIVATE_PRIVATE_SNACKBAR' :
return Object.assign({}, state, { privateSnackbar: true });
case 'DEACTIVATE_PRIVATE_SNACKBAR' :
return Object.assign({}, state, { privateSnackbar: false });
case 'UPDATE_EVENT_RESULTS' :
return Object.assign({}, state, {eventResults: action.payload})
case 'UPDATE_FOOD_RESULTS':
return Object.assign({}, state, {foodResults: action.payload})
case 'UPDATE_USERNAME':
return Object.assign({}, state, { username: action.payload })
case 'UPDATE_PASSWORD':
return Object.assign({}, state, { password: action.payload })
case 'AUTHEN':
return Object.assign({}, state, { authenticated: true, password: '' });
case 'LOGOUT':
return initialState;
case 'UPDATE_EVENT_QUERY':
return Object.assign({}, state, {eventQuery: action.payload});
case 'UPDATE_FOOD_QUERY':
return Object.assign({}, state, {foodQuery: action.payload});
case 'UPDATE_CITY' :
return Object.assign({}, state, { city: action.payload });
case 'UPDATE_TO_PUBLIC' :
return Object.assign({}, state, { isPublic: true });
case 'UPDATE_TO_PRIVATE' :
return Object.assign({}, state, { isPublic: false });
case 'UPDATE_TRIP_FROM_DATE':
return Object.assign({}, state, { tripFromDate: action.payload })
case 'UPDATE_TRIP_TO_DATE':
return Object.assign({}, state, { tripToDate: action.payload })
case 'SET_MIN_TO_DATE':
return Object.assign({}, state, { minToDate: action.payload })
case 'ACTIVATE' :
return Object.assign({}, state, { activeTrip: { status: true, index: action.payload }})
case 'DEACTIVATE' :
return Object.assign({}, state, { activeTrip: { status: false, index: 0 } })
case 'ERROR' :
return Object.assign({}, state, { currentError: action.payload })
case 'REFRESH_TRIP_EVENTS':
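      // replace the active trip's events without mutating state: copy the trip,
      // swap in the new events, and splice the copy into a copied trips array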
const oldTrip = state.userTrips[state.activeTrip.index];
const newTrip = Object.assign({}, oldTrip, { events: action.payload });
const newTrips = state.userTrips.slice();
newTrips.splice(state.activeTrip.index, 1, newTrip);
return Object.assign({}, state, { userTrips: newTrips });
case 'REFRESH_TRIP_EATIN':
const oldTripEatin = state.userTrips[state.activeTrip.index];
const newTripEatin = Object.assign({}, oldTripEatin, { eatin: action.payload });
const newTripsEatin = state.userTrips.slice();
newTripsEatin.splice(state.activeTrip.index, 1, newTripEatin);
return Object.assign({}, state, { userTrips: newTripsEatin });
default:
return state; //if unrecognized action type nothing happens
}
}
export default reducer;<file_sep>/react-client/src/components/signup.jsx
import React from 'react';
import TextField from 'material-ui/TextField';
import Dialog from 'material-ui/Dialog';
import FlatButton from 'material-ui/FlatButton';
import RaisedButton from 'material-ui/RaisedButton';
import * as theme from './homePage.jsx'; // * does all named exports from that file
import * as loginStyle from './login.jsx';
import * as searchStyle from './searchPage.jsx';
import lightBaseTheme from 'material-ui/styles/baseThemes/lightBaseTheme';
import MuiThemeProvider from 'material-ui/styles/MuiThemeProvider';
import getMuiTheme from 'material-ui/styles/getMuiTheme';
import { cyan50, cyan100, cyan200, cyan300, cyan400, cyan500, cyan600, cyan700, cyan800, cyan900 } from 'material-ui/styles/colors';
class Signup extends React.Component {
constructor (props) {
super(props);
this.state = {
open: false
}
}
submit (event) {
event.preventDefault();
this.props.signup(this.props.username, this.props.password);
this.setState({ open: false });
this.props.forward();
}
changeUsername (event) {
this.props.updateUsername(event.target.value);
}
changePassword (event) {
this.props.updatePassword(event.target.value);
}
handleOpen () {
this.setState({ open: true });
};
handleClose () {
this.setState({ open: false });
};
render () {
const actions = [
<FlatButton
label="Join"
primary={true}
keyboardFocused={true}
onClick={this.submit.bind(this)}
style={loginStyle.styles.actionButtons}
/>,
<FlatButton
label="Cancel"
onClick={this.handleClose.bind(this)}
style={loginStyle.styles.actionButtons}
/>
];
return (
<MuiThemeProvider muiTheme={theme.muiTheme}>
<div>
<RaisedButton
label="Join"
onClick={this.handleOpen.bind(this)}
style={searchStyle.styles.navButtons}
/>
<Dialog
title="Sign Up"
actions={actions}
modal={false}
open={this.state.open}
onRequestClose={this.handleClose.bind(this)}>
<label>Username:</label>
<TextField id = "SUUsername" onChange = {this.changeUsername.bind(this)} />
<br/>
<label>Password:</label>
<TextField id = 'SUPassword' type="password" onChange = {this.changePassword.bind(this)} />
</Dialog>
</div>
</MuiThemeProvider>
)
}
}
export default Signup;<file_sep>/_README.md
# TripTeaze
> Pithy project description
## Usage
> Some usage instructions
## Requirements
- Node 0.10.x
## Development
npm run react-dev
npm run server-dev
### Installing Dependencies
From within the root directory:
npm install
You'll need a gitignored `config.js` file in the root directory for API keys: `token` for Eventbrite, `zomatoKey` for Zomato, and `mongo`, a URL to an mLab MongoDB database. Get your own keys or ask us for ours; a minimal sketch of the file is shown below.
The Heroku deployment uses `process.env` variables for these instead (`EVENTBRITE`, `ZOMATO` and `MONGODB_URI` respectively), so set those up on Heroku.
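A minimal `config.js` sketch (the property names follow the code and the list above; the values are placeholders and the exact export shape is an assumption):

```js
// config.js (gitignored): placeholder values only, swap in your own credentials
module.exports = {
  token: 'YOUR_EVENTBRITE_TOKEN',   // Eventbrite API token
  zomatoKey: 'YOUR_ZOMATO_API_KEY', // Zomato API key
  mongo: 'mongodb://dbuser:dbpassword@host:port/dbname' // mLab MongoDB connection URI
};
```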
### Roadmap
- Newfound bug: deleting events/activities on the user page deletes them from the wrong trip
- When you try to delete your last trip it does delete, but it doesn't disappear right away
- More search options
- Implement archiving of your own trips
- Sorting of your trips
- Comment/review system
- Friending
- Instagram API for more pics
- Getting session stuff to work?
- Passport
- Anonymous users creating trips
- Once trip dates have changed, remove any events outside of the new date range
- Use the archived parameter of trips somehow
- Make page transitions better
## Contributing
See [CONTRIBUTING.md](CONTRIBUTING.md) for contribution guidelines.
<file_sep>/react-client/src/actions/index.js
import axios from 'axios';
////////////////////////////////HOME PAGE STUFF\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\
const loading = () => ({type: 'TOGGLE_LOADING'});
const setUserTrips = (trips) => ({ type: 'SHOW_USER_TRIPS', payload: trips});
const setPublicTrips = (trips) => ({ type: 'SHOW_PUBLIC_TRIPS', payload: trips});
export const updateUsername = (username) => ({ type: 'UPDATE_USERNAME', payload: username });
export const updatePassword = (password) => ({ type: 'UPDATE_PASSWORD', payload: password });
export const login = (username, password) => {
return (dispatch) => {
return axios({
method: 'get',
url: '/login',
params: {
username: username,
password: <PASSWORD>
}
}).then(
results => {
if (results.data.error) {
alert(results.data.message);
} else {
dispatch(authenticate())
dispatch(fetchTrips(username));
}
},
error => { console.log('error', error); dispatch(badStuff(error)) }
);
}
};
export const signup = (username, password) => {
return (dispatch) => {
return axios({
method: 'post',
url: '/signup',
data: {
username: username,
password: <PASSWORD>
}
}).then(
results => {
if (results.data.error) {
alert(results.data.message);
} else {
dispatch(authenticate());
}
},
error => dispatch(badStuff(error))
);
};
}
export const authenticate = () => ({ type: 'AUTHEN' });
export const logOut = () => {
return (dispatch) => {
return axios({
method: 'get',
      url: '/logout',
}).then(
results => {
dispatch(deauthenticate());
}
)
}
}
export const deauthenticate = () => ({ type: 'LOGOUT' });
export const badStuff = (error) => ({type: 'ERROR', payload: error});
/////////////////////////////SEARCH PAGE STUFF \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\
/*****************************MAKE A TRIP *************************************/
export const fetchTrips = (param) => {
return (dispatch) => {
dispatch(loading());
return axios({
method: 'get',
url: '/trips',
params: {
search: param
}
}).then(
results => {
if (param === 'public') {
dispatch(setPublicTrips(results.data.trips));
dispatch(loading());
} else {
dispatch(setUserTrips(results.data.trips));
dispatch(loading());
}
},
error => dispatch(badStuff(error))
);
}
};
export const updateTripDates = (user, city, fromDate, toDate) => {
return dispatch => {
return axios({
method: 'patch',
url: '/plan',
data: {
user: user,
tripCity: city,
tripFromDate: fromDate,
tripToDate: toDate
}
}).then(
results => {
dispatch(updateFromDate(''));
dispatch(updateToDate(''));
dispatch(setMinToDate({}));
},
error => dispatch(badStuff(error))
)
}
}
export const updateFromDate = (date) => ({ type: 'UPDATE_TRIP_FROM_DATE', payload: date });
export const updateToDate = (date) => ({ type: 'UPDATE_TRIP_TO_DATE', payload: date });
export const setMinToDate = (date) => ({ type: 'SET_MIN_TO_DATE', payload: date });
export const updateCity = (city) => ({ type: 'UPDATE_CITY', payload: city });
export const makeNewTrip = (username, city, index, fromDate, toDate) => {
return (dispatch) => {
return axios({
method: 'post',
url: '/trips',
data: {
tripUser: username,
tripCity: city,
tripFromDate: fromDate,
tripToDate: toDate
}
}).then(
results => {
dispatch(activateTrip(index));
dispatch(fetchTrips(username));
dispatch(makePrivate());
dispatch(updateCity(''));
dispatch(updateToDate(''));
dispatch(setMinToDate({}));
dispatch(updateFromDate(''));
},
error => dispatch(badStuff(error))
);
};
}
export const activateTrip = (tripIndex) => {
//return { type: 'ACTIVATE', payload: tripIndex }
return (dispatch) => {
dispatch(updateFoodResults([]));
dispatch(updateEventResults([]));
dispatch(actuallyActivate(tripIndex));
}
};
const actuallyActivate = (tripIndex) => ({ type: 'ACTIVATE', payload: tripIndex });
export const deactivate = () => {
return (dispatch) => {
dispatch(updateFoodResults([]));
dispatch(updateEventResults([]));
dispatch(actuallyDeactivate());
}
};
const actuallyDeactivate = () => ({ type: 'DEACTIVATE' });
/***************************** EVENTS *************************************/
export const updateEventQuery = (query) => ({ type: 'UPDATE_EVENT_QUERY', payload: query });
export const searchEvents = (city, query, fromDate, toDate) => {
return (dispatch) => {
return axios({
method: 'post',
url: '/events',
data: {
tripCity: city,
eventQuery: query,
tripFromDate: fromDate,
tripToDate: toDate
}
}).then(
results => (dispatch(updateEventResults(results.data))),
error => dispatch(badStuff(error))
);
};
}
const updateEventResults = (searchResults) => ({ type: 'UPDATE_EVENT_RESULTS', payload: searchResults });
export const addEventToTrip = (event, username, city) => {
return (dispatch) => {
return axios({
method: 'post',
url: '/events/add',
data: {
tripEvent: event,
tripUser: username,
tripCity: city
}
}).then(
results => {
dispatch(fetchEventsFromTrip(username, city));
dispatch(activateEventSnackbar());
},
error => dispatch(badStuff(error))
);
};
}
export const fetchEventsFromTrip = (username, city) => {
//dispatch({ type: 'LOADING' });
return (dispatch) => {
return axios({
method: 'get',
url: '/events',
params: {
tripUser: username,
tripCity: city
}
}).then(
results => {dispatch(setTripEvents(results.data.events))},
      error => {dispatch(badStuff(error))}
)
}
}
const setTripEvents = (events) => ({ type: 'REFRESH_TRIP_EVENTS', payload: events });
export const activateEventSnackbar = () => ({type: 'ACTIVATE_EVENT_SNACKBAR'});
export const deactivateEventSnackbar = () => ({type: 'DEACTIVATE_EVENT_SNACKBAR'});
/***************************** FOOD *************************************/
export const updateFoodQuery = (query) => ({ type: 'UPDATE_FOOD_QUERY', payload: query });
export const searchForFood = (city, query) => {
return (dispatch) => {
return axios({
method: 'post',
url: '/foods',
data: {
tripCity: city,
foodQuery: query
}
}).then(
results => {
dispatch(updateFoodResults(results.data.foods))},
error => dispatch(badStuff(error))
)
}
}
const updateFoodResults = (searchResults) => ({ type: 'UPDATE_FOOD_RESULTS', payload: searchResults})
export const addFoodToTrip = (food, username, city) => {
return (dispatch) => {
return axios({
method: 'post',
url: '/foods/add',
data: {
tripFood: food,
tripUser: username,
tripCity: city
}
}).then(
results => {
dispatch(fetchFoodFromTrip(username, city));
dispatch(activateFoodSnackbar());
},
error => dispatch(badStuff(error))
);
};
}
export const fetchFoodFromTrip = (username, city) => {
//dispatch({ type: 'LOADING' });
return (dispatch) => {
return axios({
method: 'get',
url: '/foods',
params: {
tripUser: username,
tripCity: city
}
}).then(
results => {
dispatch(setTripEatin(results.data.foods)) },
      error => { dispatch(badStuff(error)) }
)
}
}
const setTripEatin = (foods) => ({ type: 'REFRESH_TRIP_EATIN', payload: foods });
export const activateFoodSnackbar = () => ({type: 'ACTIVATE_FOOD_SNACKBAR'});
export const deactivateFoodSnackbar = () => ({type: 'DEACTIVATE_FOOD_SNACKBAR'});
//////////////////////////////USER PAGE STUFF \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\
export const deleteTrip = (user, trip) => {
return (dispatch) => {
return axios ({
method: 'patch',
url: '/trips',
data: {
username: user,
tripID: trip.id
}
}).then (
results => {
dispatch(fetchTrips(user));
dispatch(activateDeleteSnackbar());
},
error => dispatch(badStuff(error))
)
}
}
export const deleteEvent = (event, username, city) => {
console.log('delete!')
return (dispatch) => {
return axios ({
method: 'post',
url: '/events/remove',
data: {
eventID: event.id
}
}).then (
results => { dispatch(fetchEventsFromTrip(username, city)) },
error => {dispatch(badStuff(error))}
);
};
}
export const deleteFood = (food, username, city) => {
console.log('delete!')
return (dispatch) => {
return axios({
method: 'post',
url: '/foods/remove',
data: {
foodID: food.id
}
}).then(
results => { dispatch(fetchFoodFromTrip(username, city)) },
error => { dispatch(badStuff(error)) }
);
};
}
export const toggleTripStatus = (user, trip) => {
return dispatch => {
return axios ({
method: 'patch',
url: '/trips',
data: {
user: user,
tripCity: trip.city,
public : !trip.isPublic
}
}).then (
results => {
dispatch(fetchTrips(user));
trip.isPublic === false ? dispatch(activatePublicSnackbar()) : dispatch(activatePrivateSnackbar());
},
error => dispatch(badStuff(error))
)
}
}
export const makePublic = () => ({type: 'UPDATE_TO_PUBLIC'});
export const makePrivate = () => ({type: 'UPDATE_TO_PRIVATE'});
export const activateDeleteSnackbar = () => ({type: 'ACTIVATE_DELETE_SNACKBAR'});
export const deactivateDeleteSnackbar = () => ({type: 'DEACTIVATE_DELETE_SNACKBAR'});
export const activatePublicSnackbar = () => ({type: 'ACTIVATE_PUBLIC_SNACKBAR'});
export const deactivatePublicSnackbar = () => ({type: 'DEACTIVATE_PUBLIC_SNACKBAR'});
export const activatePrivateSnackbar = () => ({type: 'ACTIVATE_PRIVATE_SNACKBAR'});
export const deactivatePrivateSnackbar = () => ({type: 'DEACTIVATE_PRIVATE_SNACKBAR'});
//ACTION_NAME must correspond with reducer switch option
// complex action example w/ async
// export const search = (searchTerm) => {
// return (dispatch) => {
// return makeAnAPIcall(searchTerm).then(
// searchResults => dispatch(updateResults(searchResults)),
// error => dispatch(badStuff(error))
// );
// };
// }
// const updateResults = (searchResults) => ({type: UPDATE_RESULTS, results: searchResults});
// const badStuff = (error) => ({type: ASYNC_ERROR, error: error});
// NOTES
// -dispatch is a Redux keyword for triggering actions
// -it's actually being used under the hood, but react-redux's mapDispatchToProps means we
// don't have to call dispatch ourselves when we invoke the action functions
// -actions are dispatched from the front-end (or here because of our middleware to handle asynchronicity)
// the return statement is automatically passed onto the reducer under the parameter action
// -the capital names are action types, commonly reducers use this key in a switch to decide what to do
// with the other parameters associated with the action (to modify the state)
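// Illustrative sketch (kept commented out like the example above, not meant to run here):
// roughly how a component consumes these action creators via react-redux; searchPage.jsx
// does exactly this, and `SomeComponent` is a made-up placeholder name.
//
// import { connect } from 'react-redux';
// import { bindActionCreators } from 'redux';
// import * as actions from '../actions/index.js';
//
// const mapStateToProps = state => ({ store: state });
// const mapDispatchToProps = dispatch => ({ actions: bindActionCreators(actions, dispatch) });
//
// // Inside the connected component, this.props.actions.fetchTrips('public') dispatches
// // the async thunk defined above without ever calling dispatch directly.
// export default connect(mapStateToProps, mapDispatchToProps)(SomeComponent);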
<file_sep>/server/index.js
const express = require('express');
const bodyParser = require('body-parser');
const bcrypt = require('bcrypt');
const session = require('express-session');
const db = require('../database-mongo/index.js');
const eventbrite = require('../APIhelper/eventbrite.js');
const zomato = require('../APIhelper/zomatoHelper.js')
const path = require('path');
const moment = require('moment');
const app = express();
app.use(bodyParser.json());
app.use(session({
secret: 'shhhhh af',
resave: false,
  saveUninitialized: true
}));
app.use(express.static(__dirname + '/../react-client/dist'));
/***********************************************************************/
/* login */
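// Compares a plaintext password against a stored bcrypt hash.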
app.checkPassword = (userName, pw, checkPw) => {
let match = false;
let unhashedPw = bcrypt.compareSync(pw, checkPw);
if (unhashedPw) {
match = true;
}
return match;
}
app.get('/login', (req, res) =>{
let userName = req.query.username
let password = req.query.password
db.retrieveUserPassword(userName, (err, userPw) => {
if (err) {
res.status(200).json({error: true, message: 'Sorry, we didn\'t recognize that username. Please try again!'});
} else if (app.checkPassword(userName, password, userPw)) {
req.session.loggedIn = true;
res.status(200).end()
} else {
console.log('Unmatching username and password')
res.status(200).json({error: true, message: 'Sorry, username and password do not match! Please try again!'});
}
})
})
app.get('/logout', (req, res) => {
  req.session.destroy((err) => {
    if (err) {
      return res.status(500).send(err); // report the failure instead of throwing after the fact
    }
    res.end(); // only respond once the session is actually destroyed
  });
});
app.get('/plan', (req, res) => {
res.sendFile(path.join(__dirname, '/../react-client/dist', 'index.html'));
});
/*************************** SIGN UP STUFF ***************************/
// Sign up
app.post('/signup', (req, res) => {
let username = req.body.username;
let password = req.body.password;
// Checks if the username already exists in the db
db.userExists(username, (existingUser) => {
// If the username already exists
if (existingUser.length > 0) {
console.log('Username already exists!');
// Redirect to the signup page
res.status(200).json({error: true, message: 'Sorry! username already in use! Please pick a different one!'})
// Else if new user
} else {
// Hash the password
bcrypt.hash(password, 10, (err, hash) => {
if (err) {
console.error('Error in hash password: ', err);
res.status(200).json({error: true, message: 'Sorry! unknown error on our end! Please try again'});
} else {
// Store the new user/hash in the db
db.addNewUser(username, hash);
console.log(`User '${username}' added to database`);
res.status(200).end()
}
});
}
});
});
// Creates new session after new user is added to the database
const createSession = (req, res, newUser) => {
return req.session.regenerate(() => {
req.session.user = newUser;
// Redirects to home page
res.redirect('/');
});
}
/*************************** TRIP STUFF ***************************/
app.get('/trips', (req, res) => {
const type = req.query.search; // right now tailored for public trips but can be adapted for user trips as well
if (type === 'public') {
db.showAllPublicTrips(function(err, data) {
if (err || !data) {
res.status(500).send(err);
} else {
getTripsEvents(data, function (err, tripsEvents) {
if (err) {
res.status(500).send(err);
} else {
res.status(200).json({ trips: tripsEvents });
}
});
}
});
} else if (!type) {
res.sendFile(path.join(__dirname, '/../react-client/dist', 'index.html'));
} else {
db.showUserTrips(type, function(err, data) {
if (err || !data) {
res.status(500).send(err);
} else {
getTripsEvents(data, function (err, fullTrips) {
if (err) {
res.status(500).send(err);
} else {
res.status(200).json({ trips: fullTrips });
}
});
}
});
}
});
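// Builds a summary object (id, city, flags, dates) for every trip, then attaches that trip's
// events and restaurants from the db before handing the completed list to the callback.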
const getTripsEvents = (trips, callback) => {
  let fullTrips = [];
  let numFinished = 0;
  if (trips.length === 0) { // nothing to fetch, so call back right away instead of never responding
    return callback(null, fullTrips);
  }
  for (let i = 0; i < trips.length; i++) {
fullTrips.push(Object.assign({}, {
id : trips[i].id,
city: trips[i].city,
isArchived: trips[i].isArchived,
isPublic: trips[i].isPublic,
fromDate: trips[i].tripFromDate,
toDate: trips[i].tripToDate
}));
const tripID = trips[i].id
db.getTripEvents(tripID, function (err, events) {
fullTrips[i].events = events;
db.getTripRestaurants(tripID, function (err, food) {
fullTrips[i].eatin = food;
numFinished++;
if (numFinished === trips.length) {
callback(null, fullTrips);
}
});
});
}
}
app.post('/trips', (req, res) => {
const user = (req.body.tripUser);
const city = (req.body.tripCity);
const fromDate = (req.body.tripFromDate);
const toDate = (req.body.tripToDate);
db.addNewTrip(user, city, fromDate, toDate, function(err, data) {
if (err) {
console.log(err);
res.status(500).send(err);
} else {
res.status(200).json({ city: data.city });
}
});
});
app.patch('/trips', (req, res) => {
if (req.body.public !== undefined) {
db.modifyTripDetails(req.body.public, null, req.body.user, null, null, req.body.tripCity, function(err, data) {
if (err) {
res.status(500).send(err);
} else {
res.sendStatus(204);
}
});
} else {
db.remove('trip', req.body.tripID, function(err, data) {
if (err) {
res.status(500).send(err);
} else {
res.status(204).end();
}
})
}
});
app.patch('/plan', (req, res) => {
console.log('-----> server', req.body);
db.modifyTripDetails(null, null, req.body.user, req.body.tripFromDate, req.body.tripToDate, req.body.tripCity, function(err, data) {
if (err) {
res.status(500).send(err);
} else {
res.sendStatus(204);
}
});
});
/******************************** Search - Events *****************************/
app.post('/events', function (req, res) {
const city = req.body.tripCity;
const query = req.body.eventQuery;
const toDate = req.body.tripToDate;
const fromDate = req.body.tripFromDate;
eventbrite.searchEvents(query, city, fromDate, toDate, (err, data) => {
if(err) {
res.status(500).send(err);
} else {
      res.status(200).json(data);
}
});
});
app.post('/events/remove', function (req, res) {
db.remove('event', req.body.eventID, function(err) {
if(err) {
res.status(500).send(err);
} else {
res.status(200).end();
}
});
});
app.post('/events/add', function (req,res) {
const event = req.body.tripEvent;
const user = req.body.tripUser;
const city = req.body.tripCity;
db.addEventToTrip(event, user, city, function(err) {
if (err) {
console.log(err);
res.status(500).send(err);
} else {
res.status(201).end();
}
});
});
app.get('/events', (req, res) => {
const user = req.query.tripUser;
const city = req.query.tripCity;
db.showTripEvents(user, city, function(err, data) {
if (err) {
      res.status(500).send(err);
} else {
res.status(200).json({ events: data });
}
});
});
/********************************* Search - Foods ***************************/
app.post('/foods', (req, res) => {
let city = req.body.tripCity;
let searchFood = req.body.foodQuery;
zomato.searchForCityId( city, ( err, data ) => {
if (err) {
res.status(500).send(err);
} else {
let cityId = data
zomato.searchForFoods( cityId, searchFood, (err, result) => {
if (err) {
res.status(500).send(err);
} else {
res.status(200).json({foods: result})
}
});
}
});
});
app.post('/foods/remove', function (req, res) {
db.remove('restaurant', req.body.foodID, function (err) {
if (err) {
res.status(500).send(err);
} else {
res.status(200).end();
}
});
});
app.post('/foods/add', function (req, res) {
const food = req.body.tripFood;
const user = req.body.tripUser;
const city = req.body.tripCity;
db.addRestaurantToTrip(food, user, city, function (err) {
if (err) {
res.status(500).send(err);
} else {
res.status(201).end();
}
});
});
app.get('/foods', (req, res) => {
const user = req.query.tripUser;
const city = req.query.tripCity;
db.showTripRestaurants(user, city, function (err, data) {
if (err) {
res.status(500).send(err);
} else {
res.status(200).json({ foods: data });
}
});
});
app.get('/*', function (req, res) {
  res.sendFile(path.join(__dirname, '/../react-client/dist', 'index.html')); // serve the React app for any unmatched route, same as the other sendFile handlers
});
/****************************************************************************/
const port = process.env.PORT || 3000;
app.listen(port, function() {
  console.log(`listening on port ${port}!`);
});
<file_sep>/react-client/src/components/searchPage.jsx
import React from 'react';
import ReactDOM from 'react-dom';
import { Link } from 'react-router-dom';
import { connect } from 'react-redux';
import { bindActionCreators } from 'redux';
import moment from 'moment';
import RaisedButton from 'material-ui/RaisedButton';
import Paper from 'material-ui/Paper';
import TextField from 'material-ui/TextField';
import SelectField from 'material-ui/SelectField';
import MenuItem from 'material-ui/MenuItem';
import FlatButton from 'material-ui/FlatButton';
import Drawer from 'material-ui/Drawer';
import Dialog from 'material-ui/Dialog';
import AppBar from 'material-ui/AppBar';
import IconButton from 'material-ui/IconButton';
import NavigationClose from 'material-ui/svg-icons/navigation/close';
import DatePicker from 'material-ui/DatePicker';
import Toggle from 'material-ui/Toggle';
import { Card, CardActions, CardHeader, CardMedia, CardTitle, CardText } from 'material-ui/Card';
import lightBaseTheme from 'material-ui/styles/baseThemes/lightBaseTheme';
import MuiThemeProvider from 'material-ui/styles/MuiThemeProvider';
import getMuiTheme from 'material-ui/styles/getMuiTheme';
import { cyan50, cyan100, cyan200, cyan300, cyan400, cyan500, cyan600, cyan700, cyan800, cyan900 } from 'material-ui/styles/colors';
import * as theme from './homePage.jsx'; // * imports all the named exports from that file
import * as tripStyle from './trip.jsx';
import * as actions from '../actions/index.js';
import Activity from './activity.jsx';
import UserPage from './userPage.jsx';
import Events from './events.jsx';
import Signup from './signup.jsx';
import Login from './login.jsx';
import Eatin from './restaurants.jsx';
export const styles = {
activityContainer: {
display: 'inline-block',
marginBottom: '1%',
marginLeft: '2%',
marginTop: '1%',
verticalAlign: 'top',
width: '47%'
},
activityTitle: {
backgroundColor: '#f9f9f9',
color: cyan800,
fontSize: 20,
fontWeight: 'bold',
padding: '1%',
margin: '2%',
textAlign: 'left'
},
cardTitle: {
fontSize: 15,
fontWeight: 'bold',
lineHeight: '0 !important'
},
createTripCard: {
display: 'inline-block',
marginLeft: '19%',
marginTop: '1%',
width: '30%',
},
existingTripsCard: {
display: 'inline-block',
marginTop: '1%',
marginLeft: '2%',
verticalAlign: 'top',
width: '30%',
},
navButtons: {
marginRight: '1em',
marginLeft: '1em'
},
searchBar: {
paddingLeft: '5%',
paddingRight: '5%'
},
searchInput: {
width: '80%'
},
searchResults: {
margin: '2%'
},
tripDatesCard: {
display: 'flex',
flexFlow: 'column wrap'
},
tripDatesHeaders: {
backgroundColor: '#f9f9f9',
color: cyan900,
fontSize: 15,
fontWeight: 'bold',
padding: '1%'
},
welcomeUser: {
marginTop: '1%'
}
}
//this is a terrible nightmare component that should be overhauled
class SearchPage extends React.Component {
constructor (props) {
super(props);
if (props.store.userTrips.length !== 0 && props.store.activeTrip.status) {
this.state = {
open: true,
activeCity: props.store.userTrips[props.store.activeTrip.index].city,
dropdown: props.store.userTrips[props.store.activeTrip.index].city,
activeFromDate: props.store.userTrips[props.store.activeTrip.index].fromDate,
activeToDate: props.store.userTrips[props.store.activeTrip.index].toDate,
editDatesOpen: false,
tempFromDate: props.store.userTrips[props.store.activeTrip.index].fromDate,
tempToDate: props.store.userTrips[props.store.activeTrip.index].toDate
}
} else if (props.store.userTrips.length !== 0) {
this.state = {
open: false,
activeCity: props.store.userTrips[props.store.activeTrip.index].city,
dropdown: 0,
activeFromDate: props.store.userTrips[props.store.activeTrip.index].fromDate,
activeToDate: props.store.userTrips[props.store.activeTrip.index].toDate,
editDatesOpen: false,
tempFromDate: props.store.userTrips[props.store.activeTrip.index].fromDate,
tempToDate: props.store.userTrips[props.store.activeTrip.index].toDate
}
} else {
this.state = {
open: false,
dropdown: 0,
editDatesOpen: false
}
}
}
//works for input box and dropdown menu
updateCity (event, index, value) {
let store = this.props.store;
let actions = this.props.actions;
if (value && index !== 0) { //is dropdown
this.setState({dropdown: value, open: true});
this.setState({activeCity: store.userTrips[index - 1].city});
this.setState({activeFromDate: store.userTrips[index - 1].fromDate, activeToDate: store.userTrips[index - 1].toDate});
actions.updateCity('');
actions.activateTrip(index - 1);
} else if (index === 0) { //top option of dropdown
this.setState({ dropdown: value });
actions.deactivate();
} else { //is textbox
actions.updateCity(this.formatCity(event.target.value));
}
}
//makes cities always only have a capital letter as the first char of each word
formatCity (city) {
const words = city.split(' ');
let newWords = [];
for (let word of words) {
word = word.slice(0,1).toUpperCase().concat(word.slice(1).toLowerCase());
newWords.push(word);
}
return newWords.join(' ');
}
//creates a new trip from the input box
submit (event) {
let store = this.props.store;
let actions = this.props.actions;
event.preventDefault();
if (store.authenticated) {
if (store.city !== '' && store.tripFromDate !== '' && store.tripToDate !== '') { //checks for required entries
actions.makeNewTrip(store.username, store.city, store.userTrips.length, store.tripFromDate, store.tripToDate);
this.setState({ activeCity: store.city, open: true, activeFromDate: store.tripFromDate, activeToDate: store.tripToDate});
} else {
window.alert('Please make sure to fill in the dates and city name!');
}
}
};
/***************************** Event - search **********************************/
updateEventQuery(event) {
this.props.actions.updateEventQuery(event.target.value)
};
submitEventQuery (event) {
let store = this.props.store;
let actions = this.props.actions;
event.preventDefault(); //prevent refresh, might not need this anymore
if ((store.activeTrip.status || store.city) && store.eventQuery) {
let city = store.activeTrip.status ? this.state.activeCity : store.city; //lets you maybe search on a city without creating a trip
actions.searchEvents(this.state.activeCity, store.eventQuery, this.state.activeFromDate, this.state.activeToDate);
} else {
window.alert('Please select a city and search terms first!');
}
};
/***************************** Food - search **********************************/
updateFoodQuery (event) {
this.props.actions.updateFoodQuery(event.target.value)
};
submitFoodQuery (event) {
let store = this.props.store;
event.preventDefault();
if(store.activeTrip.status || store.city) {
let city = store.activeTrip.status ? this.state.activeCity : store.city;
this.props.actions.searchForFood(this.state.activeCity, store.foodQuery)
} else {
window.alert('Please select a city and search terms first!')
}
};
/***************************** MESSAGE *****************************/
render () {
let message = '';
let messageEvents = '';
let messageFood = '';
let activeCity = this.state.activeCity;
let store = this.props.store; //the redux store
let actions = this.props.actions;
if (!store.activeTrip.status) {
message = 'Pick a city for your trip!';
messageEvents = 'First pick a city before searching events!';
messageFood = '';
} else {
message = `You\'re going to ${activeCity}! \n Or plan a different trip: `;
messageEvents = `Type a keyword to find events in ${activeCity}!`;
messageFood= `Or search for food in ${activeCity}!`;
}
/*************************** DATE SELECTION STUFF ***************************/
// all this probably shouldn't be in the render method but oh well
let today = new Date();
let formatDate = (date) => {
// Dates need to be in YYYY-MM-DD format
return moment(date).format('YYYY-MM-DDT00:00:00.000Z'); //<-- moment is cool library
}
    let updateFromDate = (event, date) => { //pulls from the Material-UI date picker
let fromDate;
if (date !== '') {
fromDate = formatDate(date);
} else {
fromDate = '';
}
actions.updateFromDate(fromDate); //update store
// This sets minimum "To" date based on the current "From" date in the correct date format
if (date !== '') {
actions.setMinToDate(date);
} else {
actions.setMinToDate({});
}
this.setState({
activeFromDate: fromDate
});
}
const updateToDate = (event, date) => {
let toDate;
if (date !== '') {
toDate = formatDate(date);
} else {
toDate = '';
}
actions.updateToDate(toDate);
this.setState({
activeToDate: toDate
});
};
//some of this is just trying to keep this component's state and the redux store in sync which is
//kind of bad form for redux for this component to have so much stuff
const submitEditDates = () => {
let city = this.state.activeCity;
let newFromDate = store.tripFromDate;
let newToDate = store.tripToDate;
// updates trip dates in the db
actions.updateTripDates(store.username, city, newFromDate, newToDate);
store.userTrips[store.activeTrip.index].fromDate = store.tripFromDate;
store.userTrips[store.activeTrip.index].toDate = store.tripToDate;
this.setState({
activeFromDate: newFromDate,
activeToDate: newToDate,
tempFromDate: newFromDate,
tempToDate: newToDate,
editDatesOpen: false
});
};
/*************************** EXISTING TRIPS DROPDOWN ***************************/
const dropdown = () => {
if (store.authenticated) { //logged in
return (
<div>
<SelectField
value={this.state.dropdown}
onChange = {this.updateCity.bind(this)}
>
<MenuItem primaryText = 'Make a New Trip' />
{store.userTrips.map((trip, index) =>
<MenuItem
key = {index}
value = {trip.city}
primaryText = {trip.city}
/>
)}
</SelectField>
<br/>
<RaisedButton
onClick={() => (this.setState({ open: !this.state.open }))}
label='Show Details'
disabled={!store.activeTrip.status}
/>
</div>
)
} else {
return (
<div>Please login to view your current trips!</div>
)
}
}
/*************************** TRIP DETAILS SIDEBAR ***************************/
//edit a trip's dates modal buttons
const editDateActions = [
<FlatButton
label="Submit"
primary={true}
keyboardFocused={true}
onClick={submitEditDates}
/>,
<FlatButton
label="Cancel"
primary={true}
style={{marginLeft: '2%'}}
onClick={() => {
updateFromDate(null, '');
updateToDate(null, '');
this.setState({
editDatesOpen: false,
});
}}
/>
];
const drawer = () => {
if (store.activeTrip.status) {
let activeTrip = store.userTrips[store.activeTrip.index];
if (activeTrip) {
let fromDate = moment(activeTrip.fromDate).format('MM/DD/YY');
let toDate = moment(activeTrip.toDate).format('MM/DD/YY');
return (
<Drawer
width={400}
openSecondary={true}
open={this.state.open}
>
<AppBar
title={this.state.activeCity}
iconElementLeft={
<IconButton
onClick={() => (this.setState({ open: false }))}
>
<NavigationClose />
</IconButton>}
/>
<div style={{
margin: '2%',
fontSize: 25,
fontWeight: 'bold',
textAlign: 'center'
}}>
{fromDate} - {toDate}
<div style={{display: 'inline-block', marginLeft: '1%'}}>
<FlatButton
label="Edit"
onClick={() => this.setState({editDatesOpen: true})}
backgroundColor='transparent'
hoverColor='#f9f9f9'
/>
<Dialog
title="Modify Trip Dates"
actions={editDateActions}
modal={false}
open={this.state.editDatesOpen}
onRequestClose={() => this.setState({editDatesOpen: false})}
>
<div
style={{color: cyan800}}
>Trip Dates for {this.state.activeCity}:
</div>
<div style={{
color: cyan900,
fontWeight: 'bold'
}}>{fromDate} - {toDate}</div>
<br/><br/>
Edit your trip dates below:
<DatePicker
floatingLabelText="From"
autoOk={true}
onChange={updateFromDate}
minDate={today}
/>
<DatePicker
floatingLabelText="To"
autoOk={true}
onChange={updateToDate}
minDate={store.minToDate}
/>
</Dialog>
</div>
</div>
{showActivityDiv('event', activeTrip)}
<div style={tripStyle.styles.tripDetails}>
{activeTrip.events.map((event, index) =>
(<Activity
key={index}
sidebar = 'true'
type='event'
activity={event}
user={store.username}
city={this.state.activeCity}
deleteEvent={actions.deleteEvent}
/>))}
</div>
{showActivityDiv('eatin', activeTrip)}
<div style={tripStyle.styles.tripDetails}>
{activeTrip.eatin.map((eatin, index) =>
(<Activity
key={index}
sidebar='true'
type='food'
user={store.username}
city={this.state.activeCity}
deleteFood={actions.deleteFood}
activity={eatin}
/>))}
</div>
</Drawer>
);
}
}
}
//differ for logged in and out users
const navLinks = () => {
if (store.authenticated) {
return (
<div style={theme.styles.navLinks}>
<Link to='/'>
<RaisedButton
label="Home"
/>
</Link>
<Link to='trips'>
<RaisedButton
label="My Trips"
disabled={!store.authenticated}
style={styles.navButtons}
/>
</Link>
<Link to='/'>
<RaisedButton
disabled={!store.authenticated}
onClick={actions.logOut}
label='Log Out'
/>
</Link>
</div>
);
} else {
return (
<div style={theme.styles.navLinks}>
<Link to='/'>
<RaisedButton
label="Home"
/>
</Link>
<Signup
signup={actions.signup}
username={store.username}
password={<PASSWORD>}
updateUsername={actions.updateUsername}
updatePassword={actions.updatePassword}
/>
<Login
login={actions.login}
username={store.username}
password={<PASSWORD>}
updateUsername={actions.updateUsername}
updatePassword={actions.updatePassword}
/>
</div>
)
}
}
/************************* ACTIVITY HEADER DIVS ******************************/
const showActivityDiv = (activityType, trip) => {
// If activity = event and there are events in the current trip
if (activityType === 'event' && trip.events.length > 0) {
return (
<div style={tripStyle.styles.activityHeader}>Events:</div>
)
// If activity = eatin and there are restaurants in the current trip
} else if (activityType === 'eatin' && trip.eatin.length > 0) {
return (
<div style={tripStyle.styles.activityHeader}>Food:</div>
)
}
};
/*************************** WELCOME USER TEXT ***************************/
const welcomeUser = () => {
if (store.authenticated) {
return (
<div style={theme.styles.discoverTrips}>Welcome back, {store.username}!</div>
)
} else {
return (
<div style={theme.styles.discoverTrips}>Welcome!</div>
)
}
}
/************************ CREATE TRIP SEARCH BUTTON ************************/
const searchButton = () => {
return (
<RaisedButton
onClick={this.submit.bind(this)}
label='Create Trip'
disabled={!store.authenticated}
/>
)
}
/*************************** STUFF ON PAGE ***************************/
return (
<MuiThemeProvider muiTheme={theme.muiTheme}>
<Paper>
{/************************** NAVIGATION **************************/}
{navLinks()}
{/******************************* HEADER *******************************/}
<div style={theme.styles.header}>
<Link to="/" style={{textDecoration: 'none', color: cyan900}}>
TripTeaze
</Link>
</div>
<div style={styles.welcomeUser}>{welcomeUser()}</div>
{/************************** CREATE TRIP CARD **************************/}
<div style={styles.createTripCard}>
{drawer()}
<Card>
<CardTitle
title="Create New Trip"
titleStyle={styles.cardTitle}
actAsExpander={true}
showExpandableButton={true}
/>
<CardText
expandable={true}
>
<div style={styles.tripDatesCard}>
<div style={styles.tripDatesHeaders}>Trip Dates:</div>
<div>
<DatePicker
floatingLabelText="From"
autoOk={true}
onChange={updateFromDate}
minDate={today}
/>
<DatePicker
floatingLabelText="To"
autoOk={true}
onChange={updateToDate}
// defaultDate={} // set default "to" date as the "from" date?
minDate={store.minToDate}
/>
</div>
<br/>
<div>
<div style={styles.tripDatesHeaders}> {message} </div>
<TextField
id='city'
value={store.city}
onChange={this.updateCity.bind(this)}
/>
<br/>
{searchButton()}
</div>
</div>
</CardText>
</Card>
</div>
{/************************** EXISTING TRIPS CARD **************************/}
<div style={styles.existingTripsCard}>
<Card
initiallyExpanded={true}
>
<CardTitle
title="Current Trips"
titleStyle={styles.cardTitle}
actAsExpander={true}
showExpandableButton={true}
/>
<CardText
expandable={true}
>
{dropdown()}
</CardText>
</Card>
</div>
{/************************** EXPLORE SECTION **************************/}
<div style={{marginTop: '3%'}}>
<div style={theme.styles.discoverTrips}>Explore</div>
{/************************** SEARCH EVENTS **************************/}
<Paper style={styles.activityContainer}>
<div style={styles.activityTitle}>Events</div>
<div style={styles.searchBar}>
<TextField
id = 'event'
onChange = {this.updateEventQuery.bind(this)}
inputStyle={{ width: '100%' }}
style={styles.searchInput}
/>
<RaisedButton
onClick={this.submitEventQuery.bind(this)}
label='Search'
/>
</div>
{/************************** EVENT RESULTS **************************/}
<div style={styles.searchResults}>
<Events
events={store.eventResults}
addEventToTrip={actions.addEventToTrip}
user={store.username}
city={this.state.activeCity}
eventSnackbar={store.eventSnackbar}
onRequestClose={actions.deactivateEventSnackbar}
/>
</div>
</Paper>
{/************************** SEARCH EATIN **************************/}
<Paper style={styles.activityContainer}>
<div style={styles.activityTitle}>Restaurants</div>
<div style={styles.searchBar}>
<TextField
id='food'
onChange={this.updateFoodQuery.bind(this)}
inputStyle={{ width: '100%' }}
style={styles.searchInput}
/>
<RaisedButton
onClick={this.submitFoodQuery.bind(this)}
label='Search'
/>
</div>
{/************************** EATIN RESULTS **************************/}
<div style={styles.searchResults}>
<Eatin
restaurants={store.foodResults}
addFoodToTrip={actions.addFoodToTrip}
user={store.username}
city={this.state.activeCity}
foodSnackbar={store.foodSnackbar}
onRequestClose={actions.deactivateFoodSnackbar}
/>
</div>
</Paper>
</div>
</Paper>
</MuiThemeProvider>
);
}
}
//react-redux stuff
//state is the redux store
const mapStateToProps = state => (
{ store: state }
);
//dispatch is the movement of actions to the reducer
const mapDispatchToProps = dispatch => (
{ actions: bindActionCreators(actions, dispatch) }
);
export default connect(mapStateToProps, mapDispatchToProps)(SearchPage); | 20c1a5f2b65b9c19292d527c6fab07fd6e0f97d5 | [
"JavaScript",
"Markdown"
] | 9 | JavaScript | PIUisUndefined/tripteaze | d651ce7cd7440c396fe09d003235b9f96fe9f54c | 1291e1a87d97ac46bffcb00e86353b62dbfc2f84 | |
refs/heads/master | <file_sep># Objective
To create a database where students can sign up, and to make it easier for those signups to go straight to MailerLite.
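A hedged example of the signup request the server expects (field names match the client script in this repo; the host and the data values are placeholders):
```js
// POST the form fields to /new_subscriber: the server upserts the row in MySQL
// (keyed on netid) and adds the subscriber to the MailerLite group.
$.ajax({
  type: 'POST',
  url: 'https://your-app.herokuapp.com/new_subscriber', // placeholder host
  data: { netid: 'abc123', fname: 'Ada', lname: 'Lovelace', email: '[email protected]', year: '2022' },
  success: function () { console.log('subscribed'); }
});
```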
<file_sep>const cool = require('cool-ascii-faces');
const express = require('express');
const path = require('path');
var mysql = require('mysql');
var bodyParser = require('body-parser');
var Mailerlite = require('mailerlite-nodejs-api');
var $apiKey = process.env.MAILERLITE_KEY;
var $campaign_id = process.env.CAMPAIGN_ID;
var mailerlite = new Mailerlite($apiKey);
var $ML_Subscribers = mailerlite.subscribers;
var $ML_Campaigns = mailerlite.campaigns;
var $ML_lists = mailerlite.lists;
var pool = mysql.createPool({
host: process.env.DB_HOST,
user: process.env.DB_USERNAME,
password: <PASSWORD>,
database: process.env.DB_DATABASE,
});
const PORT = process.env.PORT || 5000
// const { Pool } = require('pg');
// const pool = new Pool({
// connectionString: process.env.DATABASE_URL,
// ssl: true
// });
express()
.use(express.static(path.join(__dirname, 'public')))
.use(bodyParser.json()) // to support JSON-encoded bodies
.use(bodyParser.urlencoded({ // to support URL-encoded bodies
extended: true
}))
.use(express.json()) // to support JSON-encoded bodies
.use(express.urlencoded()) // to support URL-encoded bodies
.all('*',function(req,res,next){
if (!req.get('Origin')) return next();
res.set('Access-Control-Allow-Origin','http://myapp.com');
res.set('Access-Control-Allow-Methods','GET,POST');
res.set('Access-Control-Allow-Headers','X-Requested-With,Content-Type');
if ('OPTIONS' == req.method) return res.send(200);
next();
})
.set('views', path.join(__dirname, 'views'))
.set('view engine', 'ejs')
.get('/', (req, res) => res.render('pages/index'))
.get('/cool', (req, res) => res.send(cool()))
.get('/times', (req, res) => {
let result = ''
const times = process.env.TIMES || 5
for (i = 0; i < times; i++) {
result += i + ' '
}
res.send(result)
})
.get('/db', async (req, res) => {
pool.getConnection(function(err, conn) {
if (err) throw err; // not connected!
var sql = "SELECT * FROM IEEE_Club_Members";
// Use the connection
conn.query(sql, function (error, results, fields) {
console.log(results);
// When done with the connection, release it.
conn.release();
// Handle error after the release.
if (error) throw error;
console.log(results);
res.send(results)
// Don't use the connection here, it has been returned to the pool.
});
});
})
.post('/new_subscriber', async (req, res) => {
res.header("Access-Control-Allow-Origin", "*");
var netid = "'"+req.body.netid+"'";
var fname = "'"+req.body.fname+"'";
var lname = "'"+req.body.lname+"'";
var email = "'"+req.body.email+"'";
var year = "'"+req.body.year+"'";
var timestamp = new Date().getTime();
console.log(netid)
pool.getConnection(function(err, conn) {
if (err) throw err; // not connected!
var sql = "INSERT INTO IEEE_Club_Members values ("+netid+","+fname+","+lname+","+email+","+year+","+timestamp+")";
sql+= " ON DUPLICATE KEY UPDATE Email="+email;
// Use the connection
conn.query(sql, function (error, results, fields) {
// When done with the connection, release it.
conn.release();
// Handle error after the release.
if (error) throw error;
console.log('1 record inserted');
// Don't use the connection here, it has been returned to the pool.
});
});
var subscriber = [{
'email': req.body.email,
'name': req.body.fname,
    'fields': {'graduating_year': req.body.year} // use the raw value; the local `year` variable is wrapped in SQL quotes
}];
console.log($campaign_id);
$ML_Subscribers.setId($campaign_id).addAll(subscriber,1,function (r){
console.log(r);
res.status(200).send('success');
});
})
.post('/delete_subscriber', async (req, res) => {
res.header("Access-Control-Allow-Origin", "*");
var netid = "'"+req.body.netid+"'";
var fname = "'"+req.body.fname+"'";
var lname = "'"+req.body.lname+"'";
var email = "'"+req.body.email+"'";
var year = "'"+req.body.year+"'";
var timestamp = new Date().getTime();
console.log(netid)
pool.getConnection(function(err, conn) {
if (err) throw err; // not connected!
var sql = "DELETE FROM IEEE_Club_Members WHERE netid="+netid;
// Use the connection
conn.query(sql, function (error, results, fields) {
// When done with the connection, release it.
conn.release();
// Handle error after the release.
if (error) throw error;
console.log('1 record removed');
// Don't use the connection here, it has been returned to the pool.
});
});
var subscriber = [{
'email': req.body.email,
'name': req.body.fname,
}];
console.log($campaign_id);
$ML_Subscribers.unsubscribe(subscriber,1,function (r){
console.log(r);
res.status(200).send('success');
});
})
.listen(PORT, () => console.log(`Listening on ${ PORT }`))
<file_sep>var netid= document.getElementById('net');
var fst= document.getElementById('first');
var lst=document.getElementById('last');
var email=document.getElementById('mail');
var grads=document.getElementById('grad');
var submitQuery = document.querySelector('input[type="submit"][value="Submit"]');
var unsubscribeQuery = document.querySelector('input[type="submit"][value="Unsubscribe"]'); // used by the unsubscribe listener below
//var url = "http://localhost:5000"
var url = "https://emails-ieee.herokuapp.com"
function handle_form_submission(){
console.warn('TEST');
alert('Submit button pressed');
return false; //do not submit the form
}
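// Highlights any invalid required fields in yellow, alerts which ones are missing,
// and returns true only when every field passes validation.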
function valid(){
var missing = "";
var isValid = true;
if (!(netid.checkValidity())) {
$('#net').css({ "background": '#FFFF00'});
missing +="NetID,";
isValid = false;
}
if (!(fst.checkValidity())) {
$('#first').css({ "background": '#FFFF00'});
missing +="First Name,";
isValid = false;
}
if (!(lst.checkValidity())) {
$('#last').css({ "background": '#FFFF00'});
missing +="Last Name,";
isValid = false;
}
  if (!(email.checkValidity())) {
$('#mail').css({ "background": '#FFFF00'});
missing +="Email,";
isValid = false;
}
if (!(grads.checkValidity())) {
$('#grad').css({ "background": '#FFFF00'});
missing +="Graduation Year,";
isValid = false;
}
if(!isValid){
alert("Missing:" + missing);
return false;
}
return true;
}
function reset(){
console.log("clicked");
$('#myForm')[0].reset()
}
submitQuery.addEventListener ('click',function(){
// Sending and receiving data in JSON format using POST method
//
json = {netid: netid.value,fname: fst.value,lname: lst.value,email:email.value,year:grads.value};
if(valid()){
$.ajax({
type: "POST",
crossDomain: true,
url: url+"/new_subscriber",
data: json,
success: function(jsondata){
alert('success');
console.log('success');
reset();
}
});
}
});
unsubscribeQuery.addEventListener ('click',function(){
// Sending and receiving data in JSON format using POST method
//
json = {netid: netid.value,fname: fst.value,lname: lst.value,email:email.value,year:grads.value};
if(valid()){
$.ajax({
type: "POST",
crossDomain: true,
url: url+"/delete_subscriber",
data: json,
success: function(jsondata){
alert('success');
console.log('success');
reset();
}
});
}
});
<file_sep>-- phpMyAdmin SQL Dump
-- version 4.8.2
-- https://www.phpmyadmin.net/
--
-- Host: localhost
-- Generation Time: Sep 01, 2018 at 02:00 AM
-- Server version: 10.1.34-MariaDB
-- PHP Version: 7.1.19
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `ieee`
--
-- --------------------------------------------------------
--
-- Table structure for table `IEEE_Club_Members`
--
CREATE TABLE `IEEE_Club_Members` (
`netid` varchar(25) NOT NULL,
`fname` char(25) NOT NULL,
`lname` char(25) NOT NULL,
`email` varchar(50) NOT NULL,
`year` int(4) NOT NULL,
`Time Stamp` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `IEEE_Club_Members`
--
--
-- Indexes for dumped tables
--
--
-- Indexes for table `IEEE_Club_Members`
--
ALTER TABLE `IEEE_Club_Members`
ADD UNIQUE KEY `netid` (`netid`);
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
| edc093482f8720fabb8a206f0ab7526209fe5782 | [
"Markdown",
"SQL",
"JavaScript"
] | 4 | Markdown | roshni98/IEEE | f968c93050fc1bcb739749191ca3baa2f71be03d | 001e201f6c669b33ec8a8d00b05ad6d398393e05 | |
refs/heads/master | <repo_name>sohankunkerkar/libsystemd-rs<file_sep>/src/activation.rs
use nix::mqueue::mq_getattr;
use nix::sys::socket::getsockname;
use nix::sys::socket::SockAddr;
use nix::sys::stat::fstat;
use std::convert::TryFrom;
use std::env;
use std::os::unix::io::{IntoRawFd, RawFd};
use std::process;
use crate::errors::*;
/// Minimum FD number used by systemd for passing sockets.
const SD_LISTEN_FDS_START: RawFd = 3;
/// Trait for checking the type of a file descriptor.
pub trait IsType {
/// Returns true if a file descriptor is a FIFO.
fn is_fifo(&self) -> bool;
/// Returns true if a file descriptor is a special file.
fn is_special(&self) -> bool;
/// Returns true if a file descriptor is a `PF_INET` socket.
fn is_inet(&self) -> bool;
/// Returns true if a file descriptor is a `PF_UNIX` socket.
fn is_unix(&self) -> bool;
/// Returns true if a file descriptor is a POSIX message queue descriptor.
fn is_mq(&self) -> bool;
}
/// File descriptor passed by systemd to socket-activated services.
///
/// See https://www.freedesktop.org/software/systemd/man/systemd.socket.html.
#[derive(Debug, Clone)]
pub struct FileDescriptor(SocketFd);
/// Possible types of sockets.
#[derive(Debug, Clone)]
enum SocketFd {
/// A FIFO named pipe (see `man 7 fifo`)
Fifo(RawFd),
/// A special file, such as character device nodes or special files in
/// `/proc` and `/sys`.
Special(RawFd),
/// A `PF_INET` socket, such as UDP/TCP sockets.
Inet(RawFd),
/// A `PF_UNIX` socket (see `man 7 unix`).
Unix(RawFd),
/// A POSIX message queue (see `man 7 mq_overview`).
Mq(RawFd),
}
impl IsType for FileDescriptor {
fn is_fifo(&self) -> bool {
match self.0 {
SocketFd::Fifo(_) => true,
_ => false,
}
}
fn is_special(&self) -> bool {
match self.0 {
SocketFd::Special(_) => true,
_ => false,
}
}
fn is_unix(&self) -> bool {
match self.0 {
SocketFd::Unix(_) => true,
_ => false,
}
}
fn is_inet(&self) -> bool {
match self.0 {
SocketFd::Inet(_) => true,
_ => false,
}
}
fn is_mq(&self) -> bool {
match self.0 {
SocketFd::Mq(_) => true,
_ => false,
}
}
}
/// Check for file descriptors passed by systemd.
///
/// Invoked by socket activated daemons to check for file descriptors needed by the service.
/// If `unset_env` is true, the environment variables used by systemd will be cleared.
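///
/// A minimal usage sketch (not taken from upstream docs; it assumes the crate is named
/// `libsystemd` and that the process was started via systemd socket activation):
///
/// ```no_run
/// // `true` asks the helper to clear the LISTEN_* environment variables after reading them.
/// let descriptors = libsystemd::activation::receive_descriptors(true)
///     .expect("no file descriptors received");
/// for fd in descriptors {
///     println!("got descriptor: {:?}", fd);
/// }
/// ```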
pub fn receive_descriptors(unset_env: bool) -> Result<Vec<FileDescriptor>> {
let pid = env::var("LISTEN_PID");
let fds = env::var("LISTEN_FDS");
if unset_env {
env::remove_var("LISTEN_PID");
env::remove_var("LISTEN_FDS");
env::remove_var("LISTEN_FDNAMES");
}
let pid = pid?.parse::<u32>()?;
let fds = fds?.parse::<u32>()?;
if process::id() != pid {
return Err("PID mismatch".into());
}
let vec = socks_from_fds(fds);
Ok(vec)
}
/// Check for named file descriptors passed by systemd.
///
/// Like `sd_listen_fds`, but this will also return a vector of names associated with each file
/// descriptor.
pub fn receive_descriptors_with_names(unset_env: bool) -> Result<Vec<(FileDescriptor, String)>> {
let pid = env::var("LISTEN_PID");
let fds = env::var("LISTEN_FDS");
let names = env::var("LISTEN_FDNAMES");
if unset_env {
env::remove_var("LISTEN_PID");
env::remove_var("LISTEN_FDS");
env::remove_var("LISTEN_FDNAMES");
}
let pid = pid?.parse::<u32>()?;
let fds = fds?.parse::<u32>()?;
if process::id() != pid {
return Err("PID mismatch".into());
}
let names: Vec<String> = names?.split(':').map(String::from).collect();
let vec = socks_from_fds(fds);
let out = vec.into_iter().zip(names.into_iter()).collect();
Ok(out)
}
fn socks_from_fds(num_fds: u32) -> Vec<FileDescriptor> {
let mut vec = Vec::new();
for fd_offset in 0..num_fds {
let fd = SD_LISTEN_FDS_START + (fd_offset as i32);
match FileDescriptor::try_from(fd) {
Ok(sock) => vec.push(sock),
Err(e) => eprintln!("failed to receive socket: {}", e),
};
}
vec
}
impl IsType for RawFd {
fn is_fifo(&self) -> bool {
match fstat(*self) {
Ok(stat) => (stat.st_mode & 0o0_170_000) == 0o010_000,
Err(_) => false,
}
}
fn is_special(&self) -> bool {
match fstat(*self) {
Ok(stat) => (stat.st_mode & 0o0_170_000) == 0o100_000,
Err(_) => false,
}
}
fn is_inet(&self) -> bool {
match getsockname(*self) {
Ok(addr) => {
if let SockAddr::Inet(_) = addr {
true
} else {
false
}
}
Err(_) => false,
}
}
fn is_unix(&self) -> bool {
match getsockname(*self) {
Ok(addr) => {
if let SockAddr::Unix(_) = addr {
true
} else {
false
}
}
Err(_) => false,
}
}
fn is_mq(&self) -> bool {
mq_getattr(*self).is_ok()
}
}
impl TryFrom<RawFd> for FileDescriptor {
type Error = Error;
fn try_from(value: RawFd) -> Result<Self> {
if value.is_fifo() {
return Ok(FileDescriptor(SocketFd::Fifo(value)));
} else if value.is_special() {
return Ok(FileDescriptor(SocketFd::Special(value)));
} else if value.is_inet() {
return Ok(FileDescriptor(SocketFd::Inet(value)));
} else if value.is_unix() {
return Ok(FileDescriptor(SocketFd::Unix(value)));
} else if value.is_mq() {
return Ok(FileDescriptor(SocketFd::Mq(value)));
}
Err("invalid file descriptor".into())
}
}
impl IntoRawFd for FileDescriptor {
fn into_raw_fd(self) -> RawFd {
match self.0 {
SocketFd::Fifo(fd) => fd,
SocketFd::Special(fd) => fd,
SocketFd::Inet(fd) => fd,
SocketFd::Unix(fd) => fd,
SocketFd::Mq(fd) => fd,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_socketype_is_unix() {
let sock = FileDescriptor(SocketFd::Unix(0i32));
assert!(sock.is_unix());
}
#[test]
fn test_socketype_is_special() {
let sock = FileDescriptor(SocketFd::Special(0i32));
assert!(sock.is_special());
}
#[test]
fn test_socketype_is_inet() {
let sock = FileDescriptor(SocketFd::Inet(0i32));
assert!(sock.is_inet());
}
#[test]
fn test_socketype_is_fifo() {
let sock = FileDescriptor(SocketFd::Fifo(0i32));
assert!(sock.is_fifo());
}
#[test]
fn test_socketype_is_mq() {
let sock = FileDescriptor(SocketFd::Mq(0i32));
assert!(sock.is_mq());
}
}
<file_sep>/src/errors.rs
#![allow(deprecated)]
error_chain::error_chain! {
foreign_links {
Io(::std::io::Error);
Env(::std::env::VarError);
Ffi(::std::ffi::NulError);
Nix(nix::Error);
Parse(::std::num::ParseIntError);
}
}
| 9e5b8d7ad2ca7ba3feb4993ac3e894813fcd8870 | [
"Rust"
] | 2 | Rust | sohankunkerkar/libsystemd-rs | 569f64778cf836cdd224a197d26561a823be6914 | b432e8cbc086c02b2eb97affe782cb575c2432b0 | |
refs/heads/master | <repo_name>DImasBo/shop-django<file_sep>/ecomapp/forms.py
# -*- coding: utf-8 -*-
from django import forms
from django.utils import timezone
from ecomapp.models import Order, Page, Category
from django_summernote.widgets import SummernoteWidget
class PageEditForm(forms.ModelForm):
class Meta:
model = Page
fields = ['description']
widgets = {
'description' : SummernoteWidget(),
}
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
fields = ['name']
class OrderForm(forms.ModelForm):
class Meta:
model = Order
fields = ['second_name','first_name','last_name','phone_number','email','buying_type','address','comments']
        labels = {
            "email": "електронна пошта (необов'язково, для підписки на вигідні пропозиції)"
        }
<file_sep>/ecomapp/admin.py
from django.contrib import admin
from .models import Category, Product, Cart, CartItem, Product,ProductImage,Reduction ,Order, Page
from django_summernote.admin import SummernoteModelAdmin
def make_paid(modeladmin, request, queryset):
queryset.update(status='PAID_status')
def make_ip(modeladmin, request, queryset):
queryset.update(status='IP_status')
def make_aip(modeladmin, request, queryset):
queryset.update(status='AIP_status')
make_aip.short_description = 'Помітити як Прийнятий в обробку'
make_ip.short_description = 'Помітити як В обробці'
make_paid.short_description = 'Помітити як Оплачено'
# cart = models.ForeignKey(Cart,on_delete=models.CASCADE, verbose_name='Корзина')
class OrderAdmin(admin.ModelAdmin):
# fields = ('products',('second_name','first_name','last_name'), ('phone_number','email'),'buying_type','address','status','comments')
list_filter = ['status']
actions = [make_paid, make_aip, make_ip]
class ProductImageInline(admin.TabularInline):
model = ProductImage
extra = 1
class ProductAdmin(SummernoteModelAdmin,admin.ModelAdmin):
fields = ('title','description','price','category',('count','available'),'slug','album_cover')
inlines = [ ProductImageInline, ]
    summernote_fields = ('description',)  # trailing comma so this is a tuple, not a plain string
class PageAdmin(SummernoteModelAdmin):
    summernote_fields = ('description',)  # trailing comma so this is a tuple, not a plain string
# Register your models here.
admin.site.register(Page, PageAdmin)
admin.site.register(Category)
# admin.site.register(Cart)
# admin.site.register(CartItem)
admin.site.register(Order, OrderAdmin)
# admin.site.register(Reduction)
admin.site.register(Product,ProductAdmin)
<file_sep>/ecomapp/urls.py
from django.urls import path, include
from ecomapp import views
urlpatterns = [
path('',views.IndexView.as_view(), name='index'),
#prodicts url
path('search/',views.SearchView.as_view(), name = 'search'),
path('product/<str:slug>', views.ProductDetailView.as_view(),name='product_detail_url'),
#pages url
path('page/<str:slug>', views.PageView.as_view() , name='page_url'),
#edit
path('page_edit/<str:slug>', views.PageEdit.as_view() , name='page_edit_url'),
#orders url
path('order/', views.CreateOrder.as_view(),name='create_order_url'),
path('orders/', views.ListOrder.as_view(),name='order_list_url'),
path('order/show/<int:id>', views.OrderDetailView.as_view(),name='order_detail_url'),
#add product in order
path('order/add/product/order=<int:id>', views.add_product_in_order,name='add_product_in_order_url'),
path('order/add_product/category/<str:slug>', views.OrderCategoryView.as_view(), name='add_order_category_url'),
path('order/add_product', views.OrderIndexView.as_view(), name='add_order_index_url'),
path('order/add/cart/', views.OrderCartView.as_view(),name='order_cart_url'),
path('order/add/save/', views.save_order, name='save_order_url'),
path('order/add/product/<str:slug>',views.OrderProductDetailView.as_view(),name='order_product_detail_url'),
# path('order/cart/add/<str:slug>', views.add_to_order_cart,name='add_order_to_cart_url'),
#edit
path('order/remove/<int:id>/<int:item_id>',views.remove_product_order,name='remove_product_order_url'),
path('order/add/<int:id>/<int:item_id>', views.add_product_order,name='add_product_order_url'),
path('order/minus/<int:id>/<int:item_id>', views.minus_product_order ,name='minus_product_order_url'),
path('order_finish/',views.OrderFinish.as_view(), name='order_finish_url'),
#carts url
path('cart/', views.CartView.as_view(),name='cart_url'),
#edit
path('cart/minus/<str:slug>',views.minus_to_cart,name='minus_to_cart_url'),
path('cart/add/<str:slug>',views.add_to_cart,name='add_to_cart_url'),
path('cart/remove/<str:slug>',views.remove_from_view,name='remove_from_view_url'),
#categories url
path('category/<str:slug>',views.CategoryView.as_view(), name='category_list_url' ),
#edit
path('edit_categories/',views.CategoriesEditView.as_view(),name='edit_categories_url'),
]
<file_sep>/ecomapp/views.py
from django.shortcuts import render
from django.shortcuts import redirect
from django.urls import reverse
from django.views.generic import TemplateView
from ecomapp.models import Category, Product, Cart, CartItem, ProductImage, Order, Page
from django.core.paginator import Paginator
from django.http.response import HttpResponseRedirect
from ecomapp.forms import OrderForm, PageEditForm, OrderForm, CategoryForm
from django.db.models import Q
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.mail import send_mail
from django.conf import settings
from decimal import Decimal
from ecomapp.models import sum_items
# Create your views here.
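# Return the cart referenced by the session, creating and storing a new Cart when none
# exists yet; for an existing cart this also refreshes session['total'] with its item count.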
def get_cart(request):
try:
cart_id = request.session['cart_id']
cart = Cart.objects.get(id=cart_id)
request.session['total'] = cart.item.count()
except:
cart = Cart()
cart.save()
cart_id = cart.id
request.session['cart_id'] = cart_id
cart = Cart.objects.get(id=cart_id)
return cart
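# Drop any in-progress order-builder data (order_id, cart_order_id, order_total) from the
# session; missing keys are just logged and skipped.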
def delete_order_session(request):
try:
del(request.session['order_id'])
except:
print('not delete')
try:
del(request.session['cart_order_id'])
except:
print('not delete cart_order_id')
try:
del(request.session['order_total'])
except:
print('not delete order_total')
return request
class BaseView(TemplateView):
template_name = 'ecomapp/index.html'
def get(self, request):
request = delete_order_session(request)
cart = get_cart(request)
categories = Category.objects.all()
lorem_ipsum = '''Lorem Ipsum - це текст-"риба", що використовується в друкарстві та дизайні. Lorem Ipsum є, фактично, стандартною "рибою" аж з XVI сторіччя, коли невідомий друкар взяв шрифтову гранку та склав на ній підбірку зразків шрифтів. "Риба" не тільки успішно пережила п'ять століть, але й прижилася в електронному верстуванні, залишаючись по суті незмінною. Вона популяризувалась в 60-их роках минулого сторіччя завдяки виданню зразків шрифтів Letraset, які містили уривки з Lorem Ipsum, і вдруге - нещодавно завдяки програмам комп'ютерного верстування на кшталт Aldus Pagemaker, які використовували різні версії Lorem Ipsum.'''
try:
description = Page.objects.get(slug='description')
        except Page.DoesNotExist:
description = Page(title='Опис footer' , description= 'Опис про сайт',slug='description')
description.save()
try:
about_us = Page.objects.get(slug='about-us')
        except Page.DoesNotExist:
about_us = Page(title='Про нас' , description= lorem_ipsum,slug='about-us')
about_us.save()
try:
contacts = Page.objects.get(slug='contacts')
        except Page.DoesNotExist:
contacts = Page(title='Контакти', description= lorem_ipsum,slug='contacts')
contacts.save()
try:
buying_type = Page.objects.get(slug='buying-type')
        except Page.DoesNotExist:
buying_type = Page(title='Доставка', description= lorem_ipsum,slug='buying-type')
buying_type.save()
self.context = {
'categories': categories,
'cart':cart,
'description':description,
'buying_type':buying_type,
'about_us':about_us,
'contacts':contacts,
}
return render(request,self.template_name,context=self.context)
class ListOrder(LoginRequiredMixin,BaseView):
template_name = 'ecomapp/orders.html'
login_url = '/admin/'
redirect_field_name = 'redirect_to'
def get(self, request):
super().get(request)
orders = Order.objects.all()
self.context['orders'] = orders
return render(request, self.template_name, context=self.context )
class OrderDetailView(LoginRequiredMixin,BaseView):
login_url = '/admin/'
redirect_field_name = 'redirect_to'
template_name = 'ecomapp/order_detail.html'
def get(self, request,id):
super().get(request)
order = Order.objects.get(id=id)
self.context['form'] = OrderForm(instance = order)
self.context['order'] = order
return render(request,self.template_name,context=self.context)
def post(self, request, id):
super().get(request)
order = Order.objects.get(id=id)
bound_form = OrderForm(request.POST, instance=order)
self.context['form'] = bound_form
self.context['order'] = order
if bound_form.is_valid():
form = bound_form.save()
return render(request,self.template_name,context=self.context)
class PageView(BaseView):
template_name = 'ecomapp/page.html'
def get(self,request,slug):
super().get(request)
self.context['page'] = Page.objects.get(slug=slug)
return render(request, self.template_name, context=self.context)
class PageEdit(LoginRequiredMixin,BaseView):
template_name = 'ecomapp/page_edit.html'
login_url = '/admin/'
redirect_field_name = 'redirect_to'
def get(self, request,slug):
super().get(request)
page = Page.objects.get(slug=slug)
self.context['form'] = PageEditForm(instance = page)
self.context['page'] = page
return render(request,self.template_name,context=self.context)
def post(self,request,slug):
super().get(request)
self.context['page'] = Page.objects.get(slug=slug)
bound_form = PageEditForm(request.POST, instance=self.context['page'])
self.context['form'] = bound_form
if bound_form.is_valid():
form = bound_form.save()
return redirect(reverse('page_url', kwargs={'slug':form.slug}))
return render(request,self.template_name,context=self.context)
class IndexView(BaseView):
def get(self,request):
super().get(request)
products = Product.objects.filter(available=True)
        # array for the main-page view: each category together with its products
categories_index = []
for category in self.context['categories']:
p = products.filter(category=category)
if len(p) > 0:
c = {
'name':category.name,
'url':category.get_absolute_url(),
'products':p[:3]
}
categories_index.append(c)
self.context['categories_index'] = categories_index
return render(request,self.template_name,context=self.context)
class SearchView(BaseView):
template_name = 'ecomapp/product_list.html'
def get(self,request):
super().get(request)
search_query = request.GET.get('search', '')
if search_query == '':
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
self.context['category'] = Category()
self.context['category'].name = 'Пошук: ' + search_query
products = Product.objects.filter(Q(title__icontains=search_query) | Q(description__icontains=search_query) )
paginator = Paginator(products, 9)
page_number = request.GET.get('page', 1)
page = paginator.get_page(page_number)
self.context['page'] = page
return render(request,self.template_name,context=self.context)
class CategoryView(BaseView):
template_name = 'ecomapp/product_list.html'
def get(self,request,slug):
super().get(request)
self.context['category'] = self.context['categories'].get(slug=slug)
self.context['category'].name = 'Категорія: ' + self.context['category'].name
products = Product.objects.filter(category=self.context['category'])
paginator = Paginator(products, 9)
page_number = request.GET.get('page', 1)
page = paginator.get_page(page_number)
self.context['page'] = page
self.context['is_paginated'] = page.has_other_pages()
if page.has_previous():
self.context['prev_url'] = '?page={}'.format(page.previous_page_number())
else:
self.context['prev_url'] = False
if page.has_next():
self.context['next_url'] = '?page={}'.format(page.next_page_number())
else:
self.context['next_url'] = False
return render(request, self.template_name ,context=self.context)
class CategoriesEditView(LoginRequiredMixin,BaseView):
template_name = 'ecomapp/edit_categories.html'
login_url = '/admin/'
redirect_field_name = 'redirect_to'
def get(self,request):
super().get(request)
self.context['form'] = CategoryForm()
return render(request,self.template_name,context=self.context)
def post(self,request):
super().get(request)
self.context['form'] = CategoryForm()
bound_form = CategoryForm(request.POST)
if bound_form.is_valid():
form = bound_form.save()
self.context['categories'] = Category.objects.all()
return render(request,self.template_name,context=self.context)
return render(request,self.template_name,context=self.context)
class ProductDetailView(BaseView):
template_name = 'ecomapp/product_detail.html'
def get(self,request,slug):
super().get(request)
self.context['product'] = Product.objects.get(slug=slug)
product_album = ProductImage.objects.filter(product=self.context['product'])
if len(product_album) > 0:
self.context['product_album'] = product_album
return render(request, self.template_name ,context=self.context)
#order views
def remove_product_order(request,id, item_id):
order = Order.objects.get(id=id)
order.remove_product_order(item_id)
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
def add_product_order(request,id, item_id):
order = Order.objects.get(id=id)
order.add_product_order(item_id)
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
def minus_product_order(request,id, item_id):
order = Order.objects.get(id=id)
order.minus_product_order(item_id)
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
def get_cart_order(request):
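    """Return the temporary Cart used while editing the order stored in session['order_id'], creating it on first use with the order's total."""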
try:
cart_order_id = request.session['cart_order_id']
cart = Cart.objects.get(id=cart_order_id)
request.session['order_total'] = cart.item.count()
    except (KeyError, Cart.DoesNotExist):
cart = Cart()
cart.cart_total = Order.objects.get(id=request.session['order_id']).total
cart.save()
cart_order_id = cart.id
request.session['cart_order_id'] = cart_order_id
cart = Cart.objects.get(id=cart_order_id)
return cart
class OrderProductDetailView(LoginRequiredMixin,TemplateView):
template_name = 'ecomapp/order_product_detail.html'
login_url = '/admin/'
redirect_field_name = 'redirect_to'
def get(self,request,slug):
product = Product.objects.get(slug=slug)
cart_order = get_cart_order(request)
context = {
'product' :product ,
'cart_order':cart_order,
'product_album' : ProductImage.objects.filter(product=product),
'categories': Category.objects.all()
}
return render(request, self.template_name ,context=context)
class OrderIndexView(LoginRequiredMixin,TemplateView):
template_name = "ecomapp/order_index.html"
login_url = '/admin/'
redirect_field_name = 'redirect_to'
def get(self,request):
cart_order = get_cart_order(request)
search_query = request.GET.get('search', '')
if search_query != '':
title = 'Пошук'
products = Product.objects.filter(Q(title__icontains=search_query) | Q(description__icontains=search_query) )
else:
title = 'всі продукти.'
products = Product.objects.all()
context = {
'cart_order' : cart_order,
'categories' : Category.objects.all(),
'products' : products,
'title': title
}
return render(request, self.template_name ,context=context)
class OrderCategoryView(LoginRequiredMixin,TemplateView):
template_name = "ecomapp/order_index.html"
context = {}
login_url = '/admin/'
redirect_field_name = 'redirect_to'
def get(self,request, slug):
cart_order = get_cart_order(request)
self.context['cart_order'] = cart_order
self.context['categories'] = Category.objects.all()
category = self.context['categories'].filter(slug=slug)
self.context['products'] = Product.objects.filter(category=category)
return render(request, self.template_name ,context=self.context)
class OrderCartView(LoginRequiredMixin,TemplateView):
template_name = "ecomapp/order_cart.html"
context = {}
login_url = '/admin/'
redirect_field_name = 'redirect_to'
def get(self, request):
cart_order = get_cart_order(request)
self.context['cart_order'] = cart_order
self.context['categories'] = Category.objects.all()
return render(request, self.template_name ,context=self.context)
def add_product_in_order(request,id):
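    # Start editing an existing order: remember its id in the session and mirror its items into the temporary order cart.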
order = Order.objects.get(id=id)
request.session['order_id'] = id
cart = get_cart_order(request)
for item in cart.item.all():
cart.item.remove(item)
for item in order.products.all():
cart.item.add(item.id)
cart.save()
return redirect(reverse('add_order_index_url'))
def save_order(request):
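    # Write the items collected in the temporary cart back into the order, update its total and clear the editing session keys.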
cart_order = get_cart_order(request)
id = request.session['order_id']
order = Order.objects.get(id=id)
if order.products.count() > 0:
for item in order.products.all():
cart_order.item.remove(item)
for item in cart_order.item.all():
order.products.add(item)
order.set_sum(cart_order.cart_total)
request = delete_order_session(request)
return redirect(reverse('order_detail_url',kwargs={'id':id}))
class CreateOrder(BaseView):
template_name = 'ecomapp/order.html'
def send_order(self, order):
email = settings.EMAIL_HOST_USER
message = '''Клієт {} {} {} оформив замовлення.<br>
Перетелефонуйте для уточнення деталей.
Сума: {}<br>
Номер: {}<br>c
<a href="https://svetlyachokshop.com{}">переглянути </a>'''.format(order.second_name, order.first_name, order.last_name, order.total, order.phone_number, order.get_absolute_url())
tema = 'Замовлення !!!'
try:
send_mail(tema, message, email, [email] ,html_message=message, fail_silently=False)
        except Exception:
print('email error!!!!!!!!!!!!!!!!!!!!!!!!!')
def get(self,request):
super().get(request)
self.context['form'] = OrderForm()
return render(request, self.template_name, context=self.context )
def post(self,request):
super().get(request)
bound_form = OrderForm(request.POST)
if bound_form.is_valid():
form = bound_form.save(commit=False)
form.total = self.context['cart'].cart_total
form.products_add(self.context['cart'])
try:
self.send_order(form)
            except Exception:
print("not network")
return redirect(reverse('order_finish_url'))
self.context['form'] = bound_form
return render(request, self.template_name, context=self.context )
class OrderFinish(BaseView):
template_name = 'ecomapp/order_finish.html'
# cart views
class CartView(BaseView):
template_name = 'ecomapp/cart.html'
def add_to_cart(request, slug):
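    # When an order is being edited (order_id in session), operate on the temporary order cart, otherwise on the visitor's session cart.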
try:
if request.session['order_id']:
cart = get_cart_order(request)
    except KeyError:
cart = get_cart(request)
flug = False
product = Product.objects.get(slug=slug)
els = list(cart.item.all())
for item in els:
if item.product == product:
item.add_qty()
flug = True
cart.set_sum(sum_items(els))
break
if flug==False:
cart.add_to_cart(slug)
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
def minus_to_cart(request, slug):
try:
if request.session['order_id']:
cart = get_cart_order(request)
    except KeyError:
cart = get_cart(request)
product = Product.objects.get(slug=slug)
els = list(cart.item.all())
for item in els:
if item.product == product:
item.minus_qty()
cart.set_sum(sum_items(els))
break
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
def remove_from_view(request, slug):
try:
if request.session['order_id']:
cart = get_cart_order(request)
    except KeyError:
cart = get_cart(request)
cart.remove_product_cart(slug)
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
<file_sep>/ecomapp/models.py
from django.db import models
from django.shortcuts import reverse
from django.utils.text import slugify
from transliterate import translit
from django.core.validators import MaxValueValidator, MinValueValidator
from decimal import Decimal
# Create your models here.
class Category(models.Model):
name = models.CharField(max_length=50, verbose_name = 'Назва')
slug = models.SlugField(blank=True,verbose_name="Ключове слово(не обов'ясково)")
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('category_list_url',kwargs={'slug':self.slug})
def save(self, *args, **kwargs):
if not self.slug and self.name:
slug = slugify(translit(self.name,'uk', reversed=True))
self.slug = slug
super().save( *args, **kwargs)
class Meta:
verbose_name = 'Категорію'
verbose_name_plural = 'Категорії'
ordering = ['name']
class Page(models.Model):
title = models.CharField(max_length=100, verbose_name='Заголовок')
description = models.TextField(verbose_name='Опис')
slug = models.SlugField(verbose_name="Ключове слово")
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse('page_url', kwargs={'slug':self.slug})
def get_edit_url(self):
return reverse('page_edit_url', kwargs={'slug':self.slug})
class Meta:
verbose_name_plural = 'Сторінки сайта'
verbose_name = 'Сторінку сайта'
def image_folder(instance, filename):
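    # Builds the upload path "<slug>/<slug>.<ext>" from the owning instance's slug.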
filename = instance.slug + '.' + filename.split('.')[1]
return "{0}/{1}".format(instance.slug,filename)
def image_folder_cover(instance, filename):
filename = instance.slug + '_cover.' + filename.split('.')[-1]
return "{0}/{1}".format(instance.slug,filename)
def image_folder_album(instance, filename):
filename = instance.product.slug + '.' + filename.split('.')[-1]
return "{0}/{1}".format(instance.product.slug, filename)
class Product(models.Model):
title = models.CharField(max_length=50, verbose_name='Заголовок')
description = models.TextField( verbose_name='Опис')
price = models.DecimalField(max_digits=9, decimal_places=2, verbose_name='Ціна')
category = models.ForeignKey(Category,on_delete=models.CASCADE, verbose_name='Категорія')
count = models.PositiveIntegerField(default=1, verbose_name='Кількість товару')
slug = models.SlugField(blank=True, verbose_name="Ключове слово(не обов'ясково)")
available = models.BooleanField(default=True, verbose_name='В наявності')
date = models.DateTimeField(auto_now_add=True)
album_cover = models.ImageField(upload_to=image_folder_cover, verbose_name='Фото обкладинки')
def get_absolute_url(self):
return reverse('product_detail_url',kwargs={'slug':self.slug})
def get_add_order_url(self):
return reverse('order_product_detail_url',kwargs={'slug':self.slug})
def delete(self, *args, **kwargs):
self.album_cover.delete(save=False)
        super().delete(*args, **kwargs)
def save(self, *args, **kwargs):
if not self.slug and self.title:
slug = slugify(translit(self.title,'uk', reversed=True))
self.slug = slug
super().save( *args, **kwargs)
def get_add_to_cart_url(self):
return reverse('add_to_cart_url',kwargs={'slug':self.slug})
def get_add_to_cart_order_url(self):
return reverse('add_to_cart_url',kwargs={'slug':self.slug})
def __str__(self):
return self.title
class Meta:
verbose_name = 'Продукт'
verbose_name_plural = 'Продукти'
ordering = ['-date']
class Reduction(models.Model):
title = models.CharField(max_length = 100)
reduction = models.PositiveSmallIntegerField(default = 1, validators=[
MaxValueValidator(100),
MinValueValidator(1)
])
description = models.TextField()
product = models.ManyToManyField(Product)
image = models.ImageField(upload_to=image_folder, blank=True, null=True)
date = models.DateTimeField(auto_now_add = True)
def __str__(self):
return "{0}% | {1}".format(str(self.reduction), self.title)
class Meta:
verbose_name = 'Знишку'
verbose_name_plural = 'Знишки'
ordering = ['-date']
class ProductImage(models.Model):
product = models.ForeignKey(Product, related_name='images',on_delete=models.CASCADE)
images = models.ImageField(upload_to=image_folder_album )
def __str__(self):
return self.images.url
def delete(self, *args, **kwargs):
self.images.delete(save=False)
super().delete(*args,**kwargs)
class Meta:
verbose_name = 'фото'
verbose_name_plural = 'фото'
class CartItem(models.Model):
product = models.ForeignKey(Product,on_delete=models.CASCADE,verbose_name='Продукт')
qty = models.PositiveIntegerField(default=1,verbose_name='Назва')
item_total = models.DecimalField(max_digits=9,decimal_places=2,default=0,blank=True,verbose_name="Сума(необов'язково, рахує самостійно)")
def __str__(self):
return "{0} Cart item for product {1}".format(self.id,self.product.title)
def minus_qty(self):
if self.qty > 1:
self.qty -= 1
self.item_total = self.qty * self.product.price
self.save()
def add_qty(self):
if self.qty + 1 <= self.product.count:
self.qty += 1
self.item_total = self.qty * self.product.price
self.save()
def save(self, *args, **kwargs):
if not self.item_total:
self.item_total = self.qty * self.product.price
super().save( *args, **kwargs)
def sum_items(elements,save = True):
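    """Return the Decimal sum of item_total over the given cart items (the save flag is currently unused)."""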
total_price = Decimal(0.00)
for item in elements:
total_price = total_price + item.item_total
return total_price
class Cart(models.Model):
item = models.ManyToManyField(CartItem, blank=True)
cart_total = models.DecimalField(max_digits=9,decimal_places=2,default=0)
def __str__(self):
return str(self.id)
def add_to_cart(self,slug):
product = Product.objects.get(slug=slug)
new_item = CartItem.objects.get_or_create(product=product,item_total=product.price)[0]
els = list(self.item.all())
if new_item not in els:
self.item.add(new_item)
els.append(new_item)
# print(els)
self.set_sum(sum_items(els))
def remove_product_cart(self,slug):
product = Product.objects.get(slug=slug)
els = list(self.item.all())
for item in els:
if item.product == product:
self.item.remove(item)
item.delete()
els.remove(item)
# print(els)
self.set_sum(sum_items(els))
def get_qty(self):
return str(self.item.count())
def set_sum(self,sum, save=True):
self.cart_total = sum
if save:
self.save()
class Order(models.Model):
# Accepted in processing = AIP_status, 'Прийнятий в обробку'
# In processing = IP_status, 'В обробці'
# Paid = PAID_status, 'Оплачено'
ORDER_STATUS_CHOICES = (
('AIP_status', 'Прийнятий в обробку'),
('IP_status', 'В обробці'),
('PAID_status', 'Оплачено'),
)
DELIVERY_STATUS = (
('nova_poshta', 'Нова пошта'),
('ukr_poshta', 'Укр-пошта'),
)
# cart = models.ForeignKey(Cart,on_delete=models.CASCADE, verbose_name='Корзина')
products = models.ManyToManyField(CartItem, verbose_name='Товари')
total = models.DecimalField(max_digits=9, decimal_places=2, default = 0, verbose_name="Сума(необов'язково, рахує самостійно)",blank=True)
second_name = models.CharField(max_length=200, verbose_name='Прізвище')
first_name = models.CharField(max_length=200, verbose_name='Імя')
last_name = models.CharField(max_length=200, verbose_name='Побатькові')
phone_number = models.CharField(max_length=9, verbose_name='Номер телефону +380')
email = models.EmailField(blank = True, verbose_name='Електрона пошта')
buying_type = models.CharField(max_length=40,choices=DELIVERY_STATUS, verbose_name='Спосіб доставки')
address = models.CharField(max_length=255, verbose_name='адреса')
status = models.CharField(max_length=100, choices=ORDER_STATUS_CHOICES,default='AIP_status', verbose_name='Статус')
comments = models.TextField(blank = True, verbose_name='Коментарь')
date = models.DateTimeField(auto_now_add=True)
def get_admin_url(self):
return reverse('order_detail_url',kwargs={'id':self.id})
def save(self, *args, **kwargs):
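        # Note: every call to save() deducts the ordered quantities from product stock again, not only the first one.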
super().save( *args, **kwargs)
for item in self.products.all():
if item.product.count - item.qty < 1:
item.product.count = 0
item.product.available = False
else:
item.product.count -= item.qty
item.product.save()
def add_product_order(self,item_id):
C_item = CartItem.objects.get(id=item_id)
els = list(self.products.all())
for item in els:
if item == C_item:
item.add_qty()
break
self.set_sum(sum_items(els))
def minus_product_order(self,item_id):
C_item = CartItem.objects.get(id=item_id)
els = list(self.products.all())
        for item in els:  # iterate the cached list so the updated item_total is reflected in the recalculated sum
if item == C_item:
item.minus_qty()
break
self.set_sum(sum_items(els))
def delete(self, *args, **kwargs):
for item in self.products.all():
item.delete()
super().delete(*args, **kwargs)
def remove_product_order(self,item_id):
C_item = CartItem.objects.get(id=item_id)
els = list(self.products.all())
for item in els:
if item == C_item:
self.products.remove(item)
els.remove(item)
break
C_item.delete()
self.set_sum(sum_items(els))
def products_add(self,cart):
self.save()
for item in cart.item.all():
new_item = CartItem(product=item.product,qty=item.qty,item_total=item.item_total)
new_item.save()
item.delete()
self.products.add(new_item.id)
self.save()
cart.delete()
def set_sum(self, sum,save = True):
self.total = sum
if save:
self.save()
def __str__(self):
return "{} /ДАТА: {} /ПІБ: {} {} {}".format(str(self.id), str(self.date.strftime("%d-%B-%Y")), self.second_name,self.first_name, self.last_name)
def get_absolute_url(self):
return reverse('order_detail_url',kwargs={'id':self.id})
def get_add_product_in_order(self):
return reverse('add_product_in_order_url',kwargs={'id':self.id})
class Meta:
ordering = ['-date']
verbose_name = 'Замовлення'
verbose_name_plural = 'Замовлення'<file_sep>/requirements.txt
certifi==2019.6.16
chardet==3.0.4
Django==2.2.3
django-crispy-forms==1.7.2
django-storages==1.7.1
django-summernote==0.8.11.4
dropbox==9.4.0
gunicorn==19.9.0
idna==2.8
Pillow==5.1.0
psycopg2==2.8.3
pytz==2019.1
requests==2.22.0
six==1.12.0
sqlparse==0.3.0
transliterate==1.10.2
urllib3==1.25.3 | 1b82f223a0a34188b1616b955d1b25d0c418e536 | [
"Python",
"Text"
] | 6 | Python | DImasBo/shop-django | 915a647f5fbbb8444209032b3f1a9f9f3aebc9fc | 61126ff0195395e6514c0ebcddaaae76caba515f | |
refs/heads/master | <file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.fragments;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.github.barteksc.pdfviewer.PDFView;
import com.github.barteksc.pdfviewer.listener.OnLoadCompleteListener;
import com.github.barteksc.pdfviewer.listener.OnPageChangeListener;
import com.github.barteksc.pdfviewer.listener.OnPageErrorListener;
import com.github.barteksc.pdfviewer.scroll.DefaultScrollHandle;
import com.github.barteksc.pdfviewer.util.FitPolicy;
import com.shockwave.pdfium.PdfDocument;
import com.zhang.baselib.BaseApplication;
import com.zhang.okinglawenforcementphone.R;
import java.util.List;
/**
 * Case library: shows a bundled sample case as a PDF.
*/
public class PuttedForwardFragment extends Fragment implements OnPageChangeListener, OnLoadCompleteListener, OnPageErrorListener {
// TODO: Rename parameter arguments, choose names that match
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
private static final String ARG_PARAM1 = "param1";
private static final String ARG_PARAM2 = "param2";
// TODO: Rename and change types of parameters
private String mParam1;
private String mParam2;
Integer pageNumber = 0;
private PDFView mPdfView;
private View mInflate;
public PuttedForwardFragment() {
// Required empty public constructor
}
public static PuttedForwardFragment newInstance(String param1, String param2) {
PuttedForwardFragment fragment = new PuttedForwardFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM1, param1);
args.putString(ARG_PARAM2, param2);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mParam1 = getArguments().getString(ARG_PARAM1);
mParam2 = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
if (mInflate ==null){
mInflate = inflater.inflate(R.layout.fragment_putted_forward, container, false);
}
initView(mInflate);
return mInflate;
}
public void initView(View rootView) {
final RecyclerView rc = rootView.findViewById(R.id.rc);
mPdfView = rootView.findViewById(R.id.pdfView);
mPdfView.fromAsset("case1.pdf")
.defaultPage(pageNumber)
.onPageChange(this)
.enableAnnotationRendering(true)
.onLoad(this)
.scrollHandle(new DefaultScrollHandle(BaseApplication.getApplictaion()))
.spacing(10) // in dp
.onPageError(this)
.pageFitPolicy(FitPolicy.BOTH)
.load();
}
@Override
public void onPageChanged(int page, int pageCount) {
pageNumber = page;
// setTitle(String.format("%s %s / %s", pdfFileName, page + 1, pageCount));
}
@Override
public void loadComplete(int nbPages) {
// PdfDocument.Meta meta = mPdfView.getDocumentMeta();
printBookmarksTree(mPdfView.getTableOfContents(), "-");
}
@Override
public void onPageError(int page, Throwable t) {
}
public void printBookmarksTree(List<PdfDocument.Bookmark> tree, String sep) {
for (PdfDocument.Bookmark b : tree) {
if (b.hasChildren()) {
printBookmarksTree(b.getChildren(), sep + "-");
}
}
}
}
<file_sep>package com.zhang.okinglawenforcementphone.beans;
import org.greenrobot.greendao.annotation.Entity;
import org.greenrobot.greendao.annotation.Id;
import org.greenrobot.greendao.annotation.Generated;
/**
* Created by Administrator on 2018/5/18/018.
*/
@Entity
public class GreenSearchHistory {
@Id(autoincrement = true)
private Long id;
private String userId;
private String searchText;
private Long time;
@Generated(hash = 298392090)
public GreenSearchHistory(Long id, String userId, String searchText,
Long time) {
this.id = id;
this.userId = userId;
this.searchText = searchText;
this.time = time;
}
@Generated(hash = 918844437)
public GreenSearchHistory() {
}
public Long getId() {
return this.id;
}
public void setId(Long id) {
this.id = id;
}
public String getUserId() {
return this.userId;
}
public void setUserId(String userId) {
this.userId = userId;
}
public String getSearchText() {
return this.searchText;
}
public void setSearchText(String searchText) {
this.searchText = searchText;
}
public Long getTime() {
return this.time;
}
public void setTime(Long time) {
this.time = time;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.presenter;
import com.zhang.okinglawenforcementphone.beans.GreenMissionLog;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import com.zhang.okinglawenforcementphone.mvp.contract.GetHttpMissionLogContract;
import com.zhang.okinglawenforcementphone.mvp.model.GetHttpMissionLogModel;
import java.io.File;
/**
* Created by Administrator on 2018/4/23/023.
*/
public class GetHttpMissionLogPresenter implements GetHttpMissionLogContract.Presenter {
private GetHttpMissionLogContract.View mView;
private GetHttpMissionLogContract.Model mModel;
public GetHttpMissionLogPresenter(GetHttpMissionLogContract.View view) {
mView = view;
mModel = new GetHttpMissionLogModel(this);
}
@Override
public void getHttpMissionLog(GreenMissionTask mission) {
mModel.getHttpMissionLog(mission);
}
@Override
public void loadHttpMissionLogSucc(GreenMissionLog greenMissionLog) {
mView.loadHttpMissionLogSucc(greenMissionLog);
}
@Override
public void loadEmpty(GreenMissionLog greenMissionLog) {
mView.loadEmpty(greenMissionLog);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.beans;
import com.google.gson.annotations.SerializedName;
import java.util.List;
/**
* Created by Administrator on 2018/1/18.
*/
public class ReceptionStaffBean {
private List<ALLUSERBean> ALLUSER;
private List<CBRBean> CBR;
private List<SZJCBean> SZJC;
@SerializedName("null")
private List<NullBean> _$Null65; // FIXME check this code
private List<SubCBRBean> subCBR;
private List<SubSZJCBean> subSZJC;
public List<ALLUSERBean> getALLUSER() {
return ALLUSER;
}
public void setALLUSER(List<ALLUSERBean> ALLUSER) {
this.ALLUSER = ALLUSER;
}
public List<CBRBean> getCBR() {
return CBR;
}
public void setCBR(List<CBRBean> CBR) {
this.CBR = CBR;
}
public List<SZJCBean> getSZJC() {
return SZJC;
}
public void setSZJC(List<SZJCBean> SZJC) {
this.SZJC = SZJC;
}
public List<NullBean> get_$Null65() {
return _$Null65;
}
public void set_$Null65(List<NullBean> _$Null65) {
this._$Null65 = _$Null65;
}
public List<SubCBRBean> getSubCBR() {
return subCBR;
}
public void setSubCBR(List<SubCBRBean> subCBR) {
this.subCBR = subCBR;
}
public List<SubSZJCBean> getSubSZJC() {
return subSZJC;
}
public void setSubSZJC(List<SubSZJCBean> subSZJC) {
this.subSZJC = subSZJC;
}
public static class ALLUSERBean {
/**
* DEPTID : 001001
* DEPTNAME : 广东省水利厅水利水政监察局
* REMARK : CBR
* USERID : d97863e57c7b4f8ba30262d932b36645
* USERNAME : 周 磊
* ZFZH : O116030
*/
private String DEPTID;
private String DEPTNAME;
private String REMARK;
private String USERID;
private String USERNAME;
private String ZFZH;
public String getDEPTID() {
return DEPTID;
}
public void setDEPTID(String DEPTID) {
this.DEPTID = DEPTID;
}
public String getDEPTNAME() {
return DEPTNAME;
}
public void setDEPTNAME(String DEPTNAME) {
this.DEPTNAME = DEPTNAME;
}
public String getREMARK() {
return REMARK;
}
public void setREMARK(String REMARK) {
this.REMARK = REMARK;
}
public String getUSERID() {
return USERID;
}
public void setUSERID(String USERID) {
this.USERID = USERID;
}
public String getUSERNAME() {
return USERNAME;
}
public void setUSERNAME(String USERNAME) {
this.USERNAME = USERNAME;
}
public String getZFZH() {
return ZFZH;
}
public void setZFZH(String ZFZH) {
this.ZFZH = ZFZH;
}
}
public static class CBRBean {
/**
* DEPTID : 001001
* DEPTNAME : 广东省水利厅水利水政监察局
* REMARK : CBR
* USERID : d97863e57c7b4f8ba30262d932b36645
* USERNAME : 周 磊
* ZFZH : O116030
*/
private String DEPTID;
private String DEPTNAME;
private String REMARK;
private String USERID;
private String USERNAME;
private String ZFZH;
public String getDEPTID() {
return DEPTID;
}
public void setDEPTID(String DEPTID) {
this.DEPTID = DEPTID;
}
public String getDEPTNAME() {
return DEPTNAME;
}
public void setDEPTNAME(String DEPTNAME) {
this.DEPTNAME = DEPTNAME;
}
public String getREMARK() {
return REMARK;
}
public void setREMARK(String REMARK) {
this.REMARK = REMARK;
}
public String getUSERID() {
return USERID;
}
public void setUSERID(String USERID) {
this.USERID = USERID;
}
public String getUSERNAME() {
return USERNAME;
}
public void setUSERNAME(String USERNAME) {
this.USERNAME = USERNAME;
}
public String getZFZH() {
return ZFZH;
}
public void setZFZH(String ZFZH) {
this.ZFZH = ZFZH;
}
}
public static class SZJCBean {
/**
* DEPTID : 001001
* DEPTNAME : 广东省水利厅水利水政监察局
* REMARK : SZJC
* USERID : 4b7d309839db41e18a3439623c4fdf1d
* USERNAME : 张立强
* ZFZH : O078035
*/
private String DEPTID;
private String DEPTNAME;
private String REMARK;
private String USERID;
private String USERNAME;
private String ZFZH;
public String getDEPTID() {
return DEPTID;
}
public void setDEPTID(String DEPTID) {
this.DEPTID = DEPTID;
}
public String getDEPTNAME() {
return DEPTNAME;
}
public void setDEPTNAME(String DEPTNAME) {
this.DEPTNAME = DEPTNAME;
}
public String getREMARK() {
return REMARK;
}
public void setREMARK(String REMARK) {
this.REMARK = REMARK;
}
public String getUSERID() {
return USERID;
}
public void setUSERID(String USERID) {
this.USERID = USERID;
}
public String getUSERNAME() {
return USERNAME;
}
public void setUSERNAME(String USERNAME) {
this.USERNAME = USERNAME;
}
public String getZFZH() {
return ZFZH;
}
public void setZFZH(String ZFZH) {
this.ZFZH = ZFZH;
}
}
public static class NullBean {
/**
* DEPTID : 001001
* DEPTNAME : 广东省水利厅水利水政监察局
* USERID : f0624a37baef40798a216654d3900ae6
* USERNAME : 王春海
* ZFZH : O110454
*/
private String DEPTID;
private String DEPTNAME;
private String USERID;
private String USERNAME;
private String ZFZH;
public String getDEPTID() {
return DEPTID;
}
public void setDEPTID(String DEPTID) {
this.DEPTID = DEPTID;
}
public String getDEPTNAME() {
return DEPTNAME;
}
public void setDEPTNAME(String DEPTNAME) {
this.DEPTNAME = DEPTNAME;
}
public String getUSERID() {
return USERID;
}
public void setUSERID(String USERID) {
this.USERID = USERID;
}
public String getUSERNAME() {
return USERNAME;
}
public void setUSERNAME(String USERNAME) {
this.USERNAME = USERNAME;
}
public String getZFZH() {
return ZFZH;
}
public void setZFZH(String ZFZH) {
this.ZFZH = ZFZH;
}
}
public static class SubCBRBean {
/**
* DEPTID : 001001029001
* DEPTNAME : 研发中心测试大队
* REMARK : CBR
* USERID : 8aac8d5f5<KEY>
* USERNAME : DEV队长
* ZFZH : dev12345
*/
private String DEPTID;
private String DEPTNAME;
private String REMARK;
private String USERID;
private String USERNAME;
private String ZFZH;
public String getDEPTID() {
return DEPTID;
}
public void setDEPTID(String DEPTID) {
this.DEPTID = DEPTID;
}
public String getDEPTNAME() {
return DEPTNAME;
}
public void setDEPTNAME(String DEPTNAME) {
this.DEPTNAME = DEPTNAME;
}
public String getREMARK() {
return REMARK;
}
public void setREMARK(String REMARK) {
this.REMARK = REMARK;
}
public String getUSERID() {
return USERID;
}
public void setUSERID(String USERID) {
this.USERID = USERID;
}
public String getUSERNAME() {
return USERNAME;
}
public void setUSERNAME(String USERNAME) {
this.USERNAME = USERNAME;
}
public String getZFZH() {
return ZFZH;
}
public void setZFZH(String ZFZH) {
this.ZFZH = ZFZH;
}
}
public static class SubSZJCBean {
/**
* DEPTID : 001001029001
* DEPTNAME : 研发中心测试大队
* REMARK : SZJC
* USERID : 8aac8d5f586ac00<KEY>bb7050d
* USERNAME : DEV队长
* ZFZH : dev12345
*/
private String DEPTID;
private String DEPTNAME;
private String REMARK;
private String USERID;
private String USERNAME;
private String ZFZH;
public String getDEPTID() {
return DEPTID;
}
public void setDEPTID(String DEPTID) {
this.DEPTID = DEPTID;
}
public String getDEPTNAME() {
return DEPTNAME;
}
public void setDEPTNAME(String DEPTNAME) {
this.DEPTNAME = DEPTNAME;
}
public String getREMARK() {
return REMARK;
}
public void setREMARK(String REMARK) {
this.REMARK = REMARK;
}
public String getUSERID() {
return USERID;
}
public void setUSERID(String USERID) {
this.USERID = USERID;
}
public String getUSERNAME() {
return USERNAME;
}
public void setUSERNAME(String USERNAME) {
this.USERNAME = USERNAME;
}
public String getZFZH() {
return ZFZH;
}
public void setZFZH(String ZFZH) {
this.ZFZH = ZFZH;
}
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Display;
import android.view.View;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.github.mikephil.charting.animation.EasingFunction;
import com.github.mikephil.charting.charts.PieChart;
import com.github.mikephil.charting.components.Legend;
import com.github.mikephil.charting.data.Entry;
import com.github.mikephil.charting.data.PieData;
import com.github.mikephil.charting.data.PieDataSet;
import com.github.mikephil.charting.data.PieEntry;
import com.github.mikephil.charting.formatter.PercentFormatter;
import com.github.mikephil.charting.highlight.Highlight;
import com.github.mikephil.charting.listener.OnChartValueSelectedListener;
import com.github.mikephil.charting.utils.ColorTemplate;
import com.github.sundeepk.compactcalendarview.CompactCalendarView;
import com.zhang.baselib.BaseApplication;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.adapter.StatisRcyAdapter;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTaskDao;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import com.zhang.okinglawenforcementphone.utils.DialogUtil;
import com.zhang.okinglawenforcementphone.views.DividerItemDecoration;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
/**
 * Statistics query: calendar plus a half-pie chart of completed vs. unfinished mission tasks.
*/
public class StatisticalActivity extends BaseActivity {
@BindView(R.id.tv_title)
TextView mTvTitle;
@BindView(R.id.toolbar)
Toolbar mToolbar;
@BindView(R.id.calendar_title)
TextView calendarTitle;
@BindView(R.id.materialcalendarview2)
CompactCalendarView materialcalendarview;
@BindView(R.id.chart)
PieChart mChart;
private Unbinder mBind;
private SimpleDateFormat mDateFormat = new SimpleDateFormat("yyyy年MM月");
private SimpleDateFormat mDateFormatForDay = new SimpleDateFormat("yyyy年MM月dd日");
private String mSelectMonth;
private List<GreenMissionTask> mGreenMissionTasks;
private ArrayList<GreenMissionTask>mCompleteMissionTask = new ArrayList<>();
private ArrayList<GreenMissionTask>mUnfinishedMissionTask = new ArrayList<>();
private String mSelectDate;
private DialogUtil mDialogUtil;
private StatisRcyAdapter mStatisRcyAdapter;
private View mStatisView;
private Intent intent;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_statistical);
mBind = ButterKnife.bind(this);
initData();
setListener();
}
private void setListener() {
mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
finish();
}
});
materialcalendarview.setListener(new CompactCalendarView.CompactCalendarViewListener() {
@Override
public void onDayClick(Date dateClicked) {
mSelectDate = mDateFormatForDay.format(dateClicked);
mChart.setCenterText(mSelectDate);
getMissionTaskPie(-1);
}
@Override
public void onMonthScroll(Date firstDayOfNewMonth) {
mSelectMonth = mDateFormat.format(firstDayOfNewMonth);
calendarTitle.setText(mSelectMonth);
mChart.setCenterText(mSelectMonth);
getMissionTaskPie(0);
}
});
mChart.setOnChartValueSelectedListener(new OnChartValueSelectedListener() {
@Override
public void onValueSelected(Entry e, Highlight h) {
List<GreenMissionTask> greenMissionTasks = (List<GreenMissionTask>) e.getData();
if (mDialogUtil ==null){
mDialogUtil = new DialogUtil();
mStatisView = View.inflate(BaseApplication.getApplictaion(), R.layout.statistical_dialog, null);
RecyclerView rcyStatis = mStatisView.findViewById(R.id.rcy_statis);
rcyStatis.setLayoutManager(new LinearLayoutManager(BaseApplication.getApplictaion(), LinearLayoutManager.VERTICAL, false));
rcyStatis.addItemDecoration(new DividerItemDecoration(BaseApplication.getApplictaion(), 0, 10, getResources().getColor(R.color.activity_bg)));
mStatisRcyAdapter = new StatisRcyAdapter(R.layout.statistical_dialog_item,greenMissionTasks);
mStatisRcyAdapter.openLoadAnimation(BaseQuickAdapter.SLIDEIN_RIGHT);
rcyStatis.setAdapter(mStatisRcyAdapter);
mStatisRcyAdapter.setOnItemClickListener(new BaseQuickAdapter.OnItemClickListener() {
@Override
public void onItemClick(BaseQuickAdapter adapter, View view, int position) {
List<GreenMissionTask> data = adapter.getData();
GreenMissionTask greenMissionTask = data.get(position);
switch (greenMissionTask.getStatus()){
case "0":
case "1":
case "2":
intent = new Intent(StatisticalActivity.this, ArrangeTeamMembersActivity.class);
intent.putExtra("id", greenMissionTask.getId());
intent.putExtra("position", position);
startActivity(intent);
break;
case "3":
case "4":
case "100":
intent = new Intent(StatisticalActivity.this, MissionActivity.class);
intent.putExtra("id", greenMissionTask.getId());
intent.putExtra("position", position);
startActivity(intent);
break;
case "5":
intent = new Intent(StatisticalActivity.this, MissionRecorActivity.class);
intent.putExtra("id", greenMissionTask.getId());
intent.putExtra("taskId", greenMissionTask.getTaskid());
startActivity(intent);
break;
case "9":
intent = new Intent(StatisticalActivity.this, MissionActivity.class);
intent.putExtra("id", greenMissionTask.getId());
intent.putExtra("position", position);
startActivity(intent);
break;
default:
break;
}
}
});
}else {
mStatisRcyAdapter.setNewData(greenMissionTasks);
}
mDialogUtil.showBottomDialog(StatisticalActivity.this,mStatisView,400f);
}
@Override
public void onNothingSelected() {
}
});
}
private void initData() {
materialcalendarview.setFirstDayOfWeek(Calendar.SUNDAY);
materialcalendarview.setLocale(TimeZone.getDefault(), Locale.getDefault());
materialcalendarview.setUseThreeLetterAbbreviation(true);
Date date = new Date();
materialcalendarview.setCurrentDate(date);
mSelectMonth = mDateFormat.format(date);
calendarTitle.setText(mSelectMonth);
mChart.setUsePercentValues(true);
mChart.getDescription().setEnabled(false);
mChart.setCenterTextSize(16f);
mChart.setCenterText(mSelectMonth);
mChart.setDrawHoleEnabled(true);
mChart.setHoleColor(Color.WHITE);
moveOffScreen();
mChart.setTransparentCircleColor(Color.WHITE);
mChart.setTransparentCircleAlpha(110);
mChart.setHoleRadius(58f);
mChart.setTransparentCircleRadius(61f);
mChart.setDrawCenterText(true);
mChart.setRotationEnabled(false);
mChart.setHighlightPerTapEnabled(true);
mChart.setMaxAngle(180f); // HALF CHART
mChart.setRotationAngle(180f);
mChart.setCenterTextOffset(0, -20);
mGreenMissionTasks = GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao().queryBuilder()
.where(GreenMissionTaskDao.Properties.Userid.eq(OkingContract.CURRENTUSER.getUserid())).list();
getMissionTaskPie(0);
mChart.animateY(1400, EaseInOutQuad);
Legend l = mChart.getLegend();
l.setVerticalAlignment(Legend.LegendVerticalAlignment.TOP);
l.setHorizontalAlignment(Legend.LegendHorizontalAlignment.CENTER);
l.setOrientation(Legend.LegendOrientation.HORIZONTAL);
l.setDrawInside(false);
l.setXEntrySpace(7f);
l.setYEntrySpace(0f);
l.setYOffset(0f);
// entry label styling
mChart.setEntryLabelColor(Color.WHITE);
mChart.setEntryLabelTextSize(12f);
}
private void getMissionTaskPie(int type) {
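        // type == 0: group the tasks of the currently selected month; any other value: group only the selected day.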
mCompleteMissionTask.clear();
mUnfinishedMissionTask.clear();
for (GreenMissionTask greenMissionTask : mGreenMissionTasks) {
Log.i("GG",greenMissionTask.getEnd_time()+"");
Long end_time = greenMissionTask.getEnd_time();
if (type==0){
String yearMonth = mDateFormat.format(end_time);
if (mSelectMonth.equals(yearMonth)) {
if (greenMissionTask.getStatus().equals("5") || greenMissionTask.getStatus().equals("100")) {
mCompleteMissionTask.add(greenMissionTask);
} else {
mUnfinishedMissionTask.add(greenMissionTask);
}
}
}else {
String yearDay = mDateFormatForDay.format(end_time);
if (mSelectDate.equals(yearDay)) {
if (greenMissionTask.getStatus().equals("5") || greenMissionTask.getStatus().equals("100")) {
mCompleteMissionTask.add(greenMissionTask);
} else {
mUnfinishedMissionTask.add(greenMissionTask);
}
}
}
}
setData(mCompleteMissionTask, mUnfinishedMissionTask);
}
@Override
protected void onDestroy() {
super.onDestroy();
mDateFormat = null;
mBind.unbind();
}
private void setData(ArrayList<GreenMissionTask> completeMissionTask, ArrayList<GreenMissionTask> unfinishedMissionTask) {
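        // Two slices, completed vs. unfinished; the task lists are attached to the entries so a tap can show them in a dialog.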
ArrayList<PieEntry> values = new ArrayList<PieEntry>();
PieEntry completePieEntry = new PieEntry(completeMissionTask.size(), "完成" + completeMissionTask.size());
completePieEntry.setData(completeMissionTask);
values.add(completePieEntry);
PieEntry unfinishedPieEntry = new PieEntry(unfinishedMissionTask.size(), "未完成" + unfinishedMissionTask.size());
unfinishedPieEntry.setData(unfinishedMissionTask);
values.add(unfinishedPieEntry);
PieDataSet dataSet = new PieDataSet(values, "");
dataSet.setSliceSpace(3f);
dataSet.setSelectionShift(5f);
dataSet.setColors(ColorTemplate.MATERIAL_COLORS);
//dataSet.setSelectionShift(0f);
PieData data = new PieData(dataSet);
data.setValueFormatter(new PercentFormatter());
data.setValueTextSize(11f);
data.setValueTextColor(Color.WHITE);
mChart.setData(data);
mChart.invalidate();
}
public static final EasingFunction EaseInOutQuad = new EasingFunction() {
public float getInterpolation(float input) {
input *= 2f;
if (input < 1f) {
return 0.5f * input * input;
}
return -0.5f * ((--input) * (input - 2f) - 1f);
}
};
private void moveOffScreen() {
Display display = getWindowManager().getDefaultDisplay();
int height = display.getHeight(); // deprecated
int offset = (int) (height * 0.30); /* percent to move */
RelativeLayout.LayoutParams rlParams =
(RelativeLayout.LayoutParams) mChart.getLayoutParams();
rlParams.setMargins(0, 0, 0, -offset);
mChart.setLayoutParams(rlParams);
}
}
<file_sep>package com.zhang.baselib.utils;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
/**
* Created by Administrator on 2018/4/18.
*/
public class NetUtil {
/**
     * Checks whether the network is connected.
     * Requires this permission in the manifest:
*
* @code <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
*/
public static boolean isConnected(Context context) {
NetworkInfo info = getActiveNetworkInfo(context);
return info != null && info.isConnected();
}
/**
* 获取活动网络信息
*
* @param context 上下文
* @return NetworkInfo
*/
private static NetworkInfo getActiveNetworkInfo(Context context) {
ConnectivityManager cm = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
return cm.getActiveNetworkInfo();
}
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.content.Intent;
import android.net.Uri;
import android.support.v4.app.Fragment;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.GlideApp;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.GreenEvidenceMedia;
import com.zhang.okinglawenforcementphone.mvp.ui.activitys.PlayVideoOnlineActivity;
import java.util.ArrayList;
/**
* Created by zhao on 2016/10/9.
*/
public class CaseSimpleAdapter extends BaseAdapter {
private ArrayList<GreenEvidenceMedia> greenMedias;
private OnClickListener onClickListener;
private Fragment f;
private boolean canAdd;
private String typeName;
public CaseSimpleAdapter(ArrayList<GreenEvidenceMedia> greenMedias, Fragment f, boolean canAdd, String typeName) {
this.greenMedias = greenMedias;
this.f = f;
this.canAdd = canAdd;
this.typeName = typeName;
}
public void setOnClickListener(OnClickListener onClickListener) {
this.onClickListener = onClickListener;
}
@Override
public int getCount() {
return greenMedias.size()+1;
}
@Override
public Object getItem(int i) {
return null;
}
@Override
public long getItemId(int i) {
return 0;
}
@Override
public View getView(final int position, View convertView, ViewGroup viewGroup) {
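        // Position 0 is the fixed "add video" tile; the other cells show captured clips, tap plays the clip and long-press notifies the listener when editing is allowed.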
ViewHolder viewHolder;
if (convertView == null) {
viewHolder = new ViewHolder();
convertView = View.inflate(BaseApplication.getApplictaion(), R.layout.pic_item,null);
viewHolder.iv_pic = convertView.findViewById(R.id.sdv);
viewHolder.tv = (TextView) convertView.findViewById(R.id.tv);
convertView.setTag(viewHolder);
} else {
viewHolder = (ViewHolder) convertView.getTag();
}
if (position == 0) {
viewHolder.tv.setVisibility(View.GONE);
GlideApp.with(f)
.load(R.drawable.video)
.into(viewHolder.iv_pic);
if (canAdd) {
viewHolder.iv_pic .setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (onClickListener != null) {
onClickListener.onAddVideo();
}
}
});
}
} else {
viewHolder.tv.setVisibility(View.VISIBLE);
final Uri uri = Uri.parse(greenMedias.get(position - 1).getPath());
GlideApp.with(f)
.load(uri)
.into(viewHolder.iv_pic);
viewHolder.iv_pic .setOnLongClickListener(new View.OnLongClickListener() {
@Override
public boolean onLongClick(View v) {
if (onClickListener != null && canAdd) {
onClickListener.onLongItemClick(CaseSimpleAdapter.this, greenMedias, position - 1);
}
// bitmapCache.remove(uri);
return false;
}
});
viewHolder.iv_pic .setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(f.getActivity(), PlayVideoOnlineActivity.class);
intent.putExtra("path",uri.getPath());
f.startActivity(intent);
}
});
String path = uri.getPath();
String s = path.substring(path.lastIndexOf("/") + 1, path.length());
viewHolder.tv.setText(s.split("_")[0]+typeName);
}
return convertView;
}
public interface OnClickListener {
void onAddVideo();
void onLongItemClick(CaseSimpleAdapter adapter, ArrayList<GreenEvidenceMedia> data, int position);
}
class ViewHolder{
ImageView iv_pic;
TextView tv;
}
}
<file_sep>apply plugin: 'com.android.library'
android {
compileSdkVersion 27
defaultConfig {
javaCompileOptions {
annotationProcessorOptions {
arguments = [moduleName: project.getName()]
}
}
minSdkVersion 17
targetSdkVersion 27
versionCode 1
versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
}
dependencies {
implementation 'com.android.support:appcompat-v7:27.1.1'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'com.android.support.test:runner:1.0.1'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.1'
compile 'io.reactivex.rxjava2:rxjava:2.1.1'
compile 'io.reactivex.rxjava2:rxandroid:2.0.1'
compile 'com.squareup.retrofit2:retrofit:2.3.0'
compile 'com.squareup.retrofit2:converter-gson:2.3.0'
compile 'com.squareup.retrofit2:adapter-rxjava2:2.3.0'
compile 'com.github.CymChad:BaseRecyclerViewAdapterHelper:2.9.30'
compile 'com.google.code.gson:gson:2.8.0'
compile 'org.greenrobot:greendao:3.2.2'
compile 'com.github.bumptech.glide:glide:4.7.1'
annotationProcessor 'com.github.bumptech.glide:compiler:4.7.1'
    // SQLCipher for database encryption
compile 'net.zetetic:android-database-sqlcipher:3.5.6'
compile files('libs/commons-lang3-3.0-beta.jar')
compile 'com.apkfuns.logutils:library:1.0.6'
compile 'org.greenrobot:eventbus:3.1.1'
compile 'com.jaeger.statusbarutil:library:1.5.1'
}<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.app.Activity;
import android.content.ComponentName;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.os.Environment;
import android.support.design.widget.TextInputEditText;
import android.text.TextUtils;
import android.view.View;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.RadioButton;
import android.widget.TextView;
import android.widget.Toast;
import com.baidu.ocr.sdk.model.IDCardResult;
import com.chad.library.adapter.base.BaseMultiItemQuickAdapter;
import com.chad.library.adapter.base.BaseViewHolder;
import com.chad.library.adapter.base.entity.MultiItemEntity;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.ui.views.RxDialogLoading;
import com.zhang.baselib.ui.views.RxToast;
import com.zhang.baselib.utils.AppUtil;
import com.zhang.baselib.utils.FileUtil;
import com.zhang.baselib.utils.TextUtil;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.WrittenItemBean;
import com.zhang.okinglawenforcementphone.beans.WrittenRecordLevel0;
import java.io.File;
import java.util.Calendar;
import java.util.List;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.schedulers.Schedulers;
/**
* Created by Administrator on 2018/5/11/011.
*/
public class ExpandableStopIllegalAdapter extends BaseMultiItemQuickAdapter<MultiItemEntity, BaseViewHolder> {
public static final int TYPE_LEVEL_0 = 0;
public static final int TYPE_LEVEL_1 = 1;
public static final int TYPE_LEVEL_2 = 2;
public static final int TYPE_LEVEL_3 = 3;
private RadioButton mRb_natural_person;
private TextInputEditText mTet_parties_concerned_natural;
private TextInputEditText mTet_sex_natural;
private TextInputEditText mTet_card_natural;
private TextInputEditText mTet_phone_natural;
private TextInputEditText mTet_address_natural;
private TextInputEditText mTet_parties_concerned_other;
private TextInputEditText mTet_position_other;
private TextInputEditText mTet_phone_other;
private TextInputEditText mTet_representative_other;
private TextInputEditText mTet_credit_code_oher;
private TextInputEditText mTet_address_other;
private TextView mBt_idcard;
private EditText mEt_illegal_facts;
private EditText mEt_legal_provisions1;
private EditText mEt_legal_provisions2;
private TextInputEditText mTet_contact;
private TextInputEditText mTet_phone;
private TextInputEditText mTet_addr;
private Button mBt_prin;
private RxDialogLoading mRxDialogLoading;
private Activity mActivity;
private String mNaturalInfo;
private int mMYear;
private int mMMonth;
private int mMDay;
private TextView mTv_time;
public ExpandableStopIllegalAdapter(Activity activity, List<MultiItemEntity> data) {
super(data);
this.mActivity = activity;
addItemType(TYPE_LEVEL_0, R.layout.activity_mission_recor_level0);
addItemType(TYPE_LEVEL_1, R.layout.stop_thellegal_activities_info1);
addItemType(TYPE_LEVEL_2, R.layout.stop_thellegal_activities_info2);
addItemType(TYPE_LEVEL_3, R.layout.stop_thellegal_activities_info3);
        Calendar c = Calendar.getInstance();
        // current year
        mMYear = c.get(Calendar.YEAR);
        // current month (Calendar.MONTH is zero-based, hence the +1)
        mMMonth = c.get(Calendar.MONTH) + 1;
        // current day of the month
        mMDay = c.get(Calendar.DAY_OF_MONTH);
}
@Override
protected void convert(final BaseViewHolder helper, MultiItemEntity item) {
switch (helper.getItemViewType()) {
case TYPE_LEVEL_0:
final WrittenRecordLevel0 lv0 = (WrittenRecordLevel0) item;
helper.setText(R.id.title, lv0.subTitle);
helper.itemView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
int adapterPosition = helper.getAdapterPosition();
List<WrittenItemBean> subItems = lv0.getSubItems();
int subItemType = subItems.get(0).getItemType();
if (lv0.isExpanded()) {
collapse(adapterPosition);
} else {
expand(adapterPosition);
}
}
});
break;
case TYPE_LEVEL_1:
if (mRb_natural_person == null) {
mRb_natural_person = helper.getView(R.id.rb_natural_person);
mRb_natural_person.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean b) {
if (mRb_natural_person.isChecked()) {
mTet_parties_concerned_natural.setEnabled(true);
mTet_parties_concerned_other.setEnabled(false);
mTet_parties_concerned_other.setText("");
mTet_sex_natural.setEnabled(true);
mTet_card_natural.setEnabled(true);
mTet_phone_natural.setEnabled(true);
mTet_phone_other.setEnabled(false);
mTet_phone_other.setText("");
mTet_address_natural.setEnabled(true);
mTet_address_other.setEnabled(false);
mTet_address_other.setText("");
mTet_position_other.setEnabled(false);
mTet_position_other.setText("");
mTet_representative_other.setEnabled(false);
mTet_representative_other.setText("");
mTet_credit_code_oher.setEnabled(false);
mTet_credit_code_oher.setText("");
mBt_idcard.setEnabled(true);
} else {
mTet_parties_concerned_natural.setEnabled(false);
mTet_parties_concerned_other.setEnabled(true);
mTet_sex_natural.setEnabled(false);
mTet_card_natural.setEnabled(false);
mTet_phone_natural.setEnabled(false);
mTet_phone_other.setEnabled(true);
mTet_address_natural.setEnabled(false);
mTet_address_other.setEnabled(true);
mTet_position_other.setEnabled(true);
mTet_representative_other.setEnabled(true);
mTet_credit_code_oher.setEnabled(true);
mBt_idcard.setEnabled(false);
}
}
});
}
if (mTet_parties_concerned_natural == null) {
mTet_parties_concerned_natural = helper.getView(R.id.tet_parties_concerned_natural);
}
if (mTet_sex_natural == null) {
mTet_sex_natural = helper.getView(R.id.tet_sex_natural);
}
if (mTet_card_natural == null) {
mTet_card_natural = helper.getView(R.id.tet_card_natural);
}
if (mTet_phone_natural == null) {
mTet_phone_natural = helper.getView(R.id.tet_phone_natural);
}
if (mTet_address_natural == null) {
mTet_address_natural = helper.getView(R.id.tet_address_natural);
TextUtil.setEditTextInhibitInputSpace(mTet_address_natural);
TextUtil.setEditTextInhibitInputSpeChat(mTet_address_natural);
}
if (mTet_parties_concerned_other == null) {
mTet_parties_concerned_other = helper.getView(R.id.tet_parties_concerned_other);
}
if (mTet_position_other == null) {
mTet_position_other = helper.getView(R.id.tet_position_other);
}
if (mTet_phone_other == null) {
mTet_phone_other = helper.getView(R.id.tet_phone_other);
}
if (mTet_representative_other == null) {
mTet_representative_other = helper.getView(R.id.tet_representative_other);
TextUtil.setEditTextInhibitInputSpace(mTet_representative_other);
TextUtil.setEditTextInhibitInputSpeChat(mTet_representative_other);
}
if (mTet_credit_code_oher == null) {
mTet_credit_code_oher = helper.getView(R.id.tet_credit_code_oher);
}
if (mTet_address_other == null) {
mTet_address_other = helper.getView(R.id.tet_address_other);
TextUtil.setEditTextInhibitInputSpace(mTet_address_other);
TextUtil.setEditTextInhibitInputSpeChat(mTet_address_other);
}
if (mBt_idcard == null) {
mBt_idcard = helper.getView(R.id.bt_idcard);
helper.addOnClickListener(R.id.bt_idcard);
}
break;
case TYPE_LEVEL_2:
if (mEt_illegal_facts == null) {
mEt_illegal_facts = helper.getView(R.id.et_illegal_facts);
}
if (mEt_legal_provisions1 == null) {
mEt_legal_provisions1 = helper.getView(R.id.et_legal_provisions);
}
if (mEt_legal_provisions2 == null) {
mEt_legal_provisions2 = helper.getView(R.id.et_legal_provisions2);
}
break;
case TYPE_LEVEL_3:
if (mTet_contact == null) {
mTet_contact = helper.getView(R.id.tet_contact);
TextUtil.setEditTextInhibitInputSpace(mTet_contact);
TextUtil.setEditTextInhibitInputSpeChat(mTet_contact);
}
if (mTet_phone == null) {
mTet_phone = helper.getView(R.id.tet_phone);
}
if (mTet_addr == null) {
mTet_addr = helper.getView(R.id.tet_addr);
TextUtil.setEditTextInhibitInputSpace(mTet_addr);
TextUtil.setEditTextInhibitInputSpeChat(mTet_addr);
}
if (mBt_prin == null) {
mBt_prin = helper.getView(R.id.bt_prin);
helper.addOnClickListener(R.id.bt_prin);
}
if (mTv_time == null) {
mTv_time = helper.getView(R.id.tv_time);
mTv_time.setText(mMYear + "年" + mMMonth + "月" + mMDay + "日");
}
break;
default:
break;
}
}
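/**
 * Fills the natural-person fields (name, gender, ID number, address)
 * from an ID-card OCR recognition result.
 */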
public void setOCRData(IDCardResult result) {
mTet_sex_natural.setText(result.getGender().toString());
mTet_parties_concerned_natural.setText(result.getName().toString());
mTet_card_natural.setText(result.getIdNumber().toString());
mTet_address_natural.setText(result.getAddress().toString());
}
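/**
 * Validates the form, renders it to an HTML notice and hands it to the
 * PrinterShare app (com.dynamixsoftware.printershare) for printing; if
 * PrinterShare is not installed, the bundled APK is unpacked from assets
 * and installed first.
 */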
public void print() {
boolean installApp = AppUtil.isInstallApp(BaseApplication.getApplictaion(), "com.dynamixsoftware.printershare");
if (installApp) {
if (mRb_natural_person.isChecked()) { // the party is a natural person
final String partiesConcernedNatural = mTet_parties_concerned_natural.getText().toString().trim();
final String sexNatural = mTet_sex_natural.getText().toString().trim();
final String tetCardNatural = mTet_card_natural.getText().toString().trim();
final String tetPhoneNatural = mTet_phone_natural.getText().toString().trim();
final String tetAddressNatural = mTet_address_natural.getText().toString().trim();
if (TextUtils.isEmpty(partiesConcernedNatural) || TextUtils.isEmpty(sexNatural)
        || TextUtils.isEmpty(tetCardNatural) || TextUtils.isEmpty(tetPhoneNatural)
        || TextUtils.isEmpty(tetAddressNatural)) {
RxToast.warning(BaseApplication.getApplictaion(), "填入信息不能有空", Toast.LENGTH_SHORT).show();
return;
}
mNaturalInfo = "<p>当事人:<u> " + partiesConcernedNatural + " </u> 性别: <u> " + sexNatural + " </u> 身份证号:\n" +
" <u> " + tetCardNatural + " </u> 电话:\n" +
" <u> " + tetPhoneNatural + " </u> 住址:\n" +
" <u> " + tetAddressNatural + " </u>\n" +
"</p>";
//---------------------------------------------------
} else {
String partiesConcernedOther = mTet_parties_concerned_other.getText().toString().trim();
String positionOther = mTet_position_other.getText().toString().trim();
String phoneOther = mTet_phone_other.getText().toString().trim();
String representativeOther = mTet_representative_other.getText().toString().trim();
String creditCodeOher = mTet_credit_code_oher.getText().toString().trim();
String addressOther = mTet_address_other.getText().toString().trim();
if (TextUtils.isEmpty(partiesConcernedOther) || TextUtils.isEmpty(positionOther)
        || TextUtils.isEmpty(phoneOther) || TextUtils.isEmpty(representativeOther)
        || TextUtils.isEmpty(creditCodeOher) || TextUtils.isEmpty(addressOther)) {
RxToast.warning(BaseApplication.getApplictaion(), "填入信息不能有空", Toast.LENGTH_SHORT).show();
return;
}
mNaturalInfo = "<p>当事人:<u> " + partiesConcernedOther + " </u> 职务: <u> " + positionOther + " </u> 电话:\n" +
" <u> " + phoneOther + " </u> 法定代表人:\n" +
" <u> " + representativeOther + " </u> 统一社会信用代码:\n" +
" <u> " + creditCodeOher + " </u>\n" + " 住所:<u> " + addressOther + " </u>" +
"</p>";
}
if (mEt_illegal_facts == null || TextUtils.isEmpty(mEt_illegal_facts.getText().toString().trim())) {
RxToast.warning("违法事实不能为空");
return;
}
if (mEt_legal_provisions1 == null || TextUtils.isEmpty(mEt_legal_provisions1.getText().toString().trim())) {
RxToast.warning("法律条款不能为空");
return;
}
if (mEt_legal_provisions2 == null || TextUtils.isEmpty(mEt_legal_provisions2.getText().toString().trim())) {
RxToast.warning("法律条款不能为空");
return;
}
if (mTet_contact == null || TextUtils.isEmpty(mTet_contact.getText().toString().trim())) {
RxToast.warning("联系人不能为空");
return;
}
if (mTet_phone == null || TextUtils.isEmpty(mTet_phone.getText().toString().trim())) {
RxToast.warning("联系电话不能为空");
return;
}
if (mTet_addr == null || TextUtils.isEmpty(mTet_addr.getText().toString().trim())) {
RxToast.warning("联系地址不能为空");
return;
}
final String illegalFacts = mEt_illegal_facts.getText().toString().trim();
final String legalProvisions1 = mEt_legal_provisions1.getText().toString().trim();
final String legalProvisions2 = mEt_legal_provisions2.getText().toString().trim();
final String contact = mTet_contact.getText().toString().trim();
final String phone = mTet_phone.getText().toString().trim();
final String addr = mTet_addr.getText().toString().trim();
// write the HTML file on a background worker
Schedulers.io().createWorker().schedule(new Runnable() {
@Override
public void run() {
writeHtml(illegalFacts, legalProvisions1, legalProvisions2, contact, phone, addr);
}
});
ComponentName comp = new ComponentName("com.dynamixsoftware.printershare", "com.dynamixsoftware.printershare.ActivityWeb");
Intent intent = new Intent();
intent.setComponent(comp);
intent.setAction("android.intent.action.VIEW");
intent.setType("text/html");
intent.setData(Uri.parse("file:///" + Environment.getExternalStorageDirectory().getPath() + "/oking/print/temp1.html"));
mActivity.startActivity(intent);
} else {
if (mRxDialogLoading == null) {
mRxDialogLoading = new RxDialogLoading(mActivity, false, new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialogInterface) {
dialogInterface.cancel();
}
});
mRxDialogLoading.setLoadingText("正在解压插件...");
}
mRxDialogLoading.show();
Schedulers.io().createWorker().schedule(new Runnable() {
@Override
public void run() {
final File assetFileToCacheDir = FileUtil.getAssetFileToCacheDir("PrinterShare.apk");
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
mRxDialogLoading.cancel();
AppUtil.installAPK(BaseApplication.getApplictaion(), assetFileToCacheDir.getPath());
}
});
}
});
}
}
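/**
 * Writes the notice as a self-contained HTML document to
 * /oking/print/temp1.html on external storage so PrinterShare can render it.
 */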
private void writeHtml(String illegalFacts, String legalProvisions1, String legalProvisions2, String contact, String phone, String addr) {
File destDir = new File(Environment.getExternalStorageDirectory().getPath() + "/oking/print/temp1.html");
StringBuffer sb = new StringBuffer();
sb.append("<!DOCTYPE HTML>\n");
sb.append("<html>\n");
sb.append("<head>\n");
sb.append(" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n");
sb.append(" <style>\n");
sb.append(" </style>\n");
sb.append("</head>\n");
sb.append("<body>\n");
sb.append("<h1 align=\"center\">水行政责令停止违法行为通知书</h1>\n");
sb.append("<p align=\"right\">x水当罚字[ ]第 号</p>\n");
sb.append(mNaturalInfo);
sb.append("<p> 据初步调查,你(单位) <u> " + illegalFacts + " </u> 涉嫌违反了 <u> " + legalProvisions1 + " </u> \n");
sb.append(" 的规定,现根据:<u> " + legalProvisions2 + " </u>的规定,责令你(单位)立即停止违法行为,听后处理。</p>\n");
sb.append("<p align=\"right\">" + mMYear + "年" + mMMonth + "月" + mMDay + "日 </p>\n");
sb.append("<p> 联系人:<u> " + contact + " </u> 联系电话: <u> " + phone + " </u> 联系地址: <u> " + addr + " </u></p>\n");
sb.append("</body>\n");
sb.append("</html>");
FileUtil.writeFileFromString(destDir, sb.toString(), false);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.fragments;
import android.os.Bundle;
import android.support.design.widget.TextInputEditText;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
public class ApprovalTaskInfoFragment extends Fragment {
// TODO: Rename parameter arguments, choose names that match
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
private static final String ARG_PARAM1 = "param1";
private static final String ARG_PARAM2 = "param2";
@BindView(R.id.tv_taskname)
TextInputEditText mTvTaskname;
@BindView(R.id.tv_source)
TextInputEditText mTvSource;
@BindView(R.id.publisher_tv)
TextInputEditText mPublisherTv;
@BindView(R.id.tv_approver)
TextInputEditText mTvApprover;
@BindView(R.id.tv_recipient)
TextInputEditText mTvRecipient;
@BindView(R.id.tv_emergency)
TextInputEditText mTvEmergency;
@BindView(R.id.tv_begintime)
TextInputEditText mTvBegintime;
@BindView(R.id.tv_endtime)
TextInputEditText mTvEndtime;
@BindView(R.id.list_item_missionDetail)
TextInputEditText mListItemMissionDetail;
@BindView(R.id.tv_description)
TextInputEditText mTvDescription;
Unbinder unbinder;
// TODO: Rename and change types of parameters
private String mParam1;
private String mParam2;
private View mInflate;
private GreenMissionTask mGreenMissionTask;
private DateFormat mDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm");
public ApprovalTaskInfoFragment() {
// Required empty public constructor
}
public static ApprovalTaskInfoFragment newInstance(String param1, String param2) {
ApprovalTaskInfoFragment fragment = new ApprovalTaskInfoFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM1, param1);
args.putString(ARG_PARAM2, param2);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mParam1 = getArguments().getString(ARG_PARAM1);
mParam2 = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
if (mInflate == null) {
mInflate = inflater.inflate(R.layout.fragment_approval_task_info, container, false);
}
unbinder = ButterKnife.bind(this, mInflate);
initData();
return mInflate;
}
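/**
 * Maps the raw fields of the bound GreenMissionTask (source code, urgency
 * code, timestamps, area and content) into the read-only form widgets.
 */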
private void initData() {
mTvTaskname.setText(mGreenMissionTask.getTask_name());
if ("0".equals(mGreenMissionTask.getRwly())) {
mTvSource.setText("上级交办");
} else if ("1".equals(mGreenMissionTask.getRwly())) {
mTvSource.setText("部门移送");
} else if ("2".equals(mGreenMissionTask.getRwly())) {
mTvSource.setText("系统报警");
} else if ("3".equals(mGreenMissionTask.getRwly())) {
mTvSource.setText("日常巡查");
} else if ("4".equals(mGreenMissionTask.getRwly())) {
mTvSource.setText("媒体披露");
} else if ("5".equals(mGreenMissionTask.getRwly())) {
mTvSource.setText("群众举报");
}
mPublisherTv.setText(mGreenMissionTask.getPublisher_name());
mTvApprover.setText(mGreenMissionTask.getApproved_person_name());
mTvRecipient.setText(mGreenMissionTask.getFbr());
if ("0".equals(mGreenMissionTask.getJjcd())) {
mTvEmergency.setText("特急");
} else if ("紧急".equals(mGreenMissionTask.getJjcd())) {
mTvEmergency.setText("紧急");
} else if ("一般".equals(mGreenMissionTask.getJjcd())) {
mTvEmergency.setText("一般");
} else {
mTvEmergency.setText("紧急");
}
mTvBegintime.setText(mDateFormat.format(mGreenMissionTask.getBegin_time()));
mTvEndtime.setText(mDateFormat.format(mGreenMissionTask.getEnd_time()));
mListItemMissionDetail.setText(mGreenMissionTask.getTask_area());
mTvDescription.setText(mGreenMissionTask.getTask_content());
}
@Override
public void onDestroyView() {
super.onDestroyView();
unbinder.unbind();
}
public void setMissionTask(GreenMissionTask missionTask) {
mGreenMissionTask = missionTask;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.RectF;
import android.location.Location;
import android.os.Bundle;
import android.os.Parcelable;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.widget.CompoundButton;
import android.widget.RadioButton;
import com.amap.api.maps.AMap;
import com.amap.api.maps.CameraUpdate;
import com.amap.api.maps.CameraUpdateFactory;
import com.amap.api.maps.MapView;
import com.amap.api.maps.Projection;
import com.amap.api.maps.UiSettings;
import com.amap.api.maps.model.BitmapDescriptorFactory;
import com.amap.api.maps.model.CameraPosition;
import com.amap.api.maps.model.Circle;
import com.amap.api.maps.model.LatLng;
import com.amap.api.maps.model.MyLocationStyle;
import com.amap.api.maps.model.Polygon;
import com.amap.api.maps.model.PolygonOptions;
import com.google.gson.reflect.TypeToken;
import com.yinghe.whiteboardlib.bean.StrokeRecord;
import com.yinghe.whiteboardlib.view.SketchView;
import com.zhang.baselib.ui.views.RxDialogSureCancel;
import com.zhang.baselib.utils.DataUtil;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.LatLngListOV;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import com.zhang.okinglawenforcementphone.views.MapDraw;
import org.greenrobot.eventbus.EventBus;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
public class MapByPointActivity extends BaseActivity implements AMap.OnMyLocationChangeListener, CompoundButton.OnCheckedChangeListener {
@BindView(R.id.toolbar)
Toolbar mToolbar;
@BindView(R.id.map)
MapView mMap;
@BindView(R.id.rb_circular)
RadioButton mRbCircular;
@BindView(R.id.rb_polygon)
RadioButton mRbPolygon;
@BindView(R.id.rb_move_map)
RadioButton mRbMoveMap;
@BindView(R.id.md)
MapDraw mMd;
private Unbinder mBind;
private AMap mAMap;
private UiSettings mUiSettings;
private LatLng mLng;
private int mDrawType;
private StrokeRecord mStrokeRecord;
private RxDialogSureCancel mRxDialogSureCancel;
private String mMcoordinateJson;
private int mDrawLaLoType;
private LatLng mCenterPoint;
private float mZoom;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_map_by_point);
mBind = ButterKnife.bind(this);
mMap.onCreate(savedInstanceState); // this call is required by the MapView lifecycle
initData();
setListener();
}
private void setListener() {
mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
finish();
}
});
mRbCircular.setOnCheckedChangeListener(this);
mRbPolygon.setOnCheckedChangeListener(this);
mRbMoveMap.setOnCheckedChangeListener(this);
}
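/**
 * Reads the drawing parameters passed by the caller (previous shape JSON,
 * draw type, rect and camera state), prepares the stroke paint, and wires
 * the MapDraw overlay so a finished sketch is confirmed in a dialog and
 * posted back via EventBus.
 */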
private void initData() {
Intent intent = getIntent();
mMcoordinateJson = intent.getStringExtra("mcoordinateJson");
mDrawLaLoType = intent.getIntExtra("drawLaLoType", 4);
float left = intent.getFloatExtra("left", 0);
float right = intent.getFloatExtra("right", 0);
float top = intent.getFloatExtra("top", 0);
float bottom = intent.getFloatExtra("bottom", 0);
mZoom = intent.getFloatExtra("zoom", 0);
mCenterPoint = (LatLng) intent.getParcelableExtra("centerPoint");
initMap();
mStrokeRecord = new StrokeRecord();
mStrokeRecord.setType(mDrawLaLoType);
Paint strokePaint = new Paint();
strokePaint.setAntiAlias(true);
strokePaint.setDither(true);
strokePaint.setColor(Color.RED);
strokePaint.setStyle(Paint.Style.STROKE);
strokePaint.setStrokeJoin(Paint.Join.ROUND);
strokePaint.setStrokeCap(Paint.Cap.ROUND);
strokePaint.setStrokeWidth(3);
if (left != 0) {
RectF rect = new RectF(left, top, right, bottom);
mStrokeRecord.rect = rect;
}
mStrokeRecord.paint = strokePaint;
mMd.setOnDrawChangedListener(new MapDraw.OnDrawChangedListener() {
@Override
public void onDrawChanged(final LatLngListOV latLngListOV) {
if (mRxDialogSureCancel == null) {
mRxDialogSureCancel = new RxDialogSureCancel(MapByPointActivity.this, false, new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialogInterface) {
dialogInterface.cancel();
}
});
mRxDialogSureCancel.setContent("确认巡查范围已标记无误?");
mRxDialogSureCancel.getTvCancel().setText("重来");
mRxDialogSureCancel.getTvSure().setText("完成");
}
mRxDialogSureCancel.getTvCancel().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mRxDialogSureCancel.cancel();
}
});
mRxDialogSureCancel.getTvSure().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mRxDialogSureCancel.cancel();
EventBus.getDefault().post(latLngListOV);
finish();
}
});
mRxDialogSureCancel.show();
}
});
mMd.setAmap(mAMap);
mMd.setStrokeRecord(mStrokeRecord);
}
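/**
 * Lazily obtains the AMap instance from the MapView, configures zoom and
 * map type, enables the location layer and registers this activity as the
 * my-location listener.
 */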
private void initMap() {
if (mAMap == null) {
mAMap = mMap.getMap();
mUiSettings = mAMap.getUiSettings();
}
mAMap.setMaxZoomLevel(20);
mAMap.setMapType(AMap.MAP_TYPE_NORMAL); // MAP_TYPE_SATELLITE would switch to the satellite layer
mUiSettings.setMyLocationButtonEnabled(true); // show the default my-location button
mAMap.setMyLocationEnabled(true); // true shows the location layer and allows locating; false (the default) hides it
setupLocationStyle();
mAMap.setOnMyLocationChangeListener(this);
Log.i("Oking",">>>"+mMcoordinateJson);
}
private void setupLocationStyle() {
// customize the system location blue dot
MyLocationStyle myLocationStyle = new MyLocationStyle();
// custom icon for the location dot
myLocationStyle.myLocationIcon(BitmapDescriptorFactory.
        fromResource(R.mipmap.gps_point));
// stroke color of the accuracy circle
myLocationStyle.strokeColor(Color.argb(180, 3, 145, 255));
// stroke width of the accuracy circle
myLocationStyle.strokeWidth(5);
// fill color of the accuracy circle
myLocationStyle.radiusFillColor(Color.argb(10, 0, 0, 180));
// LOCATION_TYPE_SHOW only displays the location; the style is applied to the map below
myLocationStyle.myLocationType(MyLocationStyle.LOCATION_TYPE_SHOW);
mAMap.setMyLocationStyle(myLocationStyle);
}
@Override
protected void onDestroy() {
super.onDestroy();
mMap.onDestroy();
mBind.unbind();
}
/**
 * This override is required by the MapView lifecycle.
 */
@Override
protected void onResume() {
super.onResume();
mMap.onResume();
}
/**
 * This override is required by the MapView lifecycle.
 */
@Override
protected void onPause() {
super.onPause();
mMap.onPause();
}
/**
 * This override is required by the MapView lifecycle.
 */
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
mMap.onSaveInstanceState(outState);
}
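/**
 * Called whenever the location layer produces a fix; centers the camera on
 * the center point passed in by the caller if one was supplied, otherwise
 * on the current position.
 */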
@Override
public void onMyLocationChange(Location location) {
if (location != null) {
mLng = new LatLng(location.getLatitude(), location.getLongitude());
Log.e("amap", "onMyLocationChange 定位成功, lat: " + location.getLatitude() + " lon: " + location.getLongitude());
Bundle bundle = location.getExtras();
if (bundle != null) {
int errorCode = bundle.getInt(MyLocationStyle.ERROR_CODE);
String errorInfo = bundle.getString(MyLocationStyle.ERROR_INFO);
// location type, e.g. GPS or WIFI; see the AMap location SDK docs for details
int locationType = bundle.getInt(MyLocationStyle.LOCATION_TYPE);
if (mCenterPoint == null) {
changeCamera(
CameraUpdateFactory.newCameraPosition(new CameraPosition(
mLng, 18, 30, 30)));
} else {
changeCamera(
CameraUpdateFactory.newCameraPosition(new CameraPosition(
mCenterPoint, mZoom, 30, 30)));
}
Log.e("amap", "定位信息, code: " + errorCode + " errorInfo: " + errorInfo + " locationType: " + locationType);
} else {
Log.e("amap", "定位信息, bundle is null ");
}
} else {
Log.e("amap", "定位失败");
}
}
private void changeCamera(CameraUpdate cameraUpdate) {
mAMap.moveCamera(cameraUpdate);
}
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean b) {
switch (compoundButton.getId()) {
case R.id.rb_circular:
if (b) {
mUiSettings.setScrollGesturesEnabled(false);
mMd.setVisibility(View.VISIBLE);
mDrawType = 4;
mStrokeRecord.setType(mDrawType);
mMd.setStrokeRecord(mStrokeRecord);
}
break;
case R.id.rb_polygon:
if (b) {
mUiSettings.setScrollGesturesEnabled(false);
mMd.setVisibility(View.VISIBLE);
mDrawType = 5;
mStrokeRecord.setType(mDrawType);
mMd.setStrokeRecord(mStrokeRecord);
}
break;
case R.id.rb_move_map:
if (b) {
mUiSettings.setScrollGesturesEnabled(true);
mMd.setVisibility(View.GONE);
}
break;
default:
break;
}
}
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.support.annotation.Nullable;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.chad.library.adapter.base.BaseViewHolder;
import com.zhang.baselib.BaseApplication;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.MenuItemOV;
import com.zhang.okinglawenforcementphone.beans.MenuOV;
import com.zhy.view.flowlayout.FlowLayout;
import com.zhy.view.flowlayout.TagAdapter;
import com.zhy.view.flowlayout.TagFlowLayout;
import java.util.List;
/**
* Created by Administrator on 2018/4/18.
*/
public class IndexRecyAdapter extends BaseQuickAdapter<MenuOV, BaseViewHolder> {
private OnTagClickListener mOnTagClickListener;
public IndexRecyAdapter(int layoutResId, @Nullable List<MenuOV> data, OnTagClickListener onTagClickListener) {
super(layoutResId, data);
this.mOnTagClickListener = onTagClickListener;
}
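/**
 * Renders one menu group: the group title plus a TagFlowLayout of its
 * entries, forwarding tag clicks to the supplied listener.
 */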
@Override
protected void convert(BaseViewHolder helper, final MenuOV item) {
helper.setText(R.id.tv_title, item.getTage());
TagFlowLayout flowlayout = helper.getView(R.id.flowlayout);
final List<MenuItemOV> menuItemOVS = item.getMenuItemOVS();
flowlayout.setAdapter(new TagAdapter<MenuItemOV>(menuItemOVS) {
@Override
public View getView(FlowLayout parent, int position, MenuItemOV dataBean) {
View inflate = View.inflate(BaseApplication.getApplictaion(), R.layout.hot_tagflow_item, null);
TextView tv_tag = inflate.findViewById(R.id.tv_tag);
ImageView icon = inflate.findViewById(R.id.iv_icon);
tv_tag.setText(dataBean.getTitle());
icon.setImageResource(dataBean.getIcon());
return inflate;
}
});
flowlayout.setOnTagClickListener(new TagFlowLayout.OnTagClickListener() {
@Override
public boolean onTagClick(View view, int position, FlowLayout parent) {
mOnTagClickListener.onTagClickListener(menuItemOVS.get(position));
return false;
}
});
}
public interface OnTagClickListener {
void onTagClickListener(MenuItemOV bean);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.beans;
/**
* Created by Administrator on 2017/10/31.
*/
public class SectionDomain {
private String chapterItem;
private String itemTitle;
public String getChapterItem() {
return chapterItem;
}
public void setChapterItem(String chapterItem) {
this.chapterItem = chapterItem;
}
public String getItemTitle() {
return itemTitle;
}
public void setItemTitle(String itemTitle) {
this.itemTitle = itemTitle;
}
@Override
public String toString() {
return "SectionDomain{" +
"chapterItem='" + chapterItem + '\'' +
", itemTitle='" + itemTitle + '\'' +
'}';
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.presenter;
import com.zhang.okinglawenforcementphone.beans.JPushMessageBean;
import com.zhang.okinglawenforcementphone.mvp.contract.JPushMessageContract;
import com.zhang.okinglawenforcementphone.mvp.model.JPushMessageModel;
/**
* Created by Administrator on 2018/8/27/027.
*/
public class JPushMessagePresenter implements JPushMessageContract.Presenter {
private JPushMessageContract.Model mModel;
private JPushMessageContract.View mView;
public JPushMessagePresenter(JPushMessageContract.View view) {
mView = view;
mModel = new JPushMessageModel(this);
}
@Override
public void pushMessage(JPushMessageBean jPushMessageBean) {
mModel.pushMessage(jPushMessageBean);
}
@Override
public void pushMessageSucc(String result) {
mView.pushMessageSucc(result);
}
@Override
public void pushMessageFail(Throwable ex) {
mView.pushMessageFail(ex);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.view.View;
import android.widget.TextView;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.ui.views.RxToast;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.adapter.ArrangeMissionRcyAdapter;
import com.zhang.okinglawenforcementphone.beans.GreenMissionLog;
import com.zhang.okinglawenforcementphone.beans.GreenMissionLogDao;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTaskDao;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.beans.UpdateGreenMissionTaskOV;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import com.zhang.okinglawenforcementphone.views.DividerItemDecoration;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;
import org.greenrobot.greendao.query.QueryBuilder;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.schedulers.Schedulers;
public class TaskMissionProjectActivity extends BaseActivity {
@BindView(R.id.toolbar)
Toolbar mToolbar;
@BindView(R.id.ry_arrange)
RecyclerView ryArrange;
@BindView(R.id.tv)
TextView tv;
@BindView(R.id.tv_title)
TextView mTvTitle;
private Unbinder mBind;
private List<GreenMissionTask> mGreenMissionTasks;
private String argActivity;
private ArrangeMissionRcyAdapter mAarrangeMissionRcyAdapter;
private Intent mIntent;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_task_mission_project);
mBind = ButterKnife.bind(this);
initView();
initData();
setListener();
}
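/**
 * Wires the toolbar back arrow and routes list item clicks to the follow-up
 * screen that matches the launching activity (assign members, execute or
 * report, view trajectory, or completion record).
 */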
private void setListener() {
mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
finish();
}
});
mAarrangeMissionRcyAdapter.setOnItemClickListener(new BaseQuickAdapter.OnItemClickListener() {
private GreenMissionTask mGreenMissionTask;
@Override
public void onItemClick(BaseQuickAdapter adapter, View view, int position) {
switch (argActivity) {
case "ArrangeMissionActivity":
mIntent = new Intent(TaskMissionProjectActivity.this, ArrangeTeamMembersActivity.class);
mIntent.putExtra("id", mAarrangeMissionRcyAdapter.getData().get(position).getId());
mIntent.putExtra("position", position);
startActivity(mIntent);
break;
case "TaskExecutionActivity":
case "ReportTaskActivity":
mIntent = new Intent(TaskMissionProjectActivity.this, MissionActivity.class);
mIntent.putExtra("id", mAarrangeMissionRcyAdapter.getData().get(position).getId());
mIntent.putExtra("position", position);
startActivity(mIntent);
break;
case "TrajectoryListActivity":
mGreenMissionTask = mAarrangeMissionRcyAdapter.getData().get(position);
GreenMissionLog unique = GreenDAOManager.getInstence().getDaoSession().getGreenMissionLogDao()
.queryBuilder().where(GreenMissionLogDao.Properties.Task_id.eq(mGreenMissionTask.getTaskid())).unique();
if (unique == null || unique.getLocJson() == null || unique.getLocJson().equals("[]") || TextUtils.isEmpty(unique.getLocJson())) {
RxToast.error("抱歉后台哥们不给力,木有轨迹返回给我~~~~");
} else {
mIntent = new Intent(TaskMissionProjectActivity.this, TrajectoryActivity.class);
mIntent.putExtra("id", mAarrangeMissionRcyAdapter.getData().get(position).getTaskid());
mIntent.putExtra("taskName", mGreenMissionTask.getTask_name());
mIntent.putExtra("publishname", mGreenMissionTask.getPublisher_name());
mIntent.putExtra("executeBeginTime", mGreenMissionTask.getExecute_start_time());
mIntent.putExtra("executeEndTime", mGreenMissionTask.getExecute_end_time());
mIntent.putExtra("area", mGreenMissionTask.getTask_area());
mIntent.putExtra("locJson", unique.getLocJson());
startActivity(mIntent);
}
break;
case "CompleteListActivity":
mGreenMissionTask = mAarrangeMissionRcyAdapter.getData().get(position);
Intent intent = new Intent(TaskMissionProjectActivity.this, MissionRecorActivity.class);
intent.putExtra("id", mGreenMissionTask.getId());
intent.putExtra("taskId", mGreenMissionTask.getTaskid());
startActivity(intent);
break;
default:
break;
}
}
});
}
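/**
 * Registers for EventBus updates and, based on the launching activity name,
 * queries GreenDAO for tasks in the matching status, showing either the
 * result list or the empty hint.
 */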
private void initData() {
Intent intent = getIntent();
argActivity = intent.getStringExtra("activity");
EventBus.getDefault().register(this);
switch (argActivity) {
case "ArrangeMissionActivity":
mTvTitle.setText("待安排队员列表");
Schedulers.io().createWorker().schedule(new Runnable() {
@Override
public void run() {
mGreenMissionTasks = GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao()
.queryBuilder().where(GreenMissionTaskDao.Properties.Userid.eq(OkingContract.CURRENTUSER.getUserid()), GreenMissionTaskDao.Properties.Status.eq(2))
.list();
if (mGreenMissionTasks.size() > 0) {
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
ryArrange.setVisibility(View.VISIBLE);
tv.setVisibility(View.GONE);
mAarrangeMissionRcyAdapter.setNewData(mGreenMissionTasks);
}
});
} else {
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
ryArrange.setVisibility(View.GONE);
tv.setVisibility(View.VISIBLE);
}
});
}
}
});
break;
case "TaskExecutionActivity":
mTvTitle.setText("待执行任务列表");
mGreenMissionTasks = GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao()
.queryBuilder().where(GreenMissionTaskDao.Properties.Userid.eq(OkingContract.CURRENTUSER.getUserid()))
.whereOr(GreenMissionTaskDao.Properties.Status.eq(3), GreenMissionTaskDao.Properties.Status.eq(4))
.list();
if (mGreenMissionTasks.size() > 0) {
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
ryArrange.setVisibility(View.VISIBLE);
tv.setVisibility(View.GONE);
mAarrangeMissionRcyAdapter.setNewData(mGreenMissionTasks);
}
});
} else {
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
ryArrange.setVisibility(View.GONE);
tv.setVisibility(View.VISIBLE);
}
});
}
break;
case "ReportTaskActivity":
mTvTitle.setText("待上报任务列表");
mGreenMissionTasks = GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao()
.queryBuilder().where(GreenMissionTaskDao.Properties.Userid.eq(OkingContract.CURRENTUSER.getUserid()), GreenMissionTaskDao.Properties.Status.eq(100))
.list();
if (mGreenMissionTasks.size() > 0) {
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
ryArrange.setVisibility(View.VISIBLE);
tv.setVisibility(View.GONE);
mAarrangeMissionRcyAdapter.setNewData(mGreenMissionTasks);
}
});
} else {
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
ryArrange.setVisibility(View.GONE);
tv.setVisibility(View.VISIBLE);
}
});
}
break;
case "TrajectoryListActivity":
mTvTitle.setText("任务轨迹列表");
QueryBuilder<GreenMissionTask> greenMissionTaskQueryBuilder = GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao()
.queryBuilder();
greenMissionTaskQueryBuilder.where(GreenMissionTaskDao.Properties.Userid.eq(OkingContract.CURRENTUSER.getUserid()));
greenMissionTaskQueryBuilder.whereOr(GreenMissionTaskDao.Properties.Status.eq(100), GreenMissionTaskDao.Properties.Status.eq(5));
mGreenMissionTasks = greenMissionTaskQueryBuilder.list();
if (mGreenMissionTasks.size() > 0) {
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
ryArrange.setVisibility(View.VISIBLE);
tv.setVisibility(View.GONE);
mAarrangeMissionRcyAdapter.setNewData(mGreenMissionTasks);
}
});
} else {
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
ryArrange.setVisibility(View.GONE);
tv.setVisibility(View.VISIBLE);
}
});
}
break;
case "CompleteListActivity":
mTvTitle.setText("任务完成列表");
mGreenMissionTasks = GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao()
.queryBuilder().where(GreenMissionTaskDao.Properties.Userid.eq(OkingContract.CURRENTUSER.getUserid()), GreenMissionTaskDao.Properties.Status.eq(5)).list();
if (mGreenMissionTasks.size() > 0) {
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
ryArrange.setVisibility(View.VISIBLE);
tv.setVisibility(View.GONE);
mAarrangeMissionRcyAdapter.setNewData(mGreenMissionTasks);
}
});
} else {
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
ryArrange.setVisibility(View.GONE);
tv.setVisibility(View.VISIBLE);
}
});
}
break;
default:
break;
}
}
private void initView() {
ryArrange.setLayoutManager(new LinearLayoutManager(this, LinearLayoutManager.VERTICAL, false));
ryArrange.addItemDecoration(new DividerItemDecoration(BaseApplication.getApplictaion(), 0, 8, BaseApplication.getApplictaion().getResources().getColor(R.color.activity_bg)));
mAarrangeMissionRcyAdapter = new ArrangeMissionRcyAdapter(R.layout.arrangemission_task_item, null);
mAarrangeMissionRcyAdapter.openLoadAnimation(BaseQuickAdapter.SLIDEIN_RIGHT);
ryArrange.setAdapter(mAarrangeMissionRcyAdapter);
}
@Subscribe(threadMode = ThreadMode.MAIN)
public void handleEvent1(UpdateGreenMissionTaskOV event) {
mAarrangeMissionRcyAdapter.setData(event.getPosition(), event.getMissionTask());
}
@Override
protected void onDestroy() {
super.onDestroy();
mBind.unbind();
EventBus.getDefault().unregister(this);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.model;
import android.os.Environment;
import android.util.Log;
import com.zhang.baselib.http.BaseHttpFactory;
import com.zhang.baselib.http.schedulers.RxSchedulersHelper;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.beans.GreenMedia;
import com.zhang.okinglawenforcementphone.beans.GreenMediaDao;
import com.zhang.okinglawenforcementphone.beans.GreenMember;
import com.zhang.okinglawenforcementphone.beans.GreenMissionLog;
import com.zhang.okinglawenforcementphone.beans.GreenMissionLogDao;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.http.service.GDWaterService;
import com.zhang.okinglawenforcementphone.mvp.contract.GetHttpMissionLogContract;
import org.json.JSONArray;
import org.json.JSONObject;
import java.io.File;
import io.reactivex.Observable;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.functions.Consumer;
import io.reactivex.functions.Function;
import io.reactivex.schedulers.Schedulers;
import okhttp3.ResponseBody;
/**
* Created by Administrator on 2018/4/23/023.
*/
public class GetHttpMissionLogModel implements GetHttpMissionLogContract.Model {
private GetHttpMissionLogContract.Presenter mPresenter;
private GreenMissionLog mUnique;
public GetHttpMissionLogModel(GetHttpMissionLogContract.Presenter presenter) {
mPresenter = presenter;
}
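/**
 * Chains the remote calls for a task: the mission log itself, its photo
 * paths and the members' signature paths are fetched and saved to GreenDAO,
 * then the patrol-video path request completes the chain. On any failure a
 * local placeholder log is created (or reused) and returned via loadEmpty().
 */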
@Override
public void getHttpMissionLog(final GreenMissionTask mission) {
BaseHttpFactory.getInstence()
.createService(GDWaterService.class, Api.BASE_URL)
.getHttpMissionLog("-1", mission.getTaskid())
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.observeOn(Schedulers.io())
.concatMap(new Function<ResponseBody, Observable<ResponseBody>>() {
@Override
public Observable<ResponseBody> apply(ResponseBody responseBody) throws Exception {
String result = responseBody.string();
Log.i("Oking5", "获取任务到任务" + result);
JSONObject object = new JSONObject(result);
int count = object.optInt("total");
if (count > 0) {
JSONObject rows = object.getJSONArray("rows").getJSONObject(0);
String mId = rows.optString("id");
mUnique = GreenDAOManager.getInstence().getDaoSession().getGreenMissionLogDao().queryBuilder()
.where(GreenMissionLogDao.Properties.Server_id.eq(mId)).unique();
if (mUnique == null) {
mUnique = new GreenMissionLog();
mUnique.setAddr(rows.optString("addr"));
mUnique.setArea(rows.optString("area"));
mUnique.setDeal(rows.optString("deal"));
mUnique.setDzyj(rows.optString("dzyj"));
mUnique.setEquipment(rows.optString("equipment"));
mUnique.setServer_id(mId);
mUnique.setId_card(rows.optString("id_card"));
mUnique.setItem(rows.optInt("item"));
mUnique.setName(rows.optString("name"));
mUnique.setOther_part(rows.optString("other_part"));
mUnique.setOther_person(rows.optInt("other_person"));
mUnique.setPatrol(rows.optString("patrol"));
mUnique.setPlan(rows.optInt("plan"));
mUnique.setPost(rows.optString("post"));
mUnique.setResult(rows.optString("result"));
mUnique.setRoute(rows.optString("route"));
mUnique.setStatus(rows.optInt("status"));
mUnique.setTask_id(rows.optString("task_id"));
mUnique.setTime(rows.optString("time"));
mUnique.setType(rows.optInt("type"));
mUnique.setWeather(rows.optString("weather"));
GreenDAOManager.getInstence().getDaoSession().getGreenMissionLogDao().insert(mUnique);
} else {
mUnique.setAddr(rows.optString("addr"));
mUnique.setArea(rows.optString("area"));
mUnique.setDeal(rows.optString("deal"));
mUnique.setDzyj(rows.optString("dzyj"));
mUnique.setEquipment(rows.optString("equipment"));
mUnique.setServer_id(mId);
mUnique.setId_card(rows.optString("id_card"));
mUnique.setItem(rows.optInt("item"));
mUnique.setName(rows.optString("name"));
mUnique.setOther_part(rows.optString("other_part"));
mUnique.setOther_person(rows.optInt("other_person"));
mUnique.setPatrol(rows.optString("patrol"));
mUnique.setPlan(rows.optInt("plan"));
mUnique.setPost(rows.optString("post"));
mUnique.setResult(rows.optString("result"));
mUnique.setRoute(rows.optString("route"));
mUnique.setStatus(rows.optInt("status"));
mUnique.setTask_id(rows.optString("task_id"));
mUnique.setTime(rows.optString("time"));
mUnique.setType(rows.optInt("type"));
mUnique.setWeather(rows.optString("weather"));
GreenDAOManager.getInstence().getDaoSession().getGreenMissionLogDao().update(mUnique);
}
// fetch the image paths for this log
return BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL).getMissionRecordPicPath(mId, 0);
} else {
return Observable.error(new Throwable("NOTASKLOG"));
}
}
}).concatMap(new Function<ResponseBody, Observable<ResponseBody>>() {
@Override
public Observable<ResponseBody> apply(ResponseBody responseBody) throws Exception {
if (mUnique.getGreenMedia().size() < 1) {
String result = responseBody.string();
Log.i("Oking", "获取任务到任务图片路径:" + result);
JSONArray paths = new JSONArray(result);
for (int i = 0; i < paths.length(); i++) {
String path = paths.getJSONObject(i).optString("path");
GreenMedia unique = GreenDAOManager.getInstence().getDaoSession().getGreenMediaDao()
.queryBuilder().where(GreenMediaDao.Properties.Path.eq(Api.BASE_URL + path)).unique();
if (unique == null) {
GreenMedia greenMedia = new GreenMedia();
greenMedia.setType(1);
greenMedia.setPath(Api.BASE_URL + path);
greenMedia.setGreenMissionLogId(mUnique.getId());
GreenDAOManager.getInstence().getDaoSession().getGreenMediaDao().insert(greenMedia);
}
}
}
// fetch the members' signature images
return BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL).getMissionRecordPicPath(mUnique.getServer_id(), 1);
}
}).concatMap(new Function<ResponseBody, Observable<ResponseBody>>() {
@Override
public Observable<ResponseBody> apply(ResponseBody responseBody) throws Exception {
String result = responseBody.string();
Log.i("Oking", "获取到签名路径:" + result);
JSONArray paths = new JSONArray(result);
for (int i = 0; i < paths.length(); i++) {
String userid = paths.getJSONObject(i).optString("userid");
for (int j = 0; j < mission.getMembers().size(); j++) {
GreenMember member = mission.getMembers().get(j);
if (member != null && userid.equals(member.getUserid()) && member.getSignPic() == null) {
String path = paths.getJSONObject(i).optString("path");
GreenMedia greenMedia = new GreenMedia();
greenMedia.setType(4);
greenMedia.setPath(path);
greenMedia.setUserid(member.getUserid());
GreenDAOManager.getInstence().getDaoSession().getGreenMediaDao().insert(greenMedia);
}
}
}
// fetch the patrol video paths
return BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL).getMissionRecordPicPath(mUnique.getServer_id(), 2);
}
}).observeOn(AndroidSchedulers.mainThread())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
Log.i("Oking", "获取任务日志成功");
mPresenter.loadHttpMissionLogSucc(mUnique);
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
GreenMissionLog unique = GreenDAOManager.getInstence().getDaoSession().getGreenMissionLogDao().queryBuilder()
.where(GreenMissionLogDao.Properties.Task_id.eq(mission.getTaskid())).unique();
if (unique == null) {
mUnique = new GreenMissionLog();
mUnique.setTask_id(mission.getTaskid());
mUnique.setName(OkingContract.CURRENTUSER.getUserid());
mUnique.setStatus(0);
GreenDAOManager.getInstence().getDaoSession().getGreenMissionLogDao().insert(mUnique);
mPresenter.loadEmpty(mUnique);
} else {
mPresenter.loadEmpty(unique);
}
}
});
}
}
<file_sep>package com.zhang.okinglawenforcementphone.beans;
/**
* Created by Administrator on 2018/4/13.
*/
public class SaveOrRemoveDataEvent {
public int type;
public SaveOrRemoveDataEvent(int type) {
this.type = type;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.beans;
/**
* Created by Administrator on 2018/4/8.
*/
public class UpdateGreenMissionTaskOV {
private int type;
private int position;
private GreenMissionTask missionTask;
public int getType() {
return type;
}
public void setType(int type) {
this.type = type;
}
public int getPosition() {
return position;
}
public void setPosition(int position) {
this.position = position;
}
public GreenMissionTask getMissionTask() {
return missionTask;
}
public void setMissionTask(GreenMissionTask missionTask) {
this.missionTask = missionTask;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.http;
/**
* Created by Administrator on 2018/4/18.
*/
public class Api {
public static final String BASE_URL = "http://10.44.21.26:8087";
}
<file_sep>package com.zhang.okinglawenforcementphone.beans;
/**
* Created by Administrator on 2018/5/28/028.
*/
public class BinnerItem {
private String title;
private int picPath;
private String toContent;
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public int getPicPath() {
return picPath;
}
public void setPicPath(int picPath) {
this.picPath = picPath;
}
public String getToContent() {
return toContent;
}
public void setToContent(String toContent) {
this.toContent = toContent;
}
@Override
public String toString() {
return "BinnerItem{" +
"title='" + title + '\'' +
", picPath='" + picPath + '\'' +
", toContent='" + toContent + '\'' +
'}';
}
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.support.annotation.Nullable;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.chad.library.adapter.base.BaseViewHolder;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import java.util.List;
/**
* Created by Administrator on 2018/5/17.
*/
public class StatisRcyAdapter extends BaseQuickAdapter<GreenMissionTask, BaseViewHolder> {
public StatisRcyAdapter(int layoutResId, @Nullable List<GreenMissionTask> data) {
super(layoutResId, data);
}
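/**
 * Binds one task row: name, task id, publisher and a human-readable label
 * for the numeric status code.
 */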
@Override
protected void convert(BaseViewHolder helper, GreenMissionTask item) {
helper.setText(R.id.tv_taskname,"任务名称:"+item.getTask_name());
helper.setText(R.id.tv_taskid,"任务编号:"+item.getTaskid());
helper.setText(R.id.tv_fbr,"发布人:"+item.getPublisher_name());
switch (item.getStatus()) {
case "0":
case "1":
case "2":
helper.setText(R.id.tv_state,"未安排人员");
break;
case "3":
helper.setText(R.id.tv_state,"已安排,待执行");
break;
case "4":
helper.setText(R.id.tv_state,"巡查中");
break;
case "100":
helper.setText(R.id.tv_state,"巡查结束");
break;
case "5":
helper.setText(R.id.tv_state,"已上报");
break;
case "9":
helper.setText(R.id.tv_state,"退回修改");
break;
default:
break;
}
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.annotation.SuppressLint;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.media.MediaRecorder;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.os.PowerManager;
import android.os.SystemClock;
import android.text.TextUtils;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.Chronometer;
import android.widget.ImageView;
import android.widget.Toast;
import android.widget.VideoView;
import com.hyphenate.easeui.ui.EaseBaseActivity;
import com.hyphenate.easeui.utils.EaseCommonUtils;
import com.hyphenate.util.EMLog;
import com.zhang.baselib.utils.Util;
import com.zhang.okinglawenforcementphone.R;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.List;
public class VideoRecordActivity extends EaseBaseActivity implements
View.OnClickListener, SurfaceHolder.Callback, MediaRecorder.OnErrorListener,
MediaRecorder.OnInfoListener {
private static final String TAG = "VideoRecordActivity";
private final static String CLASS_LABEL = "VideoRecordActivity";
private PowerManager.WakeLock mWakeLock;
private ImageView btnStart;
private ImageView btnStop;
private MediaRecorder mediaRecorder;
private VideoView mVideoView;// to display video
String localPath = "";// path to save recorded video
private Camera mCamera;
private int previewWidth = 640;
private int previewHeight = 480;
private Chronometer chronometer;
private int frontCamera = 0; // 0 is back camera,1 is front camera
private Button btn_switch;
private SurfaceHolder mSurfaceHolder;
int defaultVideoFrameRate = -1;
SimpleDateFormat mSdf = new SimpleDateFormat("yyyyMMdd_HHmmss");
private String mFileName;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);// no title
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);// full screen
// translucency mode,used in surface view
getWindow().setFormat(PixelFormat.TRANSLUCENT);
setContentView(R.layout.em_recorder_activity);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK,
CLASS_LABEL);
mWakeLock.acquire();
initViews();
}
private void initViews() {
btn_switch = (Button) findViewById(R.id.switch_btn);
btn_switch.setOnClickListener(this);
// btn_switch.setVisibility(View.VISIBLE);
mVideoView = (VideoView) findViewById(R.id.mVideoView);
btnStart = (ImageView) findViewById(R.id.recorder_start);
btnStop = (ImageView) findViewById(R.id.recorder_stop);
btnStart.setOnClickListener(this);
btnStop.setOnClickListener(this);
mSurfaceHolder = mVideoView.getHolder();
mSurfaceHolder.addCallback(this);
mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
chronometer = (Chronometer) findViewById(R.id.chronometer);
}
public void back(View view) {
releaseRecorder();
releaseCamera();
finish();
}
@Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
// keep screen on
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK,
CLASS_LABEL);
mWakeLock.acquire();
}
}
@SuppressLint("NewApi")
private boolean initCamera() {
try {
if (frontCamera == 0) {
mCamera = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
} else {
mCamera = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
}
Camera.Parameters camParams = mCamera.getParameters();
mCamera.lock();
mSurfaceHolder = mVideoView.getHolder();
mCamera.cancelAutoFocus(); // cancel autofocus
mSurfaceHolder.addCallback(this);
mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setDisplayOrientation(90);
} catch (RuntimeException ex) {
EMLog.e("video", "init Camera fail " + ex.getMessage());
return false;
}
return true;
}
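/**
 * Picks a preview frame rate (15 fps when supported) and a preview size
 * (640x480 when supported, otherwise a medium resolution) from the camera's
 * reported capabilities.
 */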
private void handleSurfaceChanged() {
if (mCamera == null) {
finish();
return;
}
boolean hasSupportRate = false;
List<Integer> supportedPreviewFrameRates = mCamera.getParameters()
.getSupportedPreviewFrameRates();
if (supportedPreviewFrameRates != null
&& supportedPreviewFrameRates.size() > 0) {
Collections.sort(supportedPreviewFrameRates);
for (int i = 0; i < supportedPreviewFrameRates.size(); i++) {
int supportRate = supportedPreviewFrameRates.get(i);
if (supportRate == 15) {
hasSupportRate = true;
}
}
if (hasSupportRate) {
defaultVideoFrameRate = 15;
} else {
defaultVideoFrameRate = supportedPreviewFrameRates.get(0);
}
}
// get all resolutions which camera provide
List<Camera.Size> resolutionList = Util.getResolutionList(mCamera);
if (resolutionList != null && resolutionList.size() > 0) {
Collections.sort(resolutionList, new Util.ResolutionComparator());
Camera.Size previewSize = null;
boolean hasSize = false;
// use 640*480 if the camera supports it
for (int i = 0; i < resolutionList.size(); i++) {
Camera.Size size = resolutionList.get(i);
if (size != null && size.width == 640 && size.height == 480) {
previewSize = size;
previewWidth = previewSize.width;
previewHeight = previewSize.height;
hasSize = true;
break;
}
}
// fall back to a medium resolution if the camera does not support 640*480
if (!hasSize) {
int mediumResolution = resolutionList.size() / 2;
if (mediumResolution >= resolutionList.size()) {
    mediumResolution = resolutionList.size() - 1;
}
previewSize = resolutionList.get(mediumResolution);
previewWidth = previewSize.width;
previewHeight = previewSize.height;
}
}
}
@Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
releaseRecorder();
releaseCamera();
finish();
}
@Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.switch_btn:
switchCamera();
break;
case R.id.recorder_start:
if (!startRecording())
return;
Toast.makeText(this, R.string.The_video_to_start, Toast.LENGTH_SHORT).show();
btn_switch.setVisibility(View.INVISIBLE);
btnStart.setVisibility(View.INVISIBLE);
btnStart.setEnabled(false);
btnStop.setVisibility(View.VISIBLE);
chronometer.setBase(SystemClock.elapsedRealtime());
chronometer.start();
break;
case R.id.recorder_stop:
btnStop.setEnabled(false);
stopRecording();
btn_switch.setVisibility(View.VISIBLE);
chronometer.stop();
btnStart.setVisibility(View.VISIBLE);
btnStop.setVisibility(View.INVISIBLE);
sendVideo(null);
break;
default:
break;
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
mSurfaceHolder = holder;
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (mCamera == null) {
if (!initCamera()) {
showFailDialog();
return;
}
}
try {
mCamera.setPreviewDisplay(mSurfaceHolder);
mCamera.startPreview();
handleSurfaceChanged();
} catch (Exception e1) {
EMLog.e("video", "start preview fail " + e1.getMessage());
showFailDialog();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder arg0) {
EMLog.v("video", "surfaceDestroyed");
}
public boolean startRecording() {
if (mediaRecorder == null) {
if (!initRecorder())
return false;
}
mediaRecorder.setOnInfoListener(this);
mediaRecorder.setOnErrorListener(this);
mediaRecorder.start();
return true;
}
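/**
 * Configures the MediaRecorder (camera source, MPEG-4 container, H.264
 * video, AAC audio, the chosen preview size and frame rate), writes the
 * output to /oking/mission_video/ with a timestamped name and caps
 * recording at 5 minutes. Returns false if the SD card or camera is
 * unavailable or prepare() fails.
 */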
@SuppressLint("NewApi")
private boolean initRecorder() {
if (!EaseCommonUtils.isSdcardExist()) {
showNoSDCardDialog();
return false;
}
if (mCamera == null) {
if (!initCamera()) {
showFailDialog();
return false;
}
}
mVideoView.setVisibility(View.VISIBLE);
mCamera.stopPreview();
mediaRecorder = new MediaRecorder();
mCamera.unlock();
mediaRecorder.setCamera(mCamera);
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
// mediaRecorder.setOrientationHint(90);
if (frontCamera == 1) {
mediaRecorder.setOrientationHint(270);
} else {
mediaRecorder.setOrientationHint(90);
}
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
mediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
// set the resolution; this must come after the output format and encoders are set
mediaRecorder.setVideoSize(previewWidth, previewHeight);
mediaRecorder.setVideoEncodingBitRate(900*1024);
// set the frame rate; this must come after the output format and encoders are set
if (defaultVideoFrameRate != -1) {
mediaRecorder.setVideoFrameRate(defaultVideoFrameRate);
}
// set the path for video file
mFileName = mSdf.format(System.currentTimeMillis());
localPath = "/storage/emulated/0/oking/mission_video/"
+ mFileName + ".mp4";
File file = new File("/storage/emulated/0/oking/mission_video/");
if (!file.exists()) {
file.mkdir();
}
mediaRecorder.setOutputFile(localPath);
mediaRecorder.setMaxDuration(300000);
mediaRecorder.setPreviewDisplay(mSurfaceHolder.getSurface());
try {
mediaRecorder.prepare();
} catch (IllegalStateException e) {
e.printStackTrace();
return false;
} catch (IOException e) {
e.printStackTrace();
return false;
}
return true;
}
public void stopRecording() {
if (mediaRecorder != null) {
mediaRecorder.setOnErrorListener(null);
mediaRecorder.setOnInfoListener(null);
try {
mediaRecorder.stop();
} catch (Exception e) {
EMLog.e("video", "stopRecording error:" + e.getMessage());
}
}
releaseRecorder();
if (mCamera != null) {
mCamera.stopPreview();
releaseCamera();
}
}
private void releaseRecorder() {
if (mediaRecorder != null) {
mediaRecorder.release();
mediaRecorder = null;
}
}
protected void releaseCamera() {
try {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
} catch (Exception e) {
    // ignore: the camera may already have been released
}
}
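/**
 * Toggles between the front and back camera when the device has both,
 * restarting the preview on the newly opened camera.
 */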
@SuppressLint("NewApi")
public void switchCamera() {
if (mCamera == null) {
return;
}
if (Camera.getNumberOfCameras() >= 2) {
btn_switch.setEnabled(false);
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
switch (frontCamera) {
case 0:
mCamera = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
frontCamera = 1;
break;
case 1:
mCamera = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
frontCamera = 0;
break;
}
try {
mCamera.lock();
// mCamera.setDisplayOrientation(90);
mCamera.setPreviewDisplay(mVideoView.getHolder());
mCamera.startPreview();
} catch (IOException e) {
mCamera.release();
mCamera = null;
}
btn_switch.setEnabled(true);
}
}
public void sendVideo(View view) {
if (TextUtils.isEmpty(localPath)) {
return;
}
File file = new File(localPath);
Intent intent = getIntent();
intent.setData(Uri.fromFile(file));
setResult(RESULT_OK, intent);
finish();
}
@Override
public void onInfo(MediaRecorder mr, int what, int extra) {
EMLog.v("video", "onInfo");
if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
EMLog.v("video", "max duration reached");
stopRecording();
btn_switch.setVisibility(View.VISIBLE);
chronometer.stop();
btnStart.setVisibility(View.GONE);
btnStop.setVisibility(View.VISIBLE);
chronometer.stop();
if (localPath == null) {
return;
}
new AlertDialog.Builder(this)
.setCancelable(false)
.setMessage("单个视频时长已达最长")
.setPositiveButton(R.string.ok,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface arg0,
int arg1) {
arg0.dismiss();
sendVideo(null);
}
}).setNegativeButton(R.string.cancel, null)
.setCancelable(false).show();
}
}
@Override
public void onError(MediaRecorder mr, int what, int extra) {
EMLog.e("video", "recording onError:");
stopRecording();
Toast.makeText(this,
"Recording error has occurred. Stopping the recording",
Toast.LENGTH_SHORT).show();
}
public void saveBitmapFile(Bitmap bitmap) {
File file = new File(Environment.getExternalStorageDirectory(), "a.jpg");
try {
BufferedOutputStream bos = new BufferedOutputStream(
new FileOutputStream(file));
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, bos);
bos.flush();
bos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
protected void onDestroy() {
super.onDestroy();
releaseCamera();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
public void onBackPressed() {
back(null);
}
private void showFailDialog() {
new AlertDialog.Builder(this)
.setTitle(R.string.prompt)
.setMessage(R.string.Open_the_equipment_failure)
.setPositiveButton(R.string.ok,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog,
int which) {
finish();
}
}).setCancelable(false).show();
}
private void showNoSDCardDialog() {
new AlertDialog.Builder(this)
.setTitle(R.string.prompt)
.setMessage("No sd card!")
.setPositiveButton(R.string.ok,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog,
int which) {
finish();
}
}).setCancelable(false).show();
}
}
<file_sep>package com.zhang.okinglawenforcementphone.beans;
import com.chad.library.adapter.base.entity.MultiItemEntity;
import org.greenrobot.greendao.annotation.Entity;
import org.greenrobot.greendao.annotation.Generated;
import org.greenrobot.greendao.annotation.Id;
/**
* Created by Administrator on 2018/4/11.
*/
@Entity
public class GreenEquipment implements MultiItemEntity {
@Id(autoincrement = true)
private Long id;
private String type2;
private String mc1; // equipment type
private String type; // equipment type code
private String mc2; // equipment name
private String value;
private String ly;
private String remarks;
private String deptId;
private int itemType = 0;
@Generated(hash = 996505078)
public GreenEquipment(Long id, String type2, String mc1, String type,
String mc2, String value, String ly, String remarks, String deptId,
int itemType) {
this.id = id;
this.type2 = type2;
this.mc1 = mc1;
this.type = type;
this.mc2 = mc2;
this.value = value;
this.ly = ly;
this.remarks = remarks;
this.deptId = deptId;
this.itemType = itemType;
}
@Generated(hash = 119980583)
public GreenEquipment() {
}
@Override
public int getItemType() {
return itemType;
}
public Long getId() {
return this.id;
}
public void setId(Long id) {
this.id = id;
}
public String getType2() {
return this.type2;
}
public void setType2(String type2) {
this.type2 = type2;
}
public String getMc1() {
return this.mc1;
}
public void setMc1(String mc1) {
this.mc1 = mc1;
}
public String getType() {
return this.type;
}
public void setType(String type) {
this.type = type;
}
public String getMc2() {
return this.mc2;
}
public void setMc2(String mc2) {
this.mc2 = mc2;
}
public String getValue() {
return this.value;
}
public void setValue(String value) {
this.value = value;
}
public String getLy() {
return this.ly;
}
public void setLy(String ly) {
this.ly = ly;
}
public String getRemarks() {
return this.remarks;
}
public void setRemarks(String remarks) {
this.remarks = remarks;
}
public String getDeptId() {
return this.deptId;
}
public void setDeptId(String deptId) {
this.deptId = deptId;
}
public void setItemType(int itemType) {
this.itemType = itemType;
}
@Override
public String toString() {
return "GreenEquipment{" +
"id=" + id +
", type2='" + type2 + '\'' +
", mc1='" + mc1 + '\'' +
", type='" + type + '\'' +
", mc2='" + mc2 + '\'' +
", value='" + value + '\'' +
", ly='" + ly + '\'' +
", remarks='" + remarks + '\'' +
", deptId='" + deptId + '\'' +
", itemType=" + itemType +
'}';
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.widget.TextView;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import com.zhang.okinglawenforcementphone.mvp.ui.fragments.LawEnforcementSpecificationFragment;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
public class FromAllLawEnforcementSpecificationActivity extends BaseActivity {
@BindView(R.id.tv_title)
TextView mTvTitle;
@BindView(R.id.toolbar)
Toolbar mToolbar;
private Unbinder mBind;
private LawEnforcementSpecificationFragment mLawEnforcementSpecificationFragment;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_from_all_law_enforcement_specification);
mBind = ButterKnife.bind(this);
initData();
setListener();
}
private void setListener() {
mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
FragmentTransaction fragmentTransaction = getSupportFragmentManager().beginTransaction();
Fragment enforcementInspectionNormsFragment = getSupportFragmentManager().findFragmentByTag("EnforcementInspectionNormsFragment");
Fragment enforcementLanguageSpecificationFragment = getSupportFragmentManager().findFragmentByTag("EnforcementLanguageSpecificationFragment");
Fragment administrativeEnforcementFragment = getSupportFragmentManager().findFragmentByTag("AdministrativeEnforcementFragment");
if (enforcementInspectionNormsFragment != null && !enforcementInspectionNormsFragment.isHidden()) {
fragmentTransaction.hide(enforcementInspectionNormsFragment);
fragmentTransaction.show(mLawEnforcementSpecificationFragment).commit();
} else if (enforcementLanguageSpecificationFragment != null && !enforcementLanguageSpecificationFragment.isHidden()) {
fragmentTransaction.hide(enforcementLanguageSpecificationFragment);
fragmentTransaction.show(mLawEnforcementSpecificationFragment).commit();
} else if (administrativeEnforcementFragment != null && !administrativeEnforcementFragment.isHidden()) {
fragmentTransaction.hide(administrativeEnforcementFragment);
fragmentTransaction.show(mLawEnforcementSpecificationFragment).commit();
} else {
finish();
}
}
});
}
private void initData() {
mLawEnforcementSpecificationFragment = LawEnforcementSpecificationFragment.newInstance(null, null);
getSupportFragmentManager().beginTransaction().replace(R.id.rl_administrative_content, mLawEnforcementSpecificationFragment, "LawEnforcementSpecificationFragment").commit();
}
@Override
protected void onDestroy() {
super.onDestroy();
mBind.unbind();
}
public void setTitleText(String title) {
mTvTitle.setText(title);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.fragments;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.PowerManager;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.Button;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.hyphenate.EMError;
import com.hyphenate.easeui.model.EaseVoiceRecorder;
import com.hyphenate.easeui.utils.EaseCommonUtils;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.ui.views.RxDialogSure;
import com.zhang.baselib.ui.views.RxDialogSureCancel;
import com.zhang.baselib.ui.views.RxToast;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.MediaManager;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.adapter.CaseSimpleAdapter;
import com.zhang.okinglawenforcementphone.adapter.SoundSimpleAdapter;
import com.zhang.okinglawenforcementphone.adapter.SourceArrayRecyAdapter;
import com.zhang.okinglawenforcementphone.adapter.SpinnerArrayAdapter;
import com.zhang.okinglawenforcementphone.beans.GreenCase;
import com.zhang.okinglawenforcementphone.beans.GreenEvidence;
import com.zhang.okinglawenforcementphone.beans.GreenEvidenceDao;
import com.zhang.okinglawenforcementphone.beans.GreenEvidenceMedia;
import com.zhang.okinglawenforcementphone.beans.GreenEvidenceSTZJOV;
import com.zhang.okinglawenforcementphone.beans.GreenMedia;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.beans.SaveOrRemoveDataEvent;
import com.zhang.okinglawenforcementphone.beans.SourceArrayOV;
import com.zhang.okinglawenforcementphone.mvp.ui.activitys.EvidenceManagerActivity;
import com.zhang.okinglawenforcementphone.mvp.ui.activitys.VideoRecordActivity;
import com.zhang.okinglawenforcementphone.utils.DialogUtil;
import com.zhang.okinglawenforcementphone.views.DividerItemDecoration;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import static android.app.Activity.RESULT_OK;
/**
* A simple {@link Fragment} subclass.
*/
public class CaseAudioVideoEvidenceFragment extends Fragment {
private static final String ARG_PARAM1 = "param1";
private static final String ARG_TYPE = "type";
private static final String ARG_EVIDENCE = "evidence";
// private static final int PHOTO_FROM_CAMERA = 100;
// private static final int PHOTO_FROM_GALLERY = 101;
private static final int VIDEO_FROM_CAMERA = 102;
private static final int VIDEO_FROM_GALLERY = 103;
private GreenEvidence myEvidence;
private GreenCase mycase;
private TextView evidence_name_tv, evidence_content_tv, evidence_remark_tv;
private TextView evidence_getLocation_tv, evidence_man_textView, evidence_dept_tv, evidence_pagerCount_tv;
private TextView type_spinner, evidence_source_spinner;
private Button save_button, close_button;
private GridView video_gridView, sound_gridView;
private SoundSimpleAdapter soundAdapter;
private CaseSimpleAdapter videoAdapter;
private ArrayList<GreenEvidenceMedia> soundList = new ArrayList<>();
private ArrayList<GreenEvidenceMedia> videoList = new ArrayList<>();
private int mType;
private long mEvidenceId;
// private Uri photouri, videouri;
private RxDialogSureCancel mRxDialogSureCancel;
private View mInflate;
private TextView mTv_stop;
private TextView mTv_cancel;
private AlertDialog mAlertDialog;
private EvidenceManagerActivity mEvidenceManagerActivity;
private ArrayList<SourceArrayOV> mPlandataOVS;
private ArrayList<SourceArrayOV> mTypedataOVS;
private DialogUtil mDialogUtil;
private View mButtonDailog;
private TextView mTv_title;
private SourceArrayRecyAdapter mSourceArrayRecyAdapter;
public CaseAudioVideoEvidenceFragment() {
// Required empty public constructor
}
public static CaseAudioVideoEvidenceFragment newInstance(int type) {
CaseAudioVideoEvidenceFragment fragment = new CaseAudioVideoEvidenceFragment();
Bundle args = new Bundle();
args.putInt(ARG_TYPE, type);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mType = getArguments().getInt(ARG_TYPE);
}
}
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
if (mInflate == null) {
mInflate = inflater.inflate(R.layout.fragment_case_forensics, container, false);
}
initView(mInflate);
return mInflate;
}
@Override
public void onDestroyView() {
// getActivity().unregisterReceiver(mReceiver);
if (MediaManager.mPlayer != null) {
MediaManager.mPlayer.reset();
MediaManager.mPlayer = null;
}
super.onDestroyView();
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
mEvidenceManagerActivity = (EvidenceManagerActivity) context;
mEvidenceManagerActivity.setVisibleAdd(false);
}
public void initView(View rootView) {
evidence_name_tv = (TextView) rootView.findViewById(R.id.evidence_name_tv);
evidence_source_spinner = (TextView) rootView.findViewById(R.id.evidence_source_spinner);
video_gridView = (GridView) rootView.findViewById(R.id.video_gridView);
sound_gridView = (GridView) rootView.findViewById(R.id.sound_gridView);
String[] plandataset = getResources().getStringArray(R.array.spinner_evidence_source);
mPlandataOVS = new ArrayList<>();
for (String s : plandataset) {
SourceArrayOV sourceArrayOV = new SourceArrayOV();
sourceArrayOV.setType(0);
sourceArrayOV.setSource(s);
mPlandataOVS.add(sourceArrayOV);
}
if (mDialogUtil == null) {
mDialogUtil = new DialogUtil();
mButtonDailog = View.inflate(BaseApplication.getApplictaion(), R.layout.maptask_dialog, null);
mTv_title = mButtonDailog.findViewById(R.id.tv_title);
RecyclerView recyList = mButtonDailog.findViewById(R.id.recy_task);
recyList.setLayoutManager(new LinearLayoutManager(BaseApplication.getApplictaion(), LinearLayoutManager.VERTICAL, false));
recyList.addItemDecoration(new DividerItemDecoration(BaseApplication.getApplictaion(), 0, 3, Color.TRANSPARENT));
mSourceArrayRecyAdapter = new SourceArrayRecyAdapter(R.layout.source_item, null);
mSourceArrayRecyAdapter.openLoadAnimation(BaseQuickAdapter.SLIDEIN_RIGHT);
recyList.setAdapter(mSourceArrayRecyAdapter);
mSourceArrayRecyAdapter.setOnItemClickListener(new BaseQuickAdapter.OnItemClickListener() {
@Override
public void onItemClick(BaseQuickAdapter adapter, View view, int position) {
List<SourceArrayOV> data = adapter.getData();
SourceArrayOV sourceArrayOV = data.get(position);
switch (sourceArrayOV.getType()) {
case 0:
switch (sourceArrayOV.getSource()) {
case "当事人提供":
myEvidence.setZJLY("DSRTG");
myEvidence.setZJLYMC(sourceArrayOV.getSource());
break;
case "当事人口述":
myEvidence.setZJLY("DSRKS");
myEvidence.setZJLYMC(sourceArrayOV.getSource());
break;
case "调查搜集":
myEvidence.setZJLY("DCSJ");
myEvidence.setZJLYMC(sourceArrayOV.getSource());
break;
case "执法人员制作":
myEvidence.setZJLY("ZFRYZZ");
myEvidence.setZJLYMC(sourceArrayOV.getSource());
break;
case "执法人员拍摄":
myEvidence.setZJLY("ZFRYPS");
myEvidence.setZJLYMC(sourceArrayOV.getSource());
break;
case "局审批科室":
myEvidence.setZJLY("JSPKS");
myEvidence.setZJLYMC(sourceArrayOV.getSource());
break;
default:
break;
}
evidence_source_spinner.setText(sourceArrayOV.getSource());
break;
case 1:
switch (position) {
case 0:
video_gridView.setVisibility(View.VISIBLE);
((LinearLayout) sound_gridView.getParent()).setVisibility(View.INVISIBLE);
break;
case 1:
video_gridView.setVisibility(View.INVISIBLE);
((LinearLayout) sound_gridView.getParent()).setVisibility(View.VISIBLE);
break;
default:
break;
}
type_spinner.setText(sourceArrayOV.getSource());
break;
default:
break;
}
mDialogUtil.cancelDialog();
}
});
}
type_spinner = (TextView) rootView.findViewById(R.id.type_spinner);
String[] typedataset = getResources().getStringArray(R.array.spinner_data_type);
mTypedataOVS = new ArrayList<>();
for (String s : typedataset) {
SourceArrayOV sourceArrayOV = new SourceArrayOV();
sourceArrayOV.setType(1);
sourceArrayOV.setSource(s);
mTypedataOVS.add(sourceArrayOV);
}
evidence_content_tv = (TextView) rootView.findViewById(R.id.evidence_content_tv);
evidence_remark_tv = (TextView) rootView.findViewById(R.id.evidence_remark_tv);
evidence_getLocation_tv = (TextView) rootView.findViewById(R.id.evidence_getLocation_tv);
evidence_man_textView = (TextView) rootView.findViewById(R.id.evidence_man_textView);
evidence_dept_tv = (TextView) rootView.findViewById(R.id.evidence_dept_tv);
evidence_pagerCount_tv = (TextView) rootView.findViewById(R.id.evidence_pagerCount_tv);
save_button = (Button) rootView.findViewById(R.id.save_button);
if (myEvidence != null) {
mEvidenceId = myEvidence.getId();
evidence_name_tv.setText(myEvidence.getZJMC());
evidence_content_tv.setText(myEvidence.getZJNR());
evidence_remark_tv.setText(myEvidence.getBZ());
evidence_getLocation_tv.setText(myEvidence.getCJDD());
evidence_man_textView.setText(myEvidence.getJZR());
evidence_dept_tv.setText(myEvidence.getDW());
evidence_pagerCount_tv.setText(myEvidence.getYS());
myEvidence.resetGreenMedia();
List<GreenEvidenceMedia> greenMedia = myEvidence.getGreenMedia();
if (greenMedia != null && greenMedia.size() > 0) {
videoList.clear();
soundList.clear();
for (GreenEvidenceMedia media : greenMedia) {
if (media.getType() == 2) {
videoList.add(media);
}
}
for (GreenEvidenceMedia media : greenMedia) {
if (media.getType() == 3) {
soundList.add(media);
}
}
}
if (videoList != null && videoList.size() > 0) {
type_spinner.setText("视频");
video_gridView.setVisibility(View.VISIBLE);
((LinearLayout) sound_gridView.getParent()).setVisibility(View.INVISIBLE);
} else if (soundList != null && soundList.size() > 0) {
type_spinner.setText("语音");
video_gridView.setVisibility(View.INVISIBLE);
((LinearLayout) sound_gridView.getParent()).setVisibility(View.VISIBLE);
} else {
type_spinner.setText("视频");
video_gridView.setVisibility(View.INVISIBLE);
((LinearLayout) sound_gridView.getParent()).setVisibility(View.INVISIBLE);
}
} else {
myEvidence = new GreenEvidence();
myEvidence.setGreenCaseId(mycase.getId());
myEvidence.setZJID(UUID.randomUUID().toString());
myEvidence.setAJID(mycase.getAJID());
myEvidence.setZJLX("STZL");
mEvidenceId = GreenDAOManager.getInstence().getDaoSession().getGreenEvidenceDao().insert(myEvidence);
}
if (mType == 0) {
save_button.setVisibility(View.GONE);
} else {
save_button.setVisibility(View.VISIBLE);
}
evidence_source_spinner.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mSourceArrayRecyAdapter.setNewData(mPlandataOVS);
mDialogUtil.showBottomDialog(mEvidenceManagerActivity, mButtonDailog, 300f);
}
});
type_spinner.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mSourceArrayRecyAdapter.setNewData(mTypedataOVS);
mDialogUtil.showBottomDialog(mEvidenceManagerActivity, mButtonDailog, 300f);
}
});
save_button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (localSaveEvidence()) {
final RxDialogSure rxDialogSure = new RxDialogSure(getActivity());
rxDialogSure.setTitle("提示");
rxDialogSure.setContent("保存成功!");
rxDialogSure.getTvSure().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
rxDialogSure.cancel();
mEvidenceManagerActivity.setVisibleAdd(true);
GreenEvidenceSTZJOV greenEvidenceOV = new GreenEvidenceSTZJOV();
greenEvidenceOV.setType(mType);
greenEvidenceOV.setGreenEvidence(myEvidence);
EventBus.getDefault().post(greenEvidenceOV);
getFragmentManager().beginTransaction().remove(CaseAudioVideoEvidenceFragment.this).commit();
}
});
rxDialogSure.show();
}
}
});
close_button = (Button) rootView.findViewById(R.id.close_button);
close_button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mEvidenceManagerActivity.setVisibleAdd(true);
getFragmentManager().beginTransaction().remove(CaseAudioVideoEvidenceFragment.this).commit();
}
});
initVoiceRecorder();
setSoundGridView();
setVideoGridView();
}
@Subscribe(threadMode = ThreadMode.MAIN)
public void handleEvent1(SaveOrRemoveDataEvent event) {
if (event.type == 0) { // keep the data
} else { // do not keep the data
GreenDAOManager.getInstence().getDaoSession().getGreenEvidenceDao().deleteByKey(mEvidenceId);
}
}
private PowerManager.WakeLock wakeLock;
private EaseVoiceRecorder voiceRecorder;
private Drawable[] micImages;
private ImageView mMic_image;
private Handler micImageHandler = new Handler() {
@Override
public void handleMessage(android.os.Message msg) {
// change image
mMic_image.setImageDrawable(micImages[msg.what]);
}
};
/**
* Initialize the voice recorder.
*/
private void initVoiceRecorder() {
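// Prepare a dim-screen wake lock (held while recording) and the mic-volume animation frames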
wakeLock = ((PowerManager) BaseApplication.getApplictaion().getSystemService(Context.POWER_SERVICE)).newWakeLock(
PowerManager.SCREEN_DIM_WAKE_LOCK, "oking");
micImages = new Drawable[]{BaseApplication.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_01),
BaseApplication.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_02),
BaseApplication.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_03),
BaseApplication.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_04),
BaseApplication.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_05),
BaseApplication.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_06),
BaseApplication.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_07),
BaseApplication.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_08),
BaseApplication.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_09),
BaseApplication.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_10),
BaseApplication.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_11),
BaseApplication.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_12),
BaseApplication.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_13),
BaseApplication.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_14),};
voiceRecorder = new EaseVoiceRecorder(micImageHandler);
}
private void setSoundGridView() {
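// Wire the sound grid: "add" opens the recording dialog, long-press prompts to delete the recording and its file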
soundAdapter = new SoundSimpleAdapter(soundList, !myEvidence.getIsUpload());
soundAdapter.setOnClickListener(new SoundSimpleAdapter.OnClickListener() {
private AlertDialog.Builder mBuilder;
@Override
public void onLongItemClick(final SoundSimpleAdapter adapter, final ArrayList<GreenEvidenceMedia> data, final int position) {
if (mRxDialogSureCancel == null) {
mRxDialogSureCancel = new RxDialogSureCancel(getActivity());
}
mRxDialogSureCancel.setTitle("提示");
mRxDialogSureCancel.setContent("是否删除声音文件?");
mRxDialogSureCancel.getTvSure().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
String path = data.get(position).getPath();
File file = new File(path);
if (file.exists()) {
file.delete();
// Ask the media scanner to rescan the deleted file so it no longer shows up in the gallery
Intent intent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
intent.setData(Uri.fromFile(file));
CaseAudioVideoEvidenceFragment.this.getContext().sendBroadcast(intent);
}
data.remove(position);
adapter.notifyDataSetChanged();
mRxDialogSureCancel.cancel();
}
});
mRxDialogSureCancel.getTvCancel().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mRxDialogSureCancel.cancel();
}
});
mRxDialogSureCancel.show();
}
@Override
public void onAddSoundClick() {
if (mBuilder == null) {
mBuilder = new AlertDialog.Builder(getActivity());
View recordingView = View.inflate(getActivity(), R.layout.voice_recorder_dialog, null);
mMic_image = recordingView.findViewById(R.id.mic_image);
mTv_stop = recordingView.findViewById(R.id.tv_stop);
mTv_cancel = recordingView.findViewById(R.id.tv_cancel);
mBuilder.setView(recordingView);
mAlertDialog = mBuilder.create();
mAlertDialog.setCanceledOnTouchOutside(false);
mAlertDialog.show();
WindowManager.LayoutParams params =
mAlertDialog.getWindow().getAttributes();
params.width = 580;
params.height = 530;
mAlertDialog.getWindow().setAttributes(params);
}
startRecording();
mTv_stop.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
int length = stopRecoding();
if (length > 0) {
GreenEvidenceMedia greenMedia = new GreenEvidenceMedia();
greenMedia.setGreenEvidenceId(myEvidence.getId()); // link the recording to this evidence record
greenMedia.setPath(Environment.getExternalStorageDirectory() + "/oking/mission_voice/" + getVoiceFileName());
greenMedia.setType(3);
GreenDAOManager.getInstence().getDaoSession().getGreenEvidenceMediaDao().insert(greenMedia);
soundList.add(greenMedia);
soundAdapter.notifyDataSetChanged();
} else if (length == EMError.FILE_INVALID) {
RxToast.error(BaseApplication.getApplictaion(), "录音失败", Toast.LENGTH_SHORT).show();
} else {
RxToast.warning(BaseApplication.getApplictaion(), "录音时间太短", Toast.LENGTH_SHORT).show();
}
} catch (Exception e) {
e.printStackTrace();
RxToast.error(BaseApplication.getApplictaion(), "录音失败", Toast.LENGTH_SHORT).show();
}
mAlertDialog.cancel();
}
});
mTv_cancel.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
stopRecoding();
mAlertDialog.cancel();
}
});
}
});
sound_gridView.setAdapter(soundAdapter);
}
public String getVoiceFileName() {
return voiceRecorder.getVoiceFileName();
}
private int stopRecoding() {
if (wakeLock.isHeld()) {
wakeLock.release();
}
return voiceRecorder.stopRecoding();
}
private void startRecording() {
if (!EaseCommonUtils.isSdcardExist()) {
RxToast.error(BaseApplication.getApplictaion(), "请插上sd卡", Toast.LENGTH_SHORT).show();
return;
}
try {
wakeLock.acquire();
voiceRecorder.setRecordTag(true);
voiceRecorder.startRecording(BaseApplication.getApplictaion());
} catch (Exception e) {
e.printStackTrace();
if (wakeLock.isHeld()) {
wakeLock.release();
}
if (voiceRecorder != null) {
voiceRecorder.discardRecording();
}
RxToast.error(BaseApplication.getApplictaion(), "录音失败,请重试!", Toast.LENGTH_SHORT).show();
return;
}
}
private void setVideoGridView() {
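// Wire the video grid: "add" opens VideoRecordActivity to capture a clip, long-press prompts to delete the selected clip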
videoAdapter = new CaseSimpleAdapter(videoList, this, mType != 0, "视听资料");
videoAdapter.setOnClickListener(new CaseSimpleAdapter.OnClickListener() {
@Override
public void onAddVideo() {
Intent intent = new Intent();
intent.setClass(getActivity(), VideoRecordActivity.class);
CaseAudioVideoEvidenceFragment.this.startActivityForResult(intent, VIDEO_FROM_CAMERA);
}
@Override
public void onLongItemClick(final CaseSimpleAdapter adapter, final ArrayList<GreenEvidenceMedia> data, final int position) {
if (mRxDialogSureCancel == null) {
mRxDialogSureCancel = new RxDialogSureCancel(getActivity());
}
mRxDialogSureCancel.setTitle("提示");
mRxDialogSureCancel.setContent("是否删除原视频?");
mRxDialogSureCancel.getTvCancel().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mRxDialogSureCancel.cancel();
}
});
mRxDialogSureCancel.getTvSure().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
String path = data.get(position).getPath();
File file = new File(path);
if (file.exists()) {
file.delete();
}
data.remove(position);
adapter.notifyDataSetChanged();
mRxDialogSureCancel.cancel();
}
});
mRxDialogSureCancel.show();
}
});
video_gridView.setAdapter(videoAdapter);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode == RESULT_OK) {
switch (requestCode) {
case VIDEO_FROM_CAMERA:
Uri videouri = data.getData();
GreenEvidenceMedia greenMedia = new GreenEvidenceMedia();
greenMedia.setType(2);
greenMedia.setPath(videouri.toString());
greenMedia.setGreenEvidenceId(myEvidence.getId());
GreenDAOManager.getInstence().getDaoSession().getGreenEvidenceMediaDao().insert(greenMedia);
videoList.add(greenMedia);
videoAdapter.notifyDataSetChanged();
// Notify the system media scanner about the newly recorded file
Intent intent2 = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
intent2.setData(videouri);
CaseAudioVideoEvidenceFragment.this.getContext().sendBroadcast(intent2);
break;
case VIDEO_FROM_GALLERY:
Uri videoUri = data.getData();
GreenEvidenceMedia greenMedia2 = new GreenEvidenceMedia();
greenMedia2.setType(2);
greenMedia2.setPath(videoUri.toString());
greenMedia2.setGreenEvidenceId(myEvidence.getId());
GreenDAOManager.getInstence().getDaoSession().getGreenEvidenceMediaDao().insert(greenMedia2);
videoList.add(greenMedia2);
videoAdapter.notifyDataSetChanged();
break;
default:
break;
}
// localSaveEvidence();
}
super.onActivityResult(requestCode, resultCode, data);
}
private boolean localSaveEvidence() {
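// Validate the required fields, then persist this evidence to the local GreenDAO database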
if (type_spinner.getText().toString().trim().equals("*请选择")) {
RxToast.warning("证据类型不能为空!");
return false;
}
if (evidence_source_spinner.getText().toString().trim().equals("*请选择")) {
RxToast.warning("证据来源不能为空!");
return false;
}
if ("".equals(evidence_name_tv.getText().toString())) {
Toast.makeText(getContext(), "证据名称不能为空!", Toast.LENGTH_SHORT).show();
return false;
}
// if (evidence_source_spinner.getText().toString().equals("")) {
// Toast.makeText(getContext(), "证据来源不能为空!", Toast.LENGTH_SHORT).show();
// return false;
// }
if ("".equals(evidence_content_tv.getText().toString())) {
Toast.makeText(getContext(), "证据内容不能为空!", Toast.LENGTH_SHORT).show();
return false;
}
if ("".equals(evidence_getLocation_tv.getText().toString())) {
Toast.makeText(getContext(), "采集地点不能为空!", Toast.LENGTH_SHORT).show();
return false;
}
if (soundList.isEmpty() && videoList.isEmpty()) {
Toast.makeText(getContext(), "请录入语音或视频!", Toast.LENGTH_SHORT).show();
return false;
}
myEvidence.setZJMC(evidence_name_tv.getText().toString());
myEvidence.setZJNR(evidence_content_tv.getText().toString());
myEvidence.setBZ(evidence_remark_tv.getText().toString());
myEvidence.setCJDD(evidence_getLocation_tv.getText().toString());
myEvidence.setJZR(evidence_man_textView.getText().toString());
myEvidence.setDW(evidence_dept_tv.getText().toString());
myEvidence.setYS(evidence_pagerCount_tv.getText().toString());
myEvidence.setCJSJ(System.currentTimeMillis());
myEvidence.setCJR(OkingContract.CURRENTUSER.getUserName());
if (videoList.size() > 0 && soundList.size() > 0) {
myEvidence.setOtype("YYSP"); // audio and video
} else if (videoList.size() > 0 && soundList.size() < 1) {
myEvidence.setOtype("SP"); // video only
} else if (videoList.size() < 1 && soundList.size() > 0) {
myEvidence.setOtype("YY"); // audio only
}
if (mType == 0 || mType == 2 && myEvidence != null) {
mycase.getGreenEvidence().add(myEvidence);
}
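// Insert a new row if this evidence has not been persisted yet, otherwise update the existing one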
GreenEvidence unique = GreenDAOManager.getInstence().getDaoSession().getGreenEvidenceDao().queryBuilder().where(GreenEvidenceDao.Properties.Id.eq(mEvidenceId)).unique();
if (unique == null) {
myEvidence.setGreenCaseId(mycase.getId());
GreenDAOManager.getInstence().getDaoSession().getGreenEvidenceDao().insert(myEvidence);
} else {
GreenDAOManager.getInstence().getDaoSession().getGreenEvidenceDao().update(myEvidence);
}
return true;
}
public void setGreenCase(GreenCase greenCase, GreenEvidence greenEvidence) {
this.mycase = greenCase;
this.myEvidence = greenEvidence;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.utils;
import com.zhang.okinglawenforcementphone.beans.RecipientBean;
import net.sourceforge.pinyin4j.PinyinHelper;
import java.util.Comparator;
/**
* Created by Administrator on 2018/1/8.
*/
public class CBRPinyinComparator implements Comparator<RecipientBean> {
@Override
public int compare(RecipientBean t0, RecipientBean t1) {
String us0 = t0.getUSERNAME();
String us1 = t1.getUSERNAME();
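// "曾" is polyphonic (céng / zēng); map it to "增" so the surname sorts under its "zēng" reading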
if (us0.startsWith("曾")){
us0 = us0.replace("曾","增");
}
if (us1.startsWith("曾")){
us1=us1.replace("曾","增");
}
return concatPinyinStringArray(PinyinHelper.toHanyuPinyinStringArray(us0.charAt(0)))
.compareTo(concatPinyinStringArray(PinyinHelper
.toHanyuPinyinStringArray(us1.charAt(0))));
}
private String concatPinyinStringArray(String[] pinyinArray) {
StringBuffer pinyinSbf = new StringBuffer();
if ((pinyinArray != null) && (pinyinArray.length > 0)) {
for (int i = 0; i < pinyinArray.length; i++) {
pinyinSbf.append(pinyinArray[i]);
}
}
return pinyinSbf.toString();
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.contract;
/**
* Created by Administrator on 2018/6/8/008.
*/
public interface LoadTaskPicContract {
interface Model {
void loadTaskPic(String logId);
}
interface View {
void loadTaskPicSucc(String result);
void loadTaskPicFail(Throwable e);
}
interface Presenter {
void loadTaskPic(String logId);
void loadTaskPicSucc(String result);
void loadTaskPicFail(Throwable e);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.model;
import com.zhang.baselib.http.BaseHttpFactory;
import com.zhang.baselib.http.schedulers.RxSchedulersHelper;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.http.service.GDWaterService;
import com.zhang.okinglawenforcementphone.mvp.contract.UpdateMissionStateContract;
import io.reactivex.functions.Consumer;
import okhttp3.ResponseBody;
/**
* Created by Administrator on 2018/4/20.
*/
public class UpdateMissionStateModel implements UpdateMissionStateContract.Model {
private UpdateMissionStateContract.Presenter mPresenter;
public UpdateMissionStateModel(UpdateMissionStateContract.Presenter presenter) {
mPresenter = presenter;
}
@Override
public void updateMissionState(String id, String executeStartTime, String executeEndTime, int status) {
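// Send the state-update request and pass the raw response string (or the error) back to the presenter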
BaseHttpFactory.getInstence()
.createService(GDWaterService.class, Api.BASE_URL)
.updateMissionState(id, executeStartTime, executeEndTime, status)
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
String result = responseBody.string();
mPresenter.updateMissionStateSucc(result);
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
mPresenter.updateMissionStateFail(throwable);
}
});
}
}
<file_sep>package com.zhang.okinglawenforcementphone.beans;
/**
* Created by Administrator on 2017/10/27.
*/
public class LawBean {
private long rowId;
private String levelEffectiveness;
private String publishingDepartment;
private String releaseTime;
private String implementationTime;
private String mmid;
private String title;
private String rulesContent;
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getLevelEffectiveness() {
return levelEffectiveness;
}
public void setLevelEffectiveness(String levelEffectiveness) {
this.levelEffectiveness = levelEffectiveness;
}
public String getPublishingDepartment() {
return publishingDepartment;
}
public void setPublishingDepartment(String publishingDepartment) {
this.publishingDepartment = publishingDepartment;
}
public String getReleaseTime() {
return releaseTime;
}
public void setReleaseTime(String releaseTime) {
this.releaseTime = releaseTime;
}
public String getImplementationTime() {
return implementationTime;
}
public void setImplementationTime(String implementationTime) {
this.implementationTime = implementationTime;
}
public String getMmid() {
return mmid;
}
public void setMmid(String mmid) {
this.mmid = mmid;
}
public String getRulesContent() {
return rulesContent;
}
public void setRulesContent(String rulesContent) {
this.rulesContent = rulesContent;
}
public long getRowId() {
return rowId;
}
public void setRowId(long rowId) {
this.rowId = rowId;
}
@Override
public String toString() {
return "LawBean{" +
"rowId=" + rowId +
", levelEffectiveness='" + levelEffectiveness + '\'' +
", publishingDepartment='" + publishingDepartment + '\'' +
", releaseTime='" + releaseTime + '\'' +
", implementationTime='" + implementationTime + '\'' +
", mmid='" + mmid + '\'' +
", title='" + title + '\'' +
", rulesContent='" + rulesContent + '\'' +
'}';
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.contract;
import com.google.gson.Gson;
import com.zhang.okinglawenforcementphone.beans.RecordLogOV;
/**
* Created by Administrator on 2018/4/26/026.
*/
public interface UploadJobLogContract {
interface Model {
void uploadJobLog(RecordLogOV recordLogOV, Gson gson);
}
interface View {
void uploadJobLogSucc(String result);
void uploadJobLogFail(Throwable ex);
}
interface Presenter {
void uploadJobLog(RecordLogOV recordLogOV, Gson gson);
void uploadJobLogSucc(String result);
void uploadJobLogFail(Throwable ex);
}
}
<file_sep>package com.zhang.baselib.utils;
import android.graphics.Bitmap;
import android.graphics.Matrix;
/**
* Created by Administrator on 2018/5/22/022.
*/
public class PicUtil {
/**
* Rotate a bitmap by the given angle.
*
* @param bm
*            the bitmap to rotate
* @param degree
*            the rotation angle in degrees
* @return the rotated bitmap, or the original bitmap if rotation fails
*/
public static Bitmap rotateBitmapByDegree(Bitmap bm, int degree) {
Bitmap returnBm = null;
// Build a rotation matrix for the requested angle
Matrix matrix = new Matrix();
matrix.postRotate(degree);
try {
// Apply the matrix to the original bitmap to obtain a rotated copy
returnBm = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(),
bm.getHeight(), matrix, true);
} catch (OutOfMemoryError e) {
// If the rotated copy cannot be allocated, fall through and return the original bitmap
}
if (returnBm == null) {
returnBm = bm;
}
if (bm != returnBm) {
bm.recycle();
}
return returnBm;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.presenter;
import com.zhang.okinglawenforcementphone.mvp.contract.UploadCaseInAdvanceContract;
import com.zhang.okinglawenforcementphone.mvp.model.UploadCaseInAdvanceModel;
import java.util.Map;
/**
* Created by Administrator on 2018/5/4/004.
*/
public class UploadCaseInAdvancePresenter implements UploadCaseInAdvanceContract.Presenter {
private UploadCaseInAdvanceContract.Model mModel;
private UploadCaseInAdvanceContract.View mView;
public UploadCaseInAdvancePresenter(UploadCaseInAdvanceContract.View view) {
mView = view;
mModel = new UploadCaseInAdvanceModel(this);
}
@Override
public void uploadCaseInAdvance(Map<String, Object> params) {
mModel.uploadCaseInAdvance(params);
}
@Override
public void uploadCaseInAdvanceSucc(String result) {
mView.uploadCaseInAdvanceSucc(result);
}
@Override
public void uploadCaseInAdvanceFail(Throwable ex) {
mView.uploadCaseInAdvanceFail(ex);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.presenter;
import com.zhang.okinglawenforcementphone.beans.GreenMissionLog;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import com.zhang.okinglawenforcementphone.mvp.contract.UploadSignaturePicContract;
import com.zhang.okinglawenforcementphone.mvp.model.UploadSignaturePicModel;
import java.util.Map;
import okhttp3.RequestBody;
/**
* Created by Administrator on 2018/4/26/026.
*/
public class UploadSignaturePicPresenter implements UploadSignaturePicContract.Presenter {
private UploadSignaturePicContract.Model mModel;
private UploadSignaturePicContract.View mView;
public UploadSignaturePicPresenter(UploadSignaturePicContract.View view) {
mView = view;
mModel = new UploadSignaturePicModel(this);
}
@Override
public void uploadSignaturePic(GreenMissionLog mGreenMissionLog, GreenMissionTask missionTask, Map<String, RequestBody> photoParams) {
mModel.uploadSignaturePic(mGreenMissionLog,missionTask,photoParams);
}
@Override
public void uploadSignaturePicSucc(String result) {
mView.uploadSignaturePicSucc(result);
}
@Override
public void uploadIsCount(int pos) {
mView.uploadIsCount(pos);
}
@Override
public void uploadRetry(Throwable ex) {
mView.uploadRetry(ex);
}
@Override
public void uploadSignatureFail(Throwable ex) {
mView.uploadSignatureFail(ex);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.beans;
import org.greenrobot.greendao.DaoException;
import org.greenrobot.greendao.annotation.Entity;
import org.greenrobot.greendao.annotation.Generated;
import org.greenrobot.greendao.annotation.Id;
import org.greenrobot.greendao.annotation.ToOne;
import org.greenrobot.greendao.annotation.Transient;
import java.sql.Date;
/**
* Created by Administrator on 2018/3/27.
*/
@Entity
public class GreenMedia {
@Id(autoincrement = true)
private Long id;
private Long greenMissionLogId;
private Long time;
private String path;
private String userid;
private String taskid;
@Transient
private GreenLocation souceLocation;
private Integer type; // 1 = log photo, 2 = video, 3 = audio, 4 = signature image
private Long greenGreenLocationId;
@ToOne(joinProperty = "greenGreenLocationId")
private GreenLocation location;
/** Used to resolve relations */
@Generated(hash = 2040040024)
private transient DaoSession daoSession;
/** Used for active entity operations. */
@Generated(hash = 1532472546)
private transient GreenMediaDao myDao;
@Generated(hash = 24618416)
public GreenMedia(Long id, Long greenMissionLogId, Long time, String path,
String userid, String taskid, Integer type, Long greenGreenLocationId) {
this.id = id;
this.greenMissionLogId = greenMissionLogId;
this.time = time;
this.path = path;
this.userid = userid;
this.taskid = taskid;
this.type = type;
this.greenGreenLocationId = greenGreenLocationId;
}
@Generated(hash = 1581839435)
public GreenMedia() {
}
public Long getId() {
return this.id;
}
public void setId(Long id) {
this.id = id;
}
public Long getGreenMissionLogId() {
return this.greenMissionLogId;
}
public void setGreenMissionLogId(Long greenMissionLogId) {
this.greenMissionLogId = greenMissionLogId;
}
public Long getTime() {
return this.time;
}
public void setTime(Long time) {
this.time = time;
}
public String getPath() {
return this.path;
}
public void setPath(String path) {
this.path = path;
}
public String getUserid() {
return this.userid;
}
public void setUserid(String userid) {
this.userid = userid;
}
public String getTaskid() {
return this.taskid;
}
public void setTaskid(String taskid) {
this.taskid = taskid;
}
public Integer getType() {
return this.type;
}
public void setType(Integer type) {
this.type = type;
}
public Long getGreenGreenLocationId() {
return this.greenGreenLocationId;
}
public void setGreenGreenLocationId(Long greenGreenLocationId) {
this.greenGreenLocationId = greenGreenLocationId;
}
public GreenLocation getSouceLocation() {
return souceLocation;
}
public void setSouceLocation(GreenLocation souceLocation) {
this.souceLocation = souceLocation;
}
@Generated(hash = 1068795426)
private transient Long location__resolvedKey;
/** To-one relationship, resolved on first access. */
@Generated(hash = 2016527076)
public GreenLocation getLocation() {
Long __key = this.greenGreenLocationId;
if (location__resolvedKey == null || !location__resolvedKey.equals(__key)) {
final DaoSession daoSession = this.daoSession;
if (daoSession == null) {
throw new DaoException("Entity is detached from DAO context");
}
GreenLocationDao targetDao = daoSession.getGreenLocationDao();
GreenLocation locationNew = targetDao.load(__key);
synchronized (this) {
location = locationNew;
location__resolvedKey = __key;
}
}
return location;
}
/** called by internal mechanisms, do not call yourself. */
@Generated(hash = 592716253)
public void setLocation(GreenLocation location) {
synchronized (this) {
this.location = location;
greenGreenLocationId = location == null ? null : location.getId();
location__resolvedKey = greenGreenLocationId;
}
}
/**
* Convenient call for {@link org.greenrobot.greendao.AbstractDao#delete(Object)}.
* Entity must attached to an entity context.
*/
@Generated(hash = 128553479)
public void delete() {
if (myDao == null) {
throw new DaoException("Entity is detached from DAO context");
}
myDao.delete(this);
}
/**
* Convenient call for {@link org.greenrobot.greendao.AbstractDao#refresh(Object)}.
* Entity must attached to an entity context.
*/
@Generated(hash = 1942392019)
public void refresh() {
if (myDao == null) {
throw new DaoException("Entity is detached from DAO context");
}
myDao.refresh(this);
}
/**
* Convenient call for {@link org.greenrobot.greendao.AbstractDao#update(Object)}.
* Entity must attached to an entity context.
*/
@Generated(hash = 713229351)
public void update() {
if (myDao == null) {
throw new DaoException("Entity is detached from DAO context");
}
myDao.update(this);
}
@Override
public String toString() {
return "GreenMedia{" +
"id=" + id +
", greenMissionLogId=" + greenMissionLogId +
", time=" + time +
", path='" + path + '\'' +
", userid='" + userid + '\'' +
", taskid='" + taskid + '\'' +
", souceLocation=" + souceLocation +
", type=" + type +
", greenGreenLocationId=" + greenGreenLocationId +
", location=" + location +
", daoSession=" + daoSession +
", myDao=" + myDao +
", location__resolvedKey=" + location__resolvedKey +
'}';
}
/** called by internal mechanisms, do not call yourself. */
@Generated(hash = 1652320658)
public void __setDaoSession(DaoSession daoSession) {
this.daoSession = daoSession;
myDao = daoSession != null ? daoSession.getGreenMediaDao() : null;
}
}
<file_sep>buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'org.greenrobot:greendao-gradle-plugin:3.2.2'
}
}
apply plugin: 'com.android.application'
apply plugin: 'org.greenrobot.greendao'
android {
compileSdkVersion 27
defaultConfig {
applicationId "com.zhang.okinglawenforcementphone"
minSdkVersion 18
targetSdkVersion 27
versionCode 1
versionName "1.0.1"
multiDexEnabled true
ndk {
abiFilters "armeabi"
}
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
sourceSets {
main {
jniLibs.srcDirs = ['libs']
}
}
dexOptions {
javaMaxHeapSize "4g"
}
}
dependencies {
implementation fileTree(include: ['*.jar'], dir: 'libs')
implementation 'com.android.support:appcompat-v7:27.1.1'
implementation 'com.android.support.constraint:constraint-layout:1.1.0'
implementation 'com.android.support:support-v4:27.1.1'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'com.android.support.test:runner:1.0.1'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.1'
annotationProcessor 'com.jakewharton:butterknife-compiler:8.8.1'
implementation 'com.jakewharton:butterknife:8.8.1'
implementation 'com.youth.banner:banner:1.4.10'
implementation 'com.hyman:flowlayout-lib:1.1.2'
implementation project(':CircularProgressButtonLibrary')
implementation project(':baselib')
implementation files('libs/pinyin4j-2.5.0.jar')
implementation 'com.jzxiang.pickerview:TimePickerDialog:1.0.1'
implementation project(':easeui')
implementation 'top.zibin:Luban:1.1.3'
implementation project(':whiteboardlib')
implementation files('libs/ocr-sdk.jar')
implementation project(':ocr_ui')
implementation project(':android-pdf-viewer')
implementation 'com.github.sundeepk:compact-calendar-view:2.0.3-beta'
implementation 'com.github.PhilJay:MPAndroidChart:v3.0.3'
implementation files('libs/mail.jar')
implementation files('libs/activation.jar')
implementation files('libs/additionnal.jar')
implementation 'cn.ittiger:TigerVideoPlayer:1.7'
implementation files('libs/AMap3DMap_5.2.0_AMapNavi_5.1.0_AMapSearch_5.1.0_AMapLocation_3.4.0_20170602.jar')
implementation files('libs/jcore-android-1.2.3.jar')
implementation files('libs/jpush-android-3.1.5.jar')
}
configurations.all {
resolutionStrategy.eachDependency { DependencyResolveDetails details ->
def requested = details.requested
if (requested.group == 'com.android.support') {
if (!requested.name.startsWith("multidex")) {
details.useVersion '26.1.0'
}
}
}
}
<file_sep>package com.zhang.okinglawenforcementphone;
import android.content.Context;
import com.zhang.baselib.ui.views.RxDialogSureCancel;
import com.zhang.baselib.utils.ActivityUtil;
import com.zhang.okinglawenforcementphone.beans.DaoMaster;
import com.zhang.okinglawenforcementphone.beans.DaoSession;
import com.zhang.okinglawenforcementphone.db.MyGreenDaoDbHelper;
import org.greenrobot.greendao.database.Database;
/**
* Created by Administrator on 2018/4/18.
*/
public class GreenDAOManager {
public static final boolean ENCRYPTED = true;
private static GreenDAOManager mGreenDAOMannager;
private DaoSession daoSession;
private MyGreenDaoDbHelper mDevOpenHelper;
private GreenDAOManager() {
}
public static GreenDAOManager getInstence() {
if (mGreenDAOMannager == null) {
synchronized (GreenDAOManager.class) {
if (mGreenDAOMannager == null) {
mGreenDAOMannager = new GreenDAOManager();
}
}
}
return mGreenDAOMannager;
}
public void initGreenDao(Context context){
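// When ENCRYPTED is true, open the encrypted writable database with the hard-coded passphrase; otherwise use the plain one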
MyGreenDaoDbHelper helper = new MyGreenDaoDbHelper(context, ENCRYPTED ? "gdWater-db-encrypted" : "gdWater-db");
Database db = ENCRYPTED ? helper.getEncryptedWritableDb("super-secret") : helper.getWritableDb();
daoSession = new DaoMaster(db).newSession();
mDevOpenHelper = new MyGreenDaoDbHelper(context, "gdWater-db");
}
public DaoSession getDaoSession(){
return daoSession;
}
// Upgrade the database tables
public void updateTable(){
mDevOpenHelper.onUpgrade(mDevOpenHelper.getWritableDatabase(), 1, 1);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.model;
import com.zhang.baselib.http.BaseHttpFactory;
import com.zhang.baselib.http.schedulers.RxSchedulersHelper;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.http.service.GDWaterService;
import com.zhang.okinglawenforcementphone.mvp.contract.LoadCanSelectMemberContract;
import io.reactivex.functions.Consumer;
import okhttp3.ResponseBody;
/**
* Created by Administrator on 2018/4/20.
*/
public class LoadCanSelectMemberModel implements LoadCanSelectMemberContract.Model {
private LoadCanSelectMemberContract.Presenter mPresenter;
public LoadCanSelectMemberModel(LoadCanSelectMemberContract.Presenter presenter) {
mPresenter = presenter;
}
@Override
public void loadCanSelectMember() {
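// Request the selectable members for "SZJC,CBR" and forward the raw response (or the error) to the presenter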
BaseHttpFactory.getInstence()
.createService(GDWaterService.class, Api.BASE_URL)
.loadCanSelectMember("SZJC,CBR")
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
mPresenter.loadCanSelectMemberSucc(responseBody.string());
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
mPresenter.loadCanSelectMemberFail(throwable);
}
});
}
}
<file_sep>package com.zhang.okinglawenforcementphone.beans;
import com.amap.api.maps.model.LatLng;
import java.util.ArrayList;
/**
* Created by Administrator on 2017/12/15.
*/
public class MapTaskInfo {
private String taskName;
private String taskState;
private String taskTime;
private String taskPeleasePeople;
private String taskApprover;
private String taskDescription;
private String taskAre;
private LatLng taskLatLng;
private LatLng moveCenterLatLng;
private LatLng startLatLng;
private LatLng endLatLng;
private ArrayList<LatLng>mPicLatLngs;
private ArrayList<LatLng>mRecordScreenLatLngs;
public LatLng getTaskLatLng() {
return taskLatLng;
}
public void setTaskLatLng(LatLng taskLatLng) {
this.taskLatLng = taskLatLng;
}
public String getTaskName() {
return taskName;
}
public void setTaskName(String taskName) {
this.taskName = taskName;
}
public String getTaskState() {
return taskState;
}
public void setTaskState(String taskState) {
this.taskState = taskState;
}
public String getTaskTime() {
return taskTime;
}
public void setTaskTime(String taskTime) {
this.taskTime = taskTime;
}
public String getTaskPeleasePeople() {
return taskPeleasePeople;
}
public void setTaskPeleasePeople(String taskPeleasePeople) {
this.taskPeleasePeople = taskPeleasePeople;
}
public String getTaskApprover() {
return taskApprover;
}
public void setTaskApprover(String taskApprover) {
this.taskApprover = taskApprover;
}
public String getTaskDescription() {
return taskDescription;
}
public void setTaskDescription(String taskDescription) {
this.taskDescription = taskDescription;
}
public String getTaskAre() {
return taskAre;
}
public void setTaskAre(String taskAre) {
this.taskAre = taskAre;
}
public LatLng getMoveCenterLatLng() {
return moveCenterLatLng;
}
public void setMoveCenterLatLng(LatLng moveCenterLatLng) {
this.moveCenterLatLng = moveCenterLatLng;
}
public LatLng getStartLatLng() {
return startLatLng;
}
public void setStartLatLng(LatLng startLatLng) {
this.startLatLng = startLatLng;
}
public LatLng getEndLatLng() {
return endLatLng;
}
public void setEndLatLng(LatLng endLatLng) {
this.endLatLng = endLatLng;
}
public ArrayList<LatLng> getPicLatLngs() {
return mPicLatLngs;
}
public void setPicLatLngs(ArrayList<LatLng> picLatLngs) {
mPicLatLngs = picLatLngs;
}
public ArrayList<LatLng> getRecordScreenLatLngs() {
return mRecordScreenLatLngs;
}
public void setRecordScreenLatLngs(ArrayList<LatLng> recordScreenLatLngs) {
mRecordScreenLatLngs = recordScreenLatLngs;
}
@Override
public String toString() {
return "MapTaskInfo{" +
"taskName='" + taskName + '\'' +
", taskState='" + taskState + '\'' +
", taskTime='" + taskTime + '\'' +
", taskPeleasePeople='" + taskPeleasePeople + '\'' +
", taskApprover='" + taskApprover + '\'' +
", taskDescription='" + taskDescription + '\'' +
", taskAre='" + taskAre + '\'' +
", taskLatLng=" + taskLatLng +
", moveCenterLatLng=" + moveCenterLatLng +
", startLatLng=" + startLatLng +
", endLatLng=" + endLatLng +
", mPicLatLngs=" + mPicLatLngs +
", mRecordScreenLatLngs=" + mRecordScreenLatLngs +
'}';
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.contract;
import com.google.gson.Gson;
import java.text.SimpleDateFormat;
/**
* Created by Administrator on 2018/4/20.
*/
public interface UploadLocationToServerContract {
interface Model {
void upploadLocationToServer(Long loginTime, SimpleDateFormat sdf, String imei, Gson gson);
}
interface View {
void uploadSucc(String result);
void uploadFail(Throwable ex);
}
interface Presenter {
void upploadLocationToServer(Long loginTime, SimpleDateFormat sdf, String imei, Gson gson);
void uploadSucc(String result);
void uploadFail(Throwable ex);
}
}
<file_sep>package com.zhang.baselib.utils;
import android.content.Context;
import android.location.LocationManager;
/**
* Created by Administrator on 2018/4/18.
*/
public class LocationUtil {
/**
* Check whether GPS is enabled.
*
* @return {@code true}: enabled<br>{@code false}: disabled
*/
public static boolean isGpsEnabled(Context context) {
LocationManager lm = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
return lm.isProviderEnabled(LocationManager.GPS_PROVIDER);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.support.annotation.Nullable;
import com.chad.library.adapter.base.BaseMultiItemQuickAdapter;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.chad.library.adapter.base.BaseViewHolder;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.SeachBean;
import java.util.List;
/**
* Created by Administrator on 2018/5/18/018.
*/
public class SerchRecyAdapter extends BaseMultiItemQuickAdapter<SeachBean, BaseViewHolder> {
/**
* Same as QuickAdapter#QuickAdapter(Context,int) but with
* some initialization data.
*
* @param data A new list is created out of this one to avoid mutable list
*/
public SerchRecyAdapter(List<SeachBean> data) {
super(data);
addItemType(0, R.layout.arrangemission_task_item);
addItemType(1, R.layout.search_menu_item);
}
@Override
protected void convert(BaseViewHolder helper, SeachBean item) {
int itemViewType = helper.getItemViewType();
switch (itemViewType) {
case 0:
helper.setText(R.id.tv_taskname,"任务名称:"+item.getTaskName());
helper.setText(R.id.tv_taskid,"任务编号:"+item.getTaskId());
helper.setText(R.id.tv_fbr,"发布人:"+item.getPublisherName());
if ("0".equals(item.getState())) {
helper.setText(R.id.tv_state,"任务状态:未发布");
} else if ("1".equals(item.getState())) {
helper.setText(R.id.tv_state,"任务状态:已发布待审核");
} else if ("2".equals(item.getState())) {
helper.setText(R.id.tv_state,"任务状态:审核通过");
} else if ("3".equals(item.getState())) {
helper.setText(R.id.tv_state,"任务状态:已分配队员待执行");
} else if ("4".equals(item.getState())) {
helper.setText(R.id.tv_state,"任务状态:任务开始");
} else if ("5".equals(item.getState())) {
helper.setText(R.id.tv_state,"任务状态:任务完成");
}else if ("100".equals(item.getState())){
helper.setText(R.id.tv_state,"任务状态:巡查结束,待上传");
}
break;
case 1:
helper.setText(R.id.tv_menuname,"名称:"+item.getMenuItme());
break;
default:
break;
}
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Color;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.PowerManager;
import android.os.StrictMode;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.google.gson.Gson;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.DefaultContants;
import com.zhang.baselib.http.BaseHttpFactory;
import com.zhang.baselib.http.schedulers.RxSchedulersHelper;
import com.zhang.baselib.ui.views.RxDialogLoading;
import com.zhang.baselib.ui.views.RxDialogSureCancel;
import com.zhang.baselib.ui.views.RxToast;
import com.zhang.baselib.utils.ActivityUtil;
import com.zhang.baselib.utils.FileUtil;
import com.zhang.baselib.utils.NetUtil;
import com.zhang.baselib.utils.Util;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.OkingJPushManager;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.adapter.NavViewRecyAdapter;
import com.zhang.okinglawenforcementphone.beans.GreenMedia;
import com.zhang.okinglawenforcementphone.beans.GreenMember;
import com.zhang.okinglawenforcementphone.beans.GreenMissionLog;
import com.zhang.okinglawenforcementphone.beans.GreenMissionLogDao;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTaskDao;
import com.zhang.okinglawenforcementphone.beans.JPushMessageBean;
import com.zhang.okinglawenforcementphone.beans.NavBean;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.beans.Point;
import com.zhang.okinglawenforcementphone.beans.RecordLogOV;
import com.zhang.okinglawenforcementphone.beans.UpdateGreenMissionTaskOV;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.http.service.GDWaterService;
import com.zhang.okinglawenforcementphone.mvp.contract.GetHttpMissionLogContract;
import com.zhang.okinglawenforcementphone.mvp.contract.JPushMessageContract;
import com.zhang.okinglawenforcementphone.mvp.contract.UpdateMissionStateContract;
import com.zhang.okinglawenforcementphone.mvp.contract.UploadJobLogContract;
import com.zhang.okinglawenforcementphone.mvp.contract.UploadJobLogForPicContract;
import com.zhang.okinglawenforcementphone.mvp.contract.UploadSignaturePicContract;
import com.zhang.okinglawenforcementphone.mvp.contract.UploadVideoContract;
import com.zhang.okinglawenforcementphone.mvp.presenter.GetHttpMissionLogPresenter;
import com.zhang.okinglawenforcementphone.mvp.presenter.UpdateMissionStatePresenter;
import com.zhang.okinglawenforcementphone.mvp.presenter.UploadJobLogForPicPresenter;
import com.zhang.okinglawenforcementphone.mvp.presenter.UploadJobLogPresenter;
import com.zhang.okinglawenforcementphone.mvp.presenter.UploadSignaturePicPresenter;
import com.zhang.okinglawenforcementphone.mvp.presenter.UploadVideoPresenter;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import com.zhang.okinglawenforcementphone.mvp.ui.fragments.TaskInfoFragment;
import com.zhang.okinglawenforcementphone.mvp.ui.fragments.TaskPatrolFragment;
import com.zhang.okinglawenforcementphone.mvp.ui.fragments.TaskPicFragment;
import com.zhang.okinglawenforcementphone.mvp.ui.fragments.TaskProcessingResultFragment;
import com.zhang.okinglawenforcementphone.mvp.ui.fragments.TaskVideoFragment;
import com.zhang.okinglawenforcementphone.views.DividerItemDecoration;
import org.greenrobot.eventbus.EventBus;
import org.json.JSONException;
import org.json.JSONObject;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.concurrent.TimeUnit;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import butterknife.Unbinder;
import io.reactivex.Flowable;
import io.reactivex.Observable;
import io.reactivex.ObservableEmitter;
import io.reactivex.ObservableOnSubscribe;
import io.reactivex.ObservableSource;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.functions.Consumer;
import io.reactivex.functions.Function;
import io.reactivex.schedulers.Schedulers;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
public class MissionRecorActivity extends BaseActivity {
@BindView(R.id.toolbar)
Toolbar mToolbar;
@BindView(R.id.tv_title)
TextView mTvTitle;
@BindView(R.id.tv_savetag)
TextView mTvSavetag;
@BindView(R.id.nav_view)
RecyclerView mNavView;
@BindView(R.id.fab)
ImageView mFab;
@BindView(R.id.drawer_layout)
DrawerLayout mDrawerLayout;
@BindView(R.id.report_mission_button)
Button mReportMissionButton;
@BindView(R.id.complete_mission_button)
Button mCompleteMissionButton;
private int atPosition = 0;
private GreenMissionTask mission;
private GreenMissionLog mGreenMissionLog;
private RxDialogLoading mRxDialogLoading;
private PowerManager.WakeLock mWakeLock;
private Handler mHandler = new Handler();
private int picComPostion = 0;
private int veodComPosion = 0;
private int logSignPosion = 0;
private List<GreenMedia> mPhotoMedias = new ArrayList<>();
private List<GreenMedia> mVideoMedias = new ArrayList<>();
private boolean uploadLogPic = false, uploadSignPic = false, uploadLogVideo = false;
private UploadJobLogPresenter mUploadJobLogPresenter;
private UploadJobLogForPicPresenter mUploadJobLogForPicPresenter;
private UploadSignaturePicPresenter mUploadSignaturePicPresenter;
private UploadVideoPresenter mUploadVideoPresenter;
private Map<String, RequestBody> photoParams;
private SimpleDateFormat sdfVideo = new SimpleDateFormat("yyyyMMdd_HHmmss");
private int mDatePoor;
private long mBeforTime;
private String mLocJson;
private Gson mGson = new Gson();
private SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
private BroadcastReceiver mNetWokReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
boolean connected = NetUtil.isConnected(BaseApplication.getApplictaion());
if (connected) {
RxToast.error(BaseApplication.getApplictaion(), "连接", Toast.LENGTH_SHORT).show();
} else {
if (mRxDialogLoading != null) {
RxToast.error(BaseApplication.getApplictaion(), "网络断开了~~请检查网络再进行提交数据", Toast.LENGTH_SHORT).show();
mRxDialogLoading.cancel();
}
}
}
};
private Unbinder mBind;
private String mTaskId;
private GetHttpMissionLogPresenter mGetHttpMissionLogPresenter;
private RxDialogSureCancel mRxDialogSureCancel;
private NavViewRecyAdapter mNavViewRecyAdapter;
private TaskInfoFragment mTaskInfoFragment;
private TaskPatrolFragment mTaskPatrolFragment;
private TaskProcessingResultFragment mTaskProcessingResultFragment;
private TaskPicFragment mTaskPicFragment;
private TaskVideoFragment mTaskVideoFragment;
private Subscription mSubscription;
private SimpleDateFormat mSimpleDateFormat = new SimpleDateFormat("HH:mm:ss");
private boolean mCanSaveComplete = false;
private boolean mSummarySwisopen;
private boolean mLeaderSummarySwisopen;
private Handler mainHandler;
private int mPosition;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_mission_recor);
mBind = ButterKnife.bind(this);
StrictMode.VmPolicy.Builder builder = new StrictMode.VmPolicy.Builder();
StrictMode.setVmPolicy(builder.build());
builder.detectFileUriExposure();
initView();
initData();
setListerner();
}
private void setListerner() {
mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mission.getStatus().equals("5")) {
finish();
} else {
saveTheRecord();
finish();
}
}
});
mNavViewRecyAdapter.setOnItemClickListener(new BaseQuickAdapter.OnItemClickListener() {
@Override
public void onItemClick(BaseQuickAdapter adapter, View view, final int position) {
Observable.create(new ObservableOnSubscribe<Integer>() {
@Override
public void subscribe(ObservableEmitter<Integer> e) throws Exception {
saveTheRecord();
e.onNext(200);
}
}).subscribe(new Consumer<Integer>() {
@Override
public void accept(Integer integer) throws Exception {
FragmentTransaction fragmentTransaction = getSupportFragmentManager().beginTransaction();
atPosition = position;
switch (position) {
case 0: // Basic info
mTvTitle.setText("基本信息");
if (mTaskPatrolFragment != null) {
fragmentTransaction.hide(mTaskPatrolFragment);
}
if (mTaskProcessingResultFragment != null) {
fragmentTransaction.hide(mTaskProcessingResultFragment);
}
if (mTaskPicFragment != null) {
fragmentTransaction.hide(mTaskPicFragment);
}
if (mTaskVideoFragment != null) {
fragmentTransaction.hide(mTaskVideoFragment);
}
if (mTaskInfoFragment != null) {
fragmentTransaction.show(mTaskInfoFragment);
} else {
mTaskInfoFragment = TaskInfoFragment.newInstance(null, null);
mTaskInfoFragment.setMission(mission);
mTaskInfoFragment.setGreenMissionLog(mGreenMissionLog);
fragmentTransaction.add(R.id.rl_mision, mTaskInfoFragment, "TaskInfoFragment");
}
fragmentTransaction.commitAllowingStateLoss();
break;
case 1: // Patrol details
mTvTitle.setText("巡查情况");
if (mTaskInfoFragment != null) {
fragmentTransaction.hide(mTaskInfoFragment);
}
if (mTaskProcessingResultFragment != null) {
fragmentTransaction.hide(mTaskProcessingResultFragment);
}
if (mTaskPicFragment != null) {
fragmentTransaction.hide(mTaskPicFragment);
}
if (mTaskVideoFragment != null) {
fragmentTransaction.hide(mTaskVideoFragment);
}
if (mTaskPatrolFragment != null) {
fragmentTransaction.show(mTaskPatrolFragment);
} else {
mTaskPatrolFragment = TaskPatrolFragment.newInstance(null, null);
mTaskPatrolFragment.setMission(mission);
mTaskPatrolFragment.setGreenMissionLog(mGreenMissionLog);
fragmentTransaction.add(R.id.rl_mision, mTaskPatrolFragment, "TaskPatrolFragment");
}
fragmentTransaction.commitAllowingStateLoss();
break;
case 2: // Processing result
switchResults(fragmentTransaction);
break;
case 3: // Photos
mTvTitle.setText("拍照");
if (mTaskInfoFragment != null) {
fragmentTransaction.hide(mTaskInfoFragment);
}
if (mTaskPatrolFragment != null) {
fragmentTransaction.hide(mTaskPatrolFragment);
}
if (mTaskProcessingResultFragment != null) {
fragmentTransaction.hide(mTaskProcessingResultFragment);
}
if (mTaskVideoFragment != null) {
fragmentTransaction.hide(mTaskVideoFragment);
}
if (mTaskPicFragment != null) {
fragmentTransaction.show(mTaskPicFragment);
} else {
mTaskPicFragment = TaskPicFragment.newInstance(null, null);
mTaskPicFragment.setMission(mission);
mTaskPicFragment.setGreenMissionLog(mGreenMissionLog);
fragmentTransaction.add(R.id.rl_mision, mTaskPicFragment, "TaskPicFragment");
}
fragmentTransaction.commitAllowingStateLoss();
break;
case 4: // Video recording
mTvTitle.setText("录视频");
if (mTaskInfoFragment != null) {
fragmentTransaction.hide(mTaskInfoFragment);
}
if (mTaskPatrolFragment != null) {
fragmentTransaction.hide(mTaskPatrolFragment);
}
if (mTaskProcessingResultFragment != null) {
fragmentTransaction.hide(mTaskProcessingResultFragment);
}
if (mTaskPicFragment != null) {
fragmentTransaction.hide(mTaskPicFragment);
}
if (mTaskVideoFragment != null) {
fragmentTransaction.show(mTaskVideoFragment);
} else {
mTaskVideoFragment = TaskVideoFragment.newInstance(null, null);
mTaskVideoFragment.setMission(mission);
mTaskVideoFragment.setGreenMissionLog(mGreenMissionLog);
fragmentTransaction.add(R.id.rl_mision, mTaskVideoFragment, "TaskVideoFragment");
}
fragmentTransaction.commitAllowingStateLoss();
break;
default:
break;
}
mDrawerLayout.closeDrawers();
}
});
}
});
}
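// Hides the other fragments and shows (creating it if necessary) the processing-result fragment.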
private void switchResults(FragmentTransaction fragmentTransaction) {
mTvTitle.setText("处理结果");
if (mTaskInfoFragment != null) {
fragmentTransaction.hide(mTaskInfoFragment);
}
if (mTaskPatrolFragment != null) {
fragmentTransaction.hide(mTaskPatrolFragment);
}
if (mTaskPicFragment != null) {
fragmentTransaction.hide(mTaskPicFragment);
}
if (mTaskVideoFragment != null) {
fragmentTransaction.hide(mTaskVideoFragment);
}
if (mTaskProcessingResultFragment != null) {
mTaskProcessingResultFragment.setMission(mission);
fragmentTransaction.show(mTaskProcessingResultFragment);
} else {
mTaskProcessingResultFragment = TaskProcessingResultFragment.newInstance(null, null);
mTaskProcessingResultFragment.setMission(mission);
mTaskProcessingResultFragment.setGreenMissionLog(mGreenMissionLog);
fragmentTransaction.add(R.id.rl_mision, mTaskProcessingResultFragment, "TaskProcessingResultFragment");
}
fragmentTransaction.commitAllowingStateLoss();
}
private void initData() {
BaseApplication.getApplictaion().registerReceiver(mNetWokReceiver, new IntentFilter("oking.network"));
// Auto-save on a timer
if (mission != null) {
Flowable.interval(5, 10, TimeUnit.SECONDS)
.onBackpressureDrop()
.subscribe(new Subscriber<Long>() {
@Override
public void onSubscribe(Subscription s) {
mSubscription = s;
s.request(Long.MAX_VALUE);
}
@Override
public void onNext(Long aLong) {
// Check which page is currently displayed
if (mission.getStatus().equals("5") || mission.getStatus().equals("100")) {
} else {
switch (atPosition) {
case 0:
Log.i("Oking5", "保存基本信息");
saveTaskInfo();
break;
case 1:
Log.i("Oking5", "保存巡查情况");
savePatrol();
break;
case 2:
Log.i("Oking5", "保存处理结果");
saveResults();
break;
case 3:
Log.i("Oking5", "保存拍照"); //不用再去调用保存操作,每次拍照都保存了
break;
case 4:
Log.i("Oking5", "保存视频");
break;
default:
break;
}
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
mTvSavetag.setText("上次保存时间:" + mSimpleDateFormat.format(System.currentTimeMillis()));
}
});
}
}
@Override
public void onError(Throwable t) {
}
@Override
public void onComplete() {
}
});
}
}
private void saveResults() {
mTaskProcessingResultFragment.saveResults();
}
private void savePatrol() {
mTaskPatrolFragment.savePatrol();
}
private void saveTaskInfo() {
mTaskInfoFragment.saveTaskInfo();
}
private void initView() {
ArrayList<NavBean> navBeans = new ArrayList<>();
NavBean navBean1 = new NavBean();
navBean1.setIcon(R.mipmap.icon_taskinfo);
navBean1.setTitle("基本信息");
navBeans.add(navBean1);
NavBean navBean2 = new NavBean();
navBean2.setIcon(R.mipmap.icon_taskpatrl);
navBean2.setTitle("巡查情况");
navBeans.add(navBean2);
NavBean navBean3 = new NavBean();
navBean3.setIcon(R.mipmap.icon_result);
navBean3.setTitle("处理结果");
navBeans.add(navBean3);
NavBean navBean4 = new NavBean();
navBean4.setIcon(R.mipmap.icon_tagpic);
navBean4.setTitle("拍照");
navBeans.add(navBean4);
NavBean navBean5 = new NavBean();
navBean5.setIcon(R.mipmap.icon_tagvideo);
navBean5.setTitle("录视频");
navBeans.add(navBean5);
mNavViewRecyAdapter = new NavViewRecyAdapter(R.layout.navview_item, navBeans);
mNavViewRecyAdapter.openLoadAnimation(BaseQuickAdapter.SLIDEIN_RIGHT);
mNavView.setLayoutManager(new LinearLayoutManager(BaseApplication.getApplictaion(), LinearLayoutManager.VERTICAL, false));
mNavView.addItemDecoration(new DividerItemDecoration(BaseApplication.getApplictaion(), 0, 1, Color.DKGRAY));
mNavView.setAdapter(mNavViewRecyAdapter);
PowerManager pm = (PowerManager) BaseApplication.getApplictaion().getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK,
"MissionRecordFragment");
mWakeLock.acquire();
Intent intent = getIntent();
mTaskId = intent.getStringExtra("taskId");
mPosition = intent.getIntExtra("position", -1);
if (mTaskId != null) {
// Fetch the log over the network (try to get the server's log via HTTP; if that fails, create a new local log)
getNetData(mTaskId);
}
mDrawerLayout.openDrawer(Gravity.LEFT);
}
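// Shows a loading dialog, loads the local task by taskId and fetches its patrol log via GetHttpMissionLogPresenter before building the fragments; finishes the activity when the task cannot be found locally.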
private void getNetData(String taskId) {
if (mRxDialogLoading == null) {
mRxDialogLoading = new RxDialogLoading(this, true, new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialogInterface) {
dialogInterface.cancel();
}
});
}
mRxDialogLoading.setLoadingText("初始化数据中,请稍等...");
mRxDialogLoading.show();
mission = GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao().queryBuilder().where(GreenMissionTaskDao.Properties.Taskid.eq(taskId)).unique();
if (mGetHttpMissionLogPresenter == null) {
mGetHttpMissionLogPresenter = new GetHttpMissionLogPresenter(new GetHttpMissionLogContract.View() {
@Override
public void loadHttpMissionLogSucc(GreenMissionLog greenMissionLog) {
mRxDialogLoading.cancel();
mGreenMissionLog = greenMissionLog;
initFragment();
}
@Override
public void loadEmpty(GreenMissionLog greenMissionLog) {
mRxDialogLoading.cancel();
mGreenMissionLog = greenMissionLog;
initFragment();
}
});
}
if (mission != null) {
mGetHttpMissionLogPresenter.getHttpMissionLog(mission);
} else {
mRxDialogLoading.cancel();
finish();
}
}
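// Hides the report/complete buttons according to the task status, then shows the basic-info fragment.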
private void initFragment() {
if (mCompleteMissionButton != null && mReportMissionButton != null) {
if (mission.getStatus().equals("100")) {//任务已完成,不能再完成
mCompleteMissionButton.setVisibility(View.GONE);
} else if (mission.getStatus().equals("5")) {
// Task already reported, cannot be reported again
mReportMissionButton.setVisibility(View.GONE);
mCompleteMissionButton.setVisibility(View.GONE);
} else if (mission.getStatus().equals("9")) {//任务被退回修改
mReportMissionButton.setVisibility(View.GONE);
mCompleteMissionButton.setVisibility(View.GONE);
}
}
FragmentTransaction fragmentTransaction = getSupportFragmentManager().beginTransaction();
mTaskInfoFragment = TaskInfoFragment.newInstance(null, null);
mTaskInfoFragment.setMission(mission);
mTaskInfoFragment.setGreenMissionLog(mGreenMissionLog);
fragmentTransaction.replace(R.id.rl_mision, mTaskInfoFragment, "TaskInfoFragment").commitAllowingStateLoss();
}
@Override
public void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
public void onResume() {
super.onResume();
if (mWakeLock == null) {
// keep screen on
PowerManager pm = (PowerManager) BaseApplication.getApplictaion().getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK,
"MissionRecordFragment");
mWakeLock.acquire();
}
}
@Override
protected void onDestroy() {
super.onDestroy();
BaseApplication.getApplictaion().unregisterReceiver(mNetWokReceiver);
if (mSubscription != null) {
mSubscription.cancel();
mSubscription = null;
}
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
mBind.unbind();
}
public void invisiabelCompleteMissionButton() {
if (mRxDialogSureCancel == null) {
mRxDialogSureCancel = new RxDialogSureCancel(MissionRecorActivity.this); // Confirmation dialog
}
mRxDialogSureCancel.setContent("完成巡查后将停止记录巡查定位,是否继续?");
mRxDialogSureCancel.getTvSure().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
RxToast.warning(BaseApplication.getApplictaion(), "请去签名!!", Toast.LENGTH_SHORT, true).show();
mission.setExecute_end_time(System.currentTimeMillis());
mission.setStatus("100");
// Store the patrol trajectory in the local database
Schedulers.io().createWorker().schedule(new Runnable() {
@Override
public void run() {
String locationTrajectory = getLocationTrajectory();
Log.i("Oking1", ">>>>>>>>>>>>>" + locationTrajectory);
mGreenMissionLog.setLocJson(locationTrajectory);
GreenDAOManager.getInstence().getDaoSession().getGreenMissionLogDao().update(mGreenMissionLog);
GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao().update(mission);
}
});
if (DefaultContants.ISHTTPLOGIN) {
// Update the task status
UpdateMissionStatePresenter updateMissionStatePresenter = new UpdateMissionStatePresenter(new UpdateMissionStateContract.View() {
@Override
public void updateMissionStateSucc(String result) {
Log.i("Oking", "巡查完毕" + result);
saveTheRecord();
mRxDialogSureCancel.cancel();
}
@Override
public void updateMissionStateFail(Throwable ex) {
Log.i("Oking", "巡查异常" + ex.getMessage());
saveTheRecord();
mRxDialogSureCancel.cancel();
}
});
updateMissionStatePresenter.updateMissionState(mission.getTaskid(), "",
OkingContract.SDF.format(mission.getExecute_end_time()), 4);
}
FragmentTransaction fragmentTransaction = getSupportFragmentManager().beginTransaction();
switchResults(fragmentTransaction);
mCompleteMissionButton.setVisibility(View.GONE);
}
});
mRxDialogSureCancel.getTvCancel().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mRxDialogSureCancel.cancel();
}
});
mRxDialogSureCancel.show();
}
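// Assembles the patrol trajectory as JSON: reads the daily location .txt files covering the task's execution window, keeps only points inside that window, and thins the list when it exceeds 100 points.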
private String getLocationTrajectory() {
Log.i("Oking1", ">>>>>>>>>>>>>遍历轨迹");
Long beginTime = mission.getExecute_start_time();
if (beginTime == null) {
beginTime = System.currentTimeMillis() - 1000 * 60 * 20;
}
Long endTime = mission.getExecute_end_time();
if (endTime == null) {
endTime = System.currentTimeMillis();
mission.setExecute_end_time(endTime);
}
mBeforTime = beginTime - 24 * 60 * 60 * 1000;
final String file1 = sdf.format(beginTime);
final ArrayList<Point> locationPath = new ArrayList<>();
mDatePoor = Util.getDatePoor(beginTime, endTime);
if (mDatePoor < 1) { // Start and end fall on the same day
// Log.i("Oking","是同一天");
List<String> locationPos = FileUtil.readFile2List(Environment.getExternalStorageDirectory() + "/oking/location/" + file1 + ".txt", "UTF-8");
if (locationPos != null) {
for (String s : locationPos) {
String[] items = s.split(",");
if (items.length != 3) {
continue;
}
String mLatitude = items[0];
String mLongitude = items[1];
String mDatetime = items[2];
if (Long.parseLong(mDatetime) > beginTime && Long.parseLong(mDatetime) < endTime) {
Point location = new Point();
location.setLatitude(Double.valueOf(mLatitude));
location.setLongitude(Double.valueOf(mLongitude));
location.setDatetime(Long.valueOf(mDatetime));
locationPath.add(location);
}
}
}
} else {
for (int i = 0; i <= mDatePoor; i++) {
File file = new File(Environment.getExternalStorageDirectory() + "/oking/location/" + getAfterData(mBeforTime) + ".txt");
if (file.exists()) {
// Log.i("Oking","不是同一天"+file.getName());
List<String> locationPos = FileUtil.readFile2List(file, "UTF-8");
for (String s : locationPos) {
String[] items = s.split(",");
if (items.length != 3) {
continue;
}
String Latitude = items[0];
String Longitude = items[1];
String datetime = items[2];
if (Long.parseLong(datetime) > beginTime && Long.parseLong(datetime) < endTime) {
Point location = new Point();
location.setLatitude(Double.valueOf(Latitude));
location.setLongitude(Double.valueOf(Longitude));
location.setDatetime(Long.valueOf(datetime));
locationPath.add(location);
}
}
}
}
}
// Thin out the points, otherwise the set gets too large
if (locationPath.size() > 100) {
ArrayList<Point> newLocationPath = new ArrayList<>();
for (int i = 0; i < locationPath.size(); i = i + 2) {
newLocationPath.add(locationPath.get(i));
}
mLocJson = mGson.toJson(newLocationPath);
} else {
mLocJson = mGson.toJson(locationPath);
}
return mLocJson;
}
@OnClick({R.id.report_mission_button, R.id.complete_mission_button, R.id.fab})
public void onViewClicked(View view) {
switch (view.getId()) {
case R.id.fab:
if (mDrawerLayout.isDrawerOpen(Gravity.LEFT)) {
mDrawerLayout.closeDrawers();
} else {
mDrawerLayout.openDrawer(Gravity.LEFT);
}
break;
case R.id.report_mission_button: // Report the task
reportingTasks();
break;
case R.id.complete_mission_button: // Finish the patrol
invisiabelCompleteMissionButton();
break;
default:
break;
}
}
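// Shows a confirmation dialog, then checks that the task is finished (or returned), that every member has signed and that the required summaries are filled in before uploading the log, photos, signatures and videos.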
private void reportingTasks() {
if (mRxDialogSureCancel == null) {
mRxDialogSureCancel = new RxDialogSureCancel(MissionRecorActivity.this); // Confirmation dialog
}
mRxDialogSureCancel.setContent("上报任务后不能修改日志,是否继续?");
mRxDialogSureCancel.getTvSure().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mSubscription.cancel();
saveTheRecord();
GreenMissionLog unique = GreenDAOManager.getInstence().getDaoSession().getGreenMissionLogDao().queryBuilder().where(GreenMissionLogDao.Properties.Task_id.eq(mTaskId)).unique();
if (!mission.getStatus().equals("100") &&
!mission.getStatus().equals("9")) {
RxToast.warning(BaseApplication.getApplictaion(), "任务未完成,不能上报", Toast.LENGTH_SHORT, true).show();
return;
}
if (mission.getMembers().size() > 0) {
for (GreenMember greenMember : mission.getMembers()) {
String signPic = greenMember.getSignPic();
if (signPic != null) {
File file = new File(signPic);
if (file.exists()) {
mCanSaveComplete = true;
} else {
mCanSaveComplete = false;
RxToast.warning(BaseApplication.getApplictaion(), "存在成员未签名,不能上报任务!", Toast.LENGTH_SHORT, true).show();
return;
}
} else {
mCanSaveComplete = false;
RxToast.warning(BaseApplication.getApplictaion(), "存在成员未签名,不能上报任务!", Toast.LENGTH_SHORT, true).show();
return;
}
}
}
if (mCanSaveComplete) {
mSummarySwisopen = unique.getSummarySwisopen();
mLeaderSummarySwisopen = unique.getLeaderSummarySwisopen();
String patrol = unique.getPatrol();
String dzyj = unique.getDzyj();
if (!mSummarySwisopen && TextUtils.isEmpty(patrol)) {
RxToast.warning(BaseApplication.getApplictaion(), "巡查情况未填写,不能上报任务!", Toast.LENGTH_SHORT, true).show();
return;
}
if (!mLeaderSummarySwisopen && TextUtils.isEmpty(dzyj)) {
RxToast.warning(BaseApplication.getApplictaion(), "处理结果未填写,不能上报任务!", Toast.LENGTH_SHORT, true).show();
return;
}
if (mRxDialogLoading == null) {
mRxDialogLoading = new RxDialogLoading(MissionRecorActivity.this, false, new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialogInterface) {
dialogInterface.cancel();
}
});
}
unique.resetGreenMedia();
List<GreenMedia> greenMedias = unique.getGreenMedia();
for (GreenMedia media : greenMedias) {
if (media.getType() == 1) { // 1 = log photo
mPhotoMedias.add(media);
} else if (media.getType() == 2) { // 2 = video
mVideoMedias.add(media);
} else if (media.getType() == 3) {
// 3 = audio
} else {
// Signature image
}
}
mRxDialogSureCancel.cancel();
mRxDialogLoading.setLoadingText("上传数据中...图片:" + picComPostion + "/" + mPhotoMedias.size() + "视频:" + veodComPosion + "/" + mVideoMedias.size());
mRxDialogLoading.show();
picComPostion = 0;
veodComPosion = 0;
logSignPosion = 0;
// Upload the data
httpSaveRecord(unique);
} else {
RxToast.warning(BaseApplication.getApplictaion(), "存在成员未签名,不能上报任务!", Toast.LENGTH_SHORT, true).show();
return;
}
}
});
mRxDialogSureCancel.getTvCancel().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mRxDialogSureCancel.cancel();
}
});
mRxDialogSureCancel.show();
}
private void httpSaveRecord(GreenMissionLog unique) {
RecordLogOV recordLogOV = new RecordLogOV();
recordLogOV.setArea(mission.getRwqyms());
recordLogOV.setEquipment(unique.getEquipment());
recordLogOV.setGreenMissionLog(mGreenMissionLog);
recordLogOV.setGreenMissionTask(mission);
recordLogOV.setLeaderSummary(unique.getDzyj());
if (mSummarySwisopen || mLeaderSummarySwisopen) {
recordLogOV.setSwisopen(true);
} else {
recordLogOV.setSwisopen(false);
}
recordLogOV.setParts(unique.getOther_part());
recordLogOV.setSeleMattersPos(unique.getItem());
recordLogOV.setSelePlanPos(unique.getPlan());
recordLogOV.setSummary(unique.getPatrol());
recordLogOV.setTime(OkingContract.SDF.format(System.currentTimeMillis()));
if (mUploadJobLogPresenter == null) {
mUploadJobLogPresenter = new UploadJobLogPresenter(new UploadJobLogContract.View() {
@Override
public void uploadJobLogSucc(String result) {
Log.i("Oking", result);
try {
JSONObject jsonObject = new JSONObject(result);
int code = jsonObject.getInt("code");
if (code == 0) {
String serverId = jsonObject.getString("id");
mGreenMissionLog.setServer_id(serverId);
GreenDAOManager.getInstence().getDaoSession().getGreenMissionLogDao().update(mGreenMissionLog);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (mPhotoMedias.size() > 0) {
// Upload the patrol log photos
uploadPic();
} else {
uploadLogPic = true;
}
// Upload the signature images
uploadSignedPic();
// Upload the patrol videos
if (mVideoMedias.size() > 0) {
uploadVideo();
} else {
uploadLogVideo = true;
}
}
@Override
public void uploadJobLogFail(Throwable ex) {
Log.i("Oking", "异常:" + ex.toString());
if (mRxDialogLoading != null) {
mRxDialogLoading.cancel();
}
RxToast.error(BaseApplication.getApplictaion(), "当前4G网络不稳定,上传失败,请稍后重试!20", Toast.LENGTH_SHORT, true).show();
}
});
}
mUploadJobLogPresenter.uploadJobLog(recordLogOV, mGson);
}
private void uploadVideo() {
photoParams = new HashMap<>();
if (mUploadVideoPresenter == null) {
mUploadVideoPresenter = new UploadVideoPresenter(new UploadVideoContract.View() {
@Override
public void loadVideoSucc(String result) {
try {
JSONObject object = new JSONObject(result);
int code = object.getInt("code");
if (code == 200) {
veodComPosion++;
mRxDialogLoading.getTextView().setText("上传数据中...图片:" + picComPostion + "/" + mPhotoMedias.size() + "视频:" + veodComPosion + "/" + mVideoMedias.size());
if (veodComPosion == mVideoMedias.size()) {
uploadLogVideo = true;
checkChangeState();
}
} else {
if (mRxDialogLoading != null) {
mRxDialogLoading.cancel();
}
RxToast.error(BaseApplication.getApplictaion(), "当前4G网络不稳定,上传失败,请稍后重试!1", Toast.LENGTH_SHORT, true).show();
}
} catch (JSONException e) {
e.printStackTrace();
if (mRxDialogLoading != null) {
mRxDialogLoading.cancel();
}
RxToast.error(BaseApplication.getApplictaion(), "当前4G网络不稳定,上传失败,请稍后重试!2", Toast.LENGTH_SHORT, true).show();
}
}
@Override
public void uploadRetry(Throwable ex) {
veodComPosion = 0;
RxToast.warning("网络有点开小差~~正在努力重试!!");
}
@Override
public void loadVideoFail(Throwable ex) {
}
@Override
public void uploadIsCount(int pos) {
veodComPosion = pos;
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
mRxDialogLoading.getTextView().setText("上传数据中...图片:" + picComPostion + "/" + mPhotoMedias.size() + "视频:" + veodComPosion + "/" + mVideoMedias.size());
if (veodComPosion == mVideoMedias.size()) {
uploadLogVideo = true;
checkChangeState();
}
}
});
}
});
}
mUploadVideoPresenter.uploadVideo(mGreenMissionLog, mVideoMedias, photoParams, sdfVideo, mGson);
}
private void uploadSignedPic() {
photoParams = new HashMap<>();
if (mUploadSignaturePicPresenter == null) {
mUploadSignaturePicPresenter = new UploadSignaturePicPresenter(new UploadSignaturePicContract.View() {
@Override
public void uploadSignaturePicSucc(String result) {
Log.i("Oking1", "签名上传成功回掉》》》》》》》》》》》》》》》》" + result);
try {
JSONObject object = new JSONObject(result);
int code = object.getInt("code");
if (code == 200) {
logSignPosion++;
if (logSignPosion == mission.getMembers().size()) {
uploadSignPic = true;
checkChangeState();
}
} else {
if (mRxDialogLoading != null) {
mRxDialogLoading.cancel();
}
RxToast.error(BaseApplication.getApplictaion(), "当前4G网络不稳定,上传失败,请稍后重试!14", Toast.LENGTH_SHORT, true).show();
}
} catch (JSONException e) {
e.printStackTrace();
if (mRxDialogLoading != null) {
mRxDialogLoading.cancel();
}
RxToast.error(BaseApplication.getApplictaion(), "当前4G网络不稳定,上传失败,请稍后重试!15", Toast.LENGTH_SHORT, true).show();
}
}
@Override
public void uploadIsCount(int pos) {
logSignPosion = pos;
if (logSignPosion == mission.getMembers().size()) {
uploadSignPic = true;
checkChangeState();
}
}
@Override
public void uploadRetry(Throwable ex) {
logSignPosion = 0;
RxToast.warning("网络有点开小差~~正在努力重试!!");
}
@Override
public void uploadSignatureFail(Throwable ex) {
if (mRxDialogLoading != null) {
mRxDialogLoading.cancel();
}
RxToast.error(BaseApplication.getApplictaion(), "当前4G网络不稳定,上传失败,请稍后重试!16", Toast.LENGTH_SHORT, true).show();
}
});
}
mUploadSignaturePicPresenter.uploadSignaturePic(mGreenMissionLog, mission, photoParams);
}
private void uploadPic() {
photoParams = new HashMap<>();
if (mUploadJobLogForPicPresenter == null) {
mUploadJobLogForPicPresenter = new UploadJobLogForPicPresenter(new UploadJobLogForPicContract.View() {
@Override
public void uploadSucc(String result) {
try {
JSONObject object = new JSONObject(result);
int code = object.getInt("code");
if (code == 200) {
picComPostion++;
mRxDialogLoading.getTextView().setText("上传数据中...图片:" + picComPostion + "/" + mPhotoMedias.size() + "视频:" + veodComPosion + "/" + mVideoMedias.size());
if (picComPostion == mPhotoMedias.size()) {
uploadLogPic = true;
checkChangeState();
}
} else {
if (mRxDialogLoading != null) {
mRxDialogLoading.cancel();
}
RxToast.error(BaseApplication.getApplictaion(), "当前4G网络不稳定,上传失败,请稍后重试!1", Toast.LENGTH_SHORT, true).show();
}
} catch (JSONException e) {
e.printStackTrace();
if (mRxDialogLoading != null) {
mRxDialogLoading.cancel();
}
RxToast.error(BaseApplication.getApplictaion(), "当前4G网络不稳定,上传失败,请稍后重试!2", Toast.LENGTH_SHORT, true).show();
}
}
@Override
public void uploadRetry(Throwable ex) {
picComPostion = 0;
RxToast.warning("网络有点开小差~~正在努力重试!!");
}
@Override
public void uploadFail(Throwable ex) {
if (mRxDialogLoading != null) {
mRxDialogLoading.cancel();
}
Log.i("Oking", "上传失败,请稍后重试!3" + ex.toString());
RxToast.error(BaseApplication.getApplictaion(), "当前4G网络不稳定,上传失败,请稍后重试!3", Toast.LENGTH_SHORT, true).show();
}
@Override
public void uploadIsCount(int pos) {
picComPostion = pos;
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
mRxDialogLoading.getTextView().setText("上传数据中...图片:" + picComPostion + "/" + mPhotoMedias.size() + "视频:" + veodComPosion + "/" + mVideoMedias.size());
if (picComPostion == mPhotoMedias.size()) {
uploadLogPic = true;
checkChangeState();
}
}
});
}
@Override
public void uploadPositionFail(Throwable ex) {
Log.i("Oking", "位置数据解析异常" + ex.toString());
RxToast.error(BaseApplication.getApplictaion(), "位置数据解析异常", Toast.LENGTH_SHORT, true).show();
}
});
}
mUploadJobLogForPicPresenter.uploadJobLogForPic(mGson, photoParams, mGreenMissionLog, mPhotoMedias);
}
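// Fires the final completion call once log photos, signature images and videos have all finished uploading.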
private void checkChangeState() {
Log.i("Oking", uploadLogPic + "----uploadLogPic");
Log.i("Oking", uploadSignPic + "----uploadSignPic");
Log.i("Oking", uploadLogVideo + "----uploadLogVideo");
if (uploadLogPic && uploadSignPic && uploadLogVideo) {
httpCompleteMission();
}
}
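// Marks the uploaded record as complete on the server, sets the task status to 5 (reported) locally and remotely, posts an EventBus update, notifies the approver via JPush and closes the related activities.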
private void httpCompleteMission() {
BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.updateMissionRecordState(mGreenMissionLog.getServer_id(), 1)
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.observeOn(Schedulers.io())
.concatMap(new Function<ResponseBody, ObservableSource<ResponseBody>>() {
@Override
public Observable<ResponseBody> apply(ResponseBody responseBody) throws Exception {
return BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.updateMissionState(mission.getTaskid(), "", "", 5);
}
})
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
if (mRxDialogLoading != null) {
mRxDialogLoading.cancel();
}
String string = responseBody.string();
mission.setStatus("5");
mission.setExamine_status(0);
GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao().update(mission);
UpdateGreenMissionTaskOV greenMissionOV = new UpdateGreenMissionTaskOV();
greenMissionOV.setMissionTask(mission);
greenMissionOV.setPosition(mPosition);
EventBus.getDefault().post(greenMissionOV);
// Send a remote push notification
JPushMessageBean jPushMessageBean = new JPushMessageBean();
JPushMessageBean.AudienceBean audienceBean = new JPushMessageBean.AudienceBean();
ArrayList<String> alias = new ArrayList<>();
alias.add(mission.getApproved_person());
audienceBean.setAlias(alias);
jPushMessageBean.setAudience(audienceBean);
JPushMessageBean.NotificationBean notificationBean = new JPushMessageBean.NotificationBean();
notificationBean.setAlert("新消息:" + mission.getTask_name());
JPushMessageBean.NotificationBean.AndroidBean androidBean = new JPushMessageBean.NotificationBean.AndroidBean();
JPushMessageBean.NotificationBean.AndroidBean.ExtrasBean extrasBean = new JPushMessageBean.NotificationBean.AndroidBean.ExtrasBean();
extrasBean.setOpenType("2");
extrasBean.setTaskid(mission.getTaskid());
androidBean.setExtras(extrasBean);
notificationBean.setAndroid(androidBean);
ArrayList<String> platforms = new ArrayList<>();
platforms.add("android");
jPushMessageBean.setPlatform(platforms);
jPushMessageBean.setNotification(notificationBean);
OkingJPushManager.getInstence().pushMessage(jPushMessageBean, new JPushMessageContract.View() {
@Override
public void pushMessageSucc(String result) {
}
@Override
public void pushMessageFail(Throwable ex) {
RxToast.error(ex.getMessage());
}
});
if (mainHandler == null) {
mainHandler = new Handler();
}
mainHandler.postDelayed(new Runnable() {
@Override
public void run() {
Stack<Activity> activityStack = ActivityUtil.getActivityStack();
for (Activity activity : activityStack) {
if (activity.getClass().getSimpleName().equals("MissionActivity")) {
activity.finish();
}
}
finish();
}
}, 100);
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
Log.i("Oking", ">>>>>>>>>>异常");
if (mRxDialogLoading != null) {
mRxDialogLoading.cancel();
}
}
});
}
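// Saves the data of whichever tab is currently visible, unless the task has already been reported (status "5").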
public void saveTheRecord() {
if (mission.getStatus().equals("5")) {
} else {
switch (atPosition) {
case 0:
saveTaskInfo();
break;
case 1:
savePatrol();
break;
case 2:
saveResults();
break;
case 3:
break;
case 4:
break;
default:
break;
}
}
}
private String getAfterData(long time) {
// If you need to compute the date going forward, change - to +
Date newDate = new Date(time + 24 * 60 * 60 * 1000);
mBeforTime = newDate.getTime();
String dateOk = sdf.format(newDate);
return dateOk;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.fragments;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.ui.views.RxToast;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.adapter.TaskLogPicRecyAdapter;
import com.zhang.okinglawenforcementphone.beans.GreenLocation;
import com.zhang.okinglawenforcementphone.beans.GreenMedia;
import com.zhang.okinglawenforcementphone.beans.GreenMissionLog;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.mvp.contract.LoadBasicLogContract;
import com.zhang.okinglawenforcementphone.mvp.contract.LoadTaskPicContract;
import com.zhang.okinglawenforcementphone.mvp.presenter.LoadBasicLogPresenter;
import com.zhang.okinglawenforcementphone.mvp.presenter.LoadTaskPicPresenter;
import com.zhang.okinglawenforcementphone.mvp.ui.activitys.ImageViewActivity;
import com.zhang.okinglawenforcementphone.mvp.ui.activitys.PlayVideoOnlineActivity;
import org.apache.commons.lang3.StringUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
public class ApprovalPicVideoFragment extends Fragment {
// TODO: Rename parameter arguments, choose names that match
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
private static final String ARG_PARAM1 = "param1";
private static final String ARG_PARAM2 = "param2";
@BindView(R.id.pic_gridView)
RecyclerView mPicGridView;
Unbinder unbinder;
@BindView(R.id.swipeRefreshLayout)
SwipeRefreshLayout mSwipeRefreshLayout;
// TODO: Rename and change types of parameters
private String mParam1;
private String mParam2;
private View mInflate;
private TaskLogPicRecyAdapter mTaskLogPicRecyAdapter;
private GreenMissionTask mGreenMissionTask;
private GreenMissionLog mGreenMissionLog;
private LoadBasicLogPresenter mLoadBasicLogPresenter;
private LoadTaskPicPresenter mLoadTaskPicPresenter;
private boolean mIsLoadMeadiaSucc = false;
public ApprovalPicVideoFragment() {
// Required empty public constructor
}
public static ApprovalPicVideoFragment newInstance(String param1, String param2) {
ApprovalPicVideoFragment fragment = new ApprovalPicVideoFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM1, param1);
args.putString(ARG_PARAM2, param2);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mParam1 = getArguments().getString(ARG_PARAM1);
mParam2 = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
if (mInflate == null) {
mInflate = inflater.inflate(R.layout.fragment_approval_pic_video, container, false);
}
unbinder = ButterKnife.bind(this, mInflate);
initData();
setListerner();
return mInflate;
}
private void setListerner() {
mSwipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
@Override
public void onRefresh() {
loadLogData();
}
});
mTaskLogPicRecyAdapter.setOnItemClickListener(new BaseQuickAdapter.OnItemClickListener() {
@Override
public void onItemClick(BaseQuickAdapter adapter, View view, int position) {
List<GreenMedia> mediaList = adapter.getData();
GreenMedia greenMedia = mediaList.get(position);
if (greenMedia.getType() == 2) {
Intent intent = new Intent(getActivity(), PlayVideoOnlineActivity.class);
/**
 * Gotcha: the .MP4 suffix must be stripped from the path (backend framework limitation).
 */
intent.putExtra("path", StringUtils.substring(greenMedia.getPath(), 0, greenMedia.getPath().length() - 4));
startActivity(intent);
} else if (greenMedia.getType() == 1) {
Intent intent = new Intent(getActivity(), ImageViewActivity.class);
intent.putExtra("time", OkingContract.SDF.format(greenMedia.getTime()));
intent.putExtra("picLocation", greenMedia.getSouceLocation().getLatitude() + "," + greenMedia.getSouceLocation().getLongitude());
intent.setData(Uri.parse(greenMedia.getPath()));
startActivity(intent);
} else {
Intent intent = new Intent(getActivity(), ImageViewActivity.class);
intent.setData(Uri.parse(greenMedia.getPath()));
startActivity(intent);
}
}
});
}
private void initData() {
mPicGridView.setLayoutManager(new GridLayoutManager(BaseApplication.getApplictaion(), 5));
mTaskLogPicRecyAdapter = new TaskLogPicRecyAdapter(getActivity(), R.layout.pic_item, null);
mPicGridView.setAdapter(mTaskLogPicRecyAdapter);
loadLogData();
}
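// Fetches the basic log from the server the first time (persisting it locally), then loads its media.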
private void loadLogData() {
if (mGreenMissionLog == null) { // Fetch the log first
if (mLoadBasicLogPresenter == null) {
mLoadBasicLogPresenter = new LoadBasicLogPresenter(new LoadBasicLogContract.View() {
@Override
public void getBasicLogSucc(String result) {
//{"msg":"查询成功!","datas":[{"OTHER_PART":"交通,城管","EQUIPMENT":"交通工具:001003,001001,001001 ","PLAN":"0","TYPE":"0"}],"status":"1"}
try {
JSONObject jsonObject = new JSONObject(result);
String status = jsonObject.getString("status");
if (status.equals("1")) {
JSONArray datas = jsonObject.getJSONArray("datas");
mGreenMissionLog = new GreenMissionLog();
for (int i = 0; i < datas.length(); i++) {
JSONObject object = datas.getJSONObject(i);
mGreenMissionLog.setEquipment(object.getString("EQUIPMENT"));
mGreenMissionLog.setServer_id(object.getString("LOG_ID"));
mGreenMissionLog.setTask_id(mGreenMissionTask.getTaskid());
mGreenMissionLog.setOther_part(object.getString("OTHER_PART"));
mGreenMissionLog.setPlan(Integer.parseInt(object.getString("PLAN")));
mGreenMissionLog.setPatrol(object.getString("PATROL"));
mGreenMissionLog.setDzyj(object.getString("DZYJ"));
}
GreenDAOManager.getInstence().getDaoSession().getGreenMissionLogDao()
.insert(mGreenMissionLog);
loadLogMedia();
}
} catch (JSONException e) {
e.printStackTrace();
}
//
}
@Override
public void getBasicLogFail(Throwable ex) {
RxToast.error("获取日志失败");
}
});
}
mLoadBasicLogPresenter.getBasicLog(mGreenMissionTask.getTaskid());
} else {
loadLogMedia();
}
}
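// Downloads the media list for the server-side log and maps each entry (patrol photo, signature, video thumbnail) into a GreenMedia item for the grid; skipped when already loaded or when the log has no server id.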
private void loadLogMedia() {
if (!mIsLoadMeadiaSucc&&mGreenMissionLog.getServer_id()!=null) {
if (mLoadTaskPicPresenter == null) {
mLoadTaskPicPresenter = new LoadTaskPicPresenter(new LoadTaskPicContract.View() {
@Override
public void loadTaskPicSucc(String result) {
mSwipeRefreshLayout.setRefreshing(false);
mIsLoadMeadiaSucc = true;
List<GreenMedia> mediaList = new ArrayList<>();
try {
JSONArray jsonArray = new JSONArray(result);
for (int i = 0; i < jsonArray.length(); i++) {
GreenMedia greenMedia = new GreenMedia();
JSONObject jsonObject = jsonArray.getJSONObject(i);
// String id = jsonObject.optString("ID");
// String logId = jsonObject.optString("LOG_ID");
String path = jsonObject.optString("PATH");
String userId = jsonObject.optString("USER_ID");
String ext = jsonObject.optString("EXT");
String type = jsonObject.getString("TYPE");
String smallimg = jsonObject.getString("SMALLIMG");
// File type: 0 = patrol log photo, 1 = member signature image, 2 = video, 3 = video thumbnail
switch (type) {
case "0":
greenMedia.setType(1); // 1 = log photo, 2 = video, 3 = audio, 4 = signature image
greenMedia.setPath(Api.BASE_URL + "/upload" + path);
greenMedia.setGreenMissionLogId(mGreenMissionLog.getId());
if (ext != null) {
JSONObject extObj = new JSONObject(ext);
String datetime = extObj.optString("datetime");
if (datetime != null) {
try {
greenMedia.setTime(OkingContract.SDF.parse(datetime).getTime());
} catch (ParseException e) {
e.printStackTrace();
}
}
GreenLocation greenLocation = new GreenLocation();
greenLocation.setLatitude(extObj.optString("latitude"));
greenLocation.setLongitude(extObj.optString("longitude"));
greenMedia.setSouceLocation(greenLocation);
}
mediaList.add(greenMedia);
break;
case "1":
greenMedia.setType(4);
greenMedia.setPath(Api.BASE_URL + "/upload" + path);
greenMedia.setUserid(userId);
greenMedia.setGreenMissionLogId(mGreenMissionLog.getId());
mediaList.add(greenMedia);
break;
case "2":
greenMedia.setType(2);
greenMedia.setPath(Api.BASE_URL + "/upload" + smallimg);
greenMedia.setGreenMissionLogId(mGreenMissionLog.getId());
mediaList.add(greenMedia);
break;
default:
break;
}
}
} catch (JSONException e) {
e.printStackTrace();
Log.i("Oking2", e.getMessage());
}
mTaskLogPicRecyAdapter.setNewData(mediaList);
}
@Override
public void loadTaskPicFail(Throwable e) {
Log.i("Oking1", "失败" + e.toString());
mSwipeRefreshLayout.setRefreshing(false);
mIsLoadMeadiaSucc = false;
}
});
}
mLoadTaskPicPresenter.loadTaskPic(mGreenMissionLog.getServer_id());
} else {
mSwipeRefreshLayout.setRefreshing(false);
}
}
@Override
public void onDestroyView() {
super.onDestroyView();
unbinder.unbind();
}
public void setMissionTask(GreenMissionTask greenMissionTask) {
mGreenMissionTask = greenMissionTask;
}
public void setMissionLog(GreenMissionLog greenMissionLog) {
mGreenMissionLog = greenMissionLog;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.beans;
/**
* Created by Administrator on 2018/2/3.
*/
public class MarkerOptionInfo {
private int optionType;
private String sourcePath;
public int getOptionType() {
return optionType;
}
public void setOptionType(int optionType) {
this.optionType = optionType;
}
public String getSourcePath() {
return sourcePath;
}
public void setSourcePath(String sourcePath) {
this.sourcePath = sourcePath;
}
@Override
public String toString() {
return "MarkerOptionInfo{" +
"optionType=" + optionType +
", sourcePath='" + sourcePath + '\'' +
'}';
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.contract;
/**
* Created by Administrator on 2018/6/9/009.
*/
public interface LoadAcceptNumberContract {
interface Model {
void loadAcceptNumber();
}
interface View {
void loadAcceptNumberSucc(String result);
void loadAcceptNumberFail(Throwable ex);
}
interface Presenter {
void loadAcceptNumber();
void loadAcceptNumberSucc(String result);
void loadAcceptNumberFail(Throwable ex);
}
}
<file_sep>package com.zhang.okinglawenforcementphone;
import android.annotation.SuppressLint;
import android.content.ComponentName;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Environment;
import android.os.IBinder;
import android.os.RemoteException;
import android.text.TextUtils;
import android.util.Log;
import com.amap.api.navi.AMapNavi;
import com.google.gson.Gson;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.DefaultContants;
import com.zhang.baselib.utils.FileUtil;
import com.zhang.baselib.utils.NetUtil;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.mvp.contract.UploadLocationToServerContract;
import com.zhang.okinglawenforcementphone.mvp.presenter.UploadLocationToServerPresenter;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.concurrent.TimeUnit;
import io.reactivex.Flowable;
import io.reactivex.schedulers.Schedulers;
/**
* Created by Administrator on 2018/4/20.
*/
public class OkingLocationManager {
private static OkingLocationManager mOkingLocationMannager;
private Subscription mSubscription;
private SimpleDateFormat locationFileSdf = new SimpleDateFormat("yyyyMMdd");
private UploadLocationToServerPresenter mUploadLocationToServerPresenter;
private OkingLocationManager() {
}
public static final OkingLocationManager getInstence() {
if (mOkingLocationMannager == null) {
synchronized (OkingLocationManager.class) {
if (mOkingLocationMannager == null) {
mOkingLocationMannager = new OkingLocationManager();
}
}
}
return mOkingLocationMannager;
}
public void init() {
// Set the AMap API key
AMapNavi.setApiKey(BaseApplication.getApplictaion(), "26c7c12d8952b9db18af6439616b25aa");
}
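// Binds to the remote location AIDL service and polls it every 4 seconds; every fifth tick the notification is refreshed and the fix is uploaded to the server (when online and logged in) and appended to the daily location file.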
@SuppressLint("WrongConstant")
public void startLocation(final UploadLocationToServerContract.View mView, final String imei, final Gson gson) {
Intent intent = new Intent();
intent.setComponent(new ComponentName("com.zhang.okinglawenforcementphone", "com.zhang.okinglawenforcementphone.service.IRemoteLocationService"));
BaseApplication.getApplictaion().bindService(intent, new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName componentName, IBinder iBinder) {
final AmapLocationAidlInterface amapLocationAidlInterface = AmapLocationAidlInterface.Stub.asInterface(iBinder);
if (amapLocationAidlInterface != null) {
Flowable.interval(1, 4, TimeUnit.SECONDS)
.onBackpressureDrop()
.subscribe(new Subscriber<Long>() {
@Override
public void onSubscribe(Subscription s) {
mSubscription = s;
s.request(Long.MAX_VALUE);
}
@Override
public void onNext(Long aLong) {
try {
String[] location = amapLocationAidlInterface.getLocation();
OkingContract.LOCATIONRESULT = location;
OkingContract.MARQUEEVIEWINFO.clear();
OkingContract.MARQUEEVIEWINFO.add("当前定位类型:" + OkingContract.LOCATIONRESULT[0]);
OkingContract.MARQUEEVIEWINFO.add("经纬度:" + OkingContract.LOCATIONRESULT[1] + "," + OkingContract.LOCATIONRESULT[2]);
OkingContract.MARQUEEVIEWINFO.add("定位时间:" + OkingContract.LOCATIONRESULT[3]);
} catch (RemoteException e) {
e.printStackTrace();
}
try {
if (aLong % 5 == 0) {
amapLocationAidlInterface.refreshNotification();
if (NetUtil.isConnected(BaseApplication.getApplictaion())
&& OkingContract.CURRENTUSER != null
&& !"".equals(OkingContract.CURRENTUSER.getUserid()) &&
DefaultContants.ISHTTPLOGIN && !TextUtils.isEmpty(OkingContract.LOCATIONRESULT[1])) {
if (mUploadLocationToServerPresenter == null) {
mUploadLocationToServerPresenter = new UploadLocationToServerPresenter(mView);
}
mUploadLocationToServerPresenter.upploadLocationToServer(Long.parseLong(OkingContract.CURRENTUSER.getLogintime()), OkingContract.SDF, imei, gson);
}
// Save the fix's latitude/longitude to a text file
if (!TextUtils.isEmpty(OkingContract.LOCATIONRESULT[3]) && !OkingContract.LOCATIONRESULT[0].equals("返回上次定位")) {
Schedulers.io().createWorker().schedule(new Runnable() {
@Override
public void run() {
writeToLogFile();
}
});
}
}
} catch (RemoteException e) {
e.printStackTrace();
}
}
@Override
public void onError(Throwable t) {
Log.i("Oking", "定位异常" + t.getMessage());
}
@Override
public void onComplete() {
}
});
}
}
@Override
public void onServiceDisconnected(ComponentName componentName) {
if (mSubscription != null) {
mSubscription.cancel();
}
}
}, 1);
}
public void cancelLocation() {
if (mSubscription!=null){
mSubscription.cancel();
mSubscription=null;
}
}
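// Appends the latest fix as "latitude,longitude,timestamp" to the per-day text file under /oking/location/.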
private void writeToLogFile() {
String filePath = Environment.getExternalStorageDirectory() + "/oking/location/" + locationFileSdf.format(System.currentTimeMillis()) + ".txt";
String cont = null;
try {
long time = OkingContract.SDF.parse(OkingContract.LOCATIONRESULT[3]).getTime();
cont = OkingContract.LOCATIONRESULT[1] + "," + OkingContract.LOCATIONRESULT[2] + "," + time + "\n";
} catch (ParseException e) {
e.printStackTrace();
}
boolean flag = FileUtil.writeFileFromString(filePath, cont, true);
if (flag) {
// System.out.println("文件写入成功");
} else {
// System.out.println("文件写入失败");
}
}
}
<file_sep>package com.zhang.okinglawenforcementphone.views;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
/**
* Created by zhao on 2016/10/18.
*/
public class SignatureView extends View {
private Paint paint = new Paint();
private Path path = new Path();
private boolean canPaint = true;
public SignatureView(Context context) {
super(context);
initView(context);
}
public SignatureView(Context context, AttributeSet attrs) {
super(context, attrs);
initView(context);
}
private void initView(Context context) {
paint.setAntiAlias(true);
paint.setColor(Color.BLACK);
paint.setStyle(Paint.Style.STROKE);
paint.setStrokeJoin(Paint.Join.ROUND);
paint.setStrokeWidth(25f);
}
@Override
protected void onDraw(Canvas canvas) {
if(canPaint) {
canvas.drawPath(path, paint);
}
super.onDraw(canvas);
}
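// Builds the signature path from touch input: ACTION_DOWN starts a new segment, MOVE/UP extend it, then the view is redrawn.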
@Override
public boolean onTouchEvent(MotionEvent event) {
float eventX = event.getX();
float eventY = event.getY();
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
path.moveTo(eventX, eventY);
return true;
case MotionEvent.ACTION_MOVE:
case MotionEvent.ACTION_UP:
path.lineTo(eventX, eventY);
break;
default:
return false;
}
invalidate();
return true;
}
public void clear(){
this.setDrawingCacheEnabled(false);
path.reset();
invalidate();
}
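// Renders the drawn path into a bitmap via the drawing cache; returns null when nothing has been drawn.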
public Bitmap save(){
if(!path.isEmpty()) {
this.setDrawingCacheEnabled(true);
this.setDrawingCacheQuality(View.DRAWING_CACHE_QUALITY_HIGH);
this.setDrawingCacheBackgroundColor(Color.WHITE);
this.buildDrawingCache();
return this.getDrawingCache();
}else{
return null;
}
}
public boolean isCanPaint() {
return canPaint;
}
public void setCanPaint(boolean canPaint) {
this.canPaint = canPaint;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.widget.Toolbar;
import android.view.KeyEvent;
import android.view.View;
import android.widget.TextView;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import com.zhang.okinglawenforcementphone.mvp.ui.fragments.HandlingMenuFragment;
import com.zhang.okinglawenforcementphone.mvp.ui.fragments.LawEnforcementMenuFragment;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
public class CaseManagerActivity extends BaseActivity {
@BindView(R.id.toolbar)
Toolbar mToolbar;
@BindView(R.id.tv_title)
TextView mTvTitle;
private Unbinder mBind;
private HandlingMenuFragment mHandlingMenuFragment;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_case_manager);
mBind = ButterKnife.bind(this);
initData();
setListenner();
}
private void setListenner() {
mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
back();
}
});
}
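// Steps back from whichever case sub-fragment is visible to the handling menu, or finishes the activity when the menu is already showing.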
private void back() {
FragmentTransaction fragmentTransaction = getSupportFragmentManager().beginTransaction();
Fragment caseRegistrationFragment = getSupportFragmentManager().findFragmentByTag("CaseRegistrationFragment");
Fragment openCasesFragment = getSupportFragmentManager().findFragmentByTag("OpenCasesFragment");
Fragment caseProcessingListFragment = getSupportFragmentManager().findFragmentByTag("CaseProcessingListFragment");
Fragment caseComplaintFragment = getSupportFragmentManager().findFragmentByTag("CaseComplaintFragment");
Fragment caseInAdvanceFragment = getSupportFragmentManager().findFragmentByTag("CaseInAdvanceFragment");
if (caseRegistrationFragment != null && !caseRegistrationFragment.isHidden()) {
mTvTitle.setText("办案");
fragmentTransaction.hide(caseRegistrationFragment);
fragmentTransaction.show(mHandlingMenuFragment).commit();
}else if (openCasesFragment != null && !openCasesFragment.isHidden()) {
mTvTitle.setText("办案");
fragmentTransaction.hide(openCasesFragment);
fragmentTransaction.show(mHandlingMenuFragment).commit();
}else if (caseProcessingListFragment != null && !caseProcessingListFragment.isHidden()) {
mTvTitle.setText("办案");
fragmentTransaction.hide(caseProcessingListFragment);
fragmentTransaction.show(mHandlingMenuFragment).commit();
}else if (caseComplaintFragment != null && !caseComplaintFragment.isHidden()) {
mTvTitle.setText("办案");
fragmentTransaction.hide(caseComplaintFragment);
fragmentTransaction.show(mHandlingMenuFragment).commit();
}else if (caseInAdvanceFragment != null && !caseInAdvanceFragment.isHidden()) {
mTvTitle.setText("办案");
fragmentTransaction.hide(caseInAdvanceFragment);
fragmentTransaction.show(mHandlingMenuFragment).commit();
}else {
finish();
}
}
private void initData() {
mHandlingMenuFragment = HandlingMenuFragment.newInstance(null, null);
getSupportFragmentManager().beginTransaction().replace(R.id.rl_case_content, mHandlingMenuFragment, "HandlingMenuFragment").commit();
// initPage();
// CaseManagerListFragment caseManagerListFragment = CaseManagerListFragment.newInstance(null, null);
// getSupportFragmentManager().beginTransaction().replace(R.id.taskmanager_content, caseManagerListFragment).commit();
}
// private void initPage() {
// ArrayList<String> listTitles = new ArrayList<>();
// ArrayList<Fragment> fragments = new ArrayList<>();
//
// listTitles.add("案件登记");
// CaseRegistrationFragment caseRegistrationFragment = CaseRegistrationFragment.newInstance(null, null);
// fragments.add(caseRegistrationFragment);
// mTabLayout.addTab(mTabLayout.newTab().setText("案件登记"));
//
// listTitles.add("案件受理");
// OpenCasesFragment openCasesFragment = OpenCasesFragment.newInstance(null, null);
// fragments.add(openCasesFragment);
// mTabLayout.addTab(mTabLayout.newTab().setText("案件受理"));
//
// listTitles.add("案件处理");
// CaseProcessingListFragment caseManagerListFragment = CaseProcessingListFragment.newInstance(null, null);
// fragments.add(caseManagerListFragment);
// mTabLayout.addTab(mTabLayout.newTab().setText("案件处理"));
//
// listTitles.add("案件转办");
// CaseComplaintFragment caseComplaintFragment = CaseComplaintFragment.newInstance(null, null);
// fragments.add(caseComplaintFragment);
// mTabLayout.addTab(mTabLayout.newTab().setText("案件转办"));
//
//
// listTitles.add("预立案");
// CaseInAdvanceFragment caseInAdvanceFragment = CaseInAdvanceFragment.newInstance(null, null);
// fragments.add(caseInAdvanceFragment);
// mTabLayout.addTab(mTabLayout.newTab().setText("预立案"));
//
//
// TitleAdapter titleTabAdapter = new TitleAdapter(getSupportFragmentManager(), fragments, listTitles);
// mViewPager.setAdapter(titleTabAdapter);
// mTabLayout.setupWithViewPager(mViewPager);
// }
@Override
protected void onDestroy() {
super.onDestroy();
mBind.unbind();
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
back();
return true; // returning true consumes the event, so the default back-key handling is suppressed
}
return super.onKeyDown(keyCode, event);
}
public void setTitleText(String title) {
mTvTitle.setText(title);
}
}
<file_sep>path.variable.kotlin_bundled=G\:\\Android Studio\\plugins\\Kotlin\\kotlinc
jdk.home.1.8=G\:/Android Studio/jre
jdk.home.android_api_27_platform=G\:/androidSDK
javac2.instrumentation.includeJavaRuntime=false<file_sep>"# OkingLawEnforcementPhone"
"# OkingLawEnforcementPhone"
"# OkingLawEnforcementPhone"
"# OkingLawEnforcementPhone"
<file_sep>package com.zhang.okinglawenforcementphone.beans;
import org.greenrobot.greendao.annotation.Entity;
import org.greenrobot.greendao.annotation.Id;
import org.greenrobot.greendao.annotation.ToOne;
import org.greenrobot.greendao.annotation.Generated;
/**
* Created by Administrator on 2018/5/28.
*/
@Entity
public class GreenEvidenceMedia {
@Id(autoincrement = true)
private Long id;
private Long greenEvidenceId;
private Long time;
private String path;
private String userid;
private String taskid;
private Integer type; // 1 = evidence photo, 2 = video, 3 = audio
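// A possible set of named constants for the type codes above (hypothetical, not part of
// the original entity); kept as a comment so the GreenDAO-generated parts stay untouched:
// public static final int TYPE_PHOTO = 1;
// public static final int TYPE_VIDEO = 2;
// public static final int TYPE_AUDIO = 3;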
@Generated(hash = 1044598213)
public GreenEvidenceMedia(Long id, Long greenEvidenceId, Long time, String path,
String userid, String taskid, Integer type) {
this.id = id;
this.greenEvidenceId = greenEvidenceId;
this.time = time;
this.path = path;
this.userid = userid;
this.taskid = taskid;
this.type = type;
}
@Generated(hash = 612896463)
public GreenEvidenceMedia() {
}
public Long getId() {
return this.id;
}
public void setId(Long id) {
this.id = id;
}
public Long getGreenEvidenceId() {
return this.greenEvidenceId;
}
public void setGreenEvidenceId(Long greenEvidenceId) {
this.greenEvidenceId = greenEvidenceId;
}
public Long getTime() {
return this.time;
}
public void setTime(Long time) {
this.time = time;
}
public String getPath() {
return this.path;
}
public void setPath(String path) {
this.path = path;
}
public String getUserid() {
return this.userid;
}
public void setUserid(String userid) {
this.userid = userid;
}
public String getTaskid() {
return this.taskid;
}
public void setTaskid(String taskid) {
this.taskid = taskid;
}
public Integer getType() {
return this.type;
}
public void setType(Integer type) {
this.type = type;
}
@Override
public String toString() {
return "GreenEvidenceMedia{" +
"id=" + id +
", greenEvidenceId=" + greenEvidenceId +
", time=" + time +
", path='" + path + '\'' +
", userid='" + userid + '\'' +
", taskid='" + taskid + '\'' +
", type=" + type +
'}';
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.model;
import android.net.Uri;
import android.util.Log;
import com.google.gson.Gson;
import com.zhang.baselib.http.BaseHttpFactory;
import com.zhang.baselib.http.schedulers.RxSchedulersHelper;
import com.zhang.okinglawenforcementphone.beans.GreenLocation;
import com.zhang.okinglawenforcementphone.beans.GreenMedia;
import com.zhang.okinglawenforcementphone.beans.GreenMissionLog;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.beans.Point;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.http.service.GDWaterService;
import com.zhang.okinglawenforcementphone.mvp.contract.UploadJobLogForPicContract;
import org.json.JSONObject;
import java.io.File;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import io.reactivex.Observable;
import io.reactivex.ObservableSource;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.functions.Consumer;
import io.reactivex.functions.Function;
import io.reactivex.functions.Predicate;
import io.reactivex.schedulers.Schedulers;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
/**
* Created by Administrator on 2018/4/26/026.
*/
public class UploadJobLogForPicModel implements UploadJobLogForPicContract.Model {
private UploadJobLogForPicContract.Presenter mPresenter;
private String mLogResult;
private int pos = 0;
private String mLastPathSegment;
public UploadJobLogForPicModel(UploadJobLogForPicContract.Presenter presenter) {
mPresenter = presenter;
}
@Override
public void uploadJobLogForPic(final Gson gson, final Map<String, RequestBody> photoParams, final GreenMissionLog mLog, final List<GreenMedia> media) {
BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.getMissionRecordPicPath(mLog.getServer_id(), 0)
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.observeOn(Schedulers.io())
.concatMap(new Function<ResponseBody, Observable<GreenMedia>>() {
@Override
public Observable<GreenMedia> apply(ResponseBody responseBody) throws Exception {
mLogResult = responseBody.string();
Log.i("Oking", "已存在日志图片集合:" + mLogResult);
pos = 0;
return Observable.fromIterable(media);
}
})
.concatMap(new Function<GreenMedia, ObservableSource<ResponseBody>>() {
@Override
public Observable<ResponseBody> apply(GreenMedia media) throws Exception {
mLastPathSegment = Uri.parse(media.getPath()).getLastPathSegment();
if (mLogResult.contains(mLastPathSegment)) {
// already present on the server
Log.i("Oking", "日志图片已存在服务器" + mLastPathSegment);
pos++;
mPresenter.uploadIsCount(pos);
} else {
// upload this picture
photoParams.clear();
File file = new File(Uri.parse(media.getPath()).getPath());
photoParams.put("logId", RequestBody.create(MediaType.parse("text/plain;charset=UTF-8"), mLog.getServer_id()));
photoParams.put("type", RequestBody.create(MediaType.parse("text/plain;charset=UTF-8"), "0"));
photoParams.put("smallImg", RequestBody.create(MediaType.parse("text/plain;charset=UTF-8"), ""));
String ext;
GreenLocation mediaLocation = media.getLocation();
if (mediaLocation != null) {
Point location = new Point();
location.setLongitude(Double.parseDouble(mediaLocation.getLongitude()));
location.setLatitude(Double.parseDouble(mediaLocation.getLatitude()));
location.setDatetime(media.getTime());
ext = gson.toJson(location);
} else {
Map<String, String> map = new HashMap<>();
map.put("datetime", OkingContract.SDF.format(media.getTime()));
ext = gson.toJson(map);
}
Log.i("Oking5",ext);
photoParams.put("ext", RequestBody.create(MediaType.parse("text/plain;charset=UTF-8"), ext));
photoParams.put("files" + "\"; filename=\"" + file.getName(), RequestBody.create(MediaType.parse("image/png"), file));
return BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.uploadFiles(photoParams);
}
return Observable.empty();
}
})
.observeOn(AndroidSchedulers.mainThread())
.retry(5, new Predicate<Throwable>() {
@Override
public boolean test(Throwable throwable) throws Exception {
// Let the upstream be re-subscribed (re-emit) at most 5 times; the return value decides
// what happens on each failure:
// returning false stops retrying and the observer's onError terminates the stream,
// returning true makes the upstream issue the request again.
mPresenter.uploadRetry(throwable);
Log.i("Oking", "日志图片上传异常,重试");
return true;
}
})
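// Note: retry() re-subscribes to the whole upstream chain, so every retry fetches the
// list of already uploaded pictures again and the iteration over media starts over.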
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
String result = responseBody.string();
JSONObject jsonObject = new JSONObject(result);
String path = jsonObject.getString("path");
mLogResult = mLogResult + "," + path;
Log.i("Oking", "日志图片上传成功" + result);
mPresenter.uploadSucc(result);
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
mPresenter.uploadFail(throwable);
Log.i("Oking", "日志图片上传失败" + throwable.toString());
}
});
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.app.PendingIntent;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.os.Handler;
import android.support.design.widget.TextInputEditText;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.google.gson.Gson;
import com.jzxiang.pickerview.TimePickerDialog;
import com.jzxiang.pickerview.data.Type;
import com.jzxiang.pickerview.listener.OnDateSetListener;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.http.BaseHttpFactory;
import com.zhang.baselib.http.schedulers.RxSchedulersHelper;
import com.zhang.baselib.ui.views.RxDialogLoading;
import com.zhang.baselib.ui.views.RxToast;
import com.zhang.baselib.utils.TextUtil;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.OkingNotificationManager;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.adapter.EmergencyMemberAdapter;
import com.zhang.okinglawenforcementphone.adapter.SourceArrayRecyAdapter;
import com.zhang.okinglawenforcementphone.beans.ApproverBean;
import com.zhang.okinglawenforcementphone.beans.EmergencyMemberGson;
import com.zhang.okinglawenforcementphone.beans.EmergencyTaskOV;
import com.zhang.okinglawenforcementphone.beans.GreenMember;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import com.zhang.okinglawenforcementphone.beans.LatLngListOV;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.beans.SourceArrayOV;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.http.service.GDWaterService;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import com.zhang.okinglawenforcementphone.utils.ApproverPinyinComparator;
import com.zhang.okinglawenforcementphone.utils.DialogUtil;
import com.zhang.okinglawenforcementphone.utils.EmergencyPinyinComparator;
import com.zhang.okinglawenforcementphone.views.DividerItemDecoration;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import butterknife.Unbinder;
import io.reactivex.functions.Consumer;
import io.reactivex.schedulers.Schedulers;
import okhttp3.ResponseBody;
public class TemporaryEmergencyTaskActivity extends BaseActivity implements OnDateSetListener {
@BindView(R.id.toolbar)
Toolbar mToolbar;
@BindView(R.id.et_taskname)
TextInputEditText mEtTaskname;
@BindView(R.id.publisher_tv)
TextInputEditText mPublisherTv;
@BindView(R.id.sp_tasktype)
TextView mSpTasktype;
@BindView(R.id.sp_approver)
TextView mSpApprover;
@BindView(R.id.sp_source)
TextView mSpSource;
@BindView(R.id.sp_tasknature)
TextView mSpTasknature;
@BindView(R.id.list_item_missionMember)
TextInputEditText mListItemMissionMember;
@BindView(R.id.bt_select_begintime)
Button mBtSelectBegintime;
@BindView(R.id.bt_select_endtime)
Button mBtSelectEndtime;
@BindView(R.id.list_item_missionDetail)
TextInputEditText mListItemMissionDetail;
@BindView(R.id.et_description)
TextInputEditText mEtDescription;
@BindView(R.id.bt_ok)
Button mBtOk;
@BindView(R.id.bt_select_members)
Button mBtSelectMembers;
@BindView(R.id.ib_map)
ImageButton mIbMap;
private TimePickerDialog mBeginDialogAll;
private TimePickerDialog mEndDialogAll;
private SimpleDateFormat sf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
private long mBeginMillseconds = 0;
private long mEndMillseconds = 0;
private String mApproverId; // id of the selected approver
private String mApprover; // name of the selected approver
private String mSource; // selected clue source
private String mTasknature; // selected task nature
private List<ApproverBean.SZJCBean> mSzjc;
private String mMembersid;
private RxDialogLoading mSubRxDialogLoading;
private Handler mainHandler;
private String[] mTasktypeArray;
private String mTasktype;
private RxDialogLoading mRxDialogLoading;
private Gson gson = new Gson();
private String mMcoordinateJson;
private ArrayList<GreenMember> mEmergencyMembers;
private EmergencyMemberAdapter mEmergencyMemberAdapter;
private View mButtomContentView;
private ListView mLv_members;
private DialogUtil mButtomDialogUtil;
private Button mBtOkselect;
private List<GreenMember> mCheckName;
private Intent mIntent;
private Unbinder mBind;
private LatLngListOV mLatLngListOV;
private DialogUtil mDialogUtil;
private View mButtonDailog;
private TextView mTv_title;
private ArrayList<SourceArrayOV> mSourceArrayOVS;
private SourceArrayRecyAdapter mSourceArrayRecyAdapter;
private ArrayList<SourceArrayOV> mTasknatureArrayOVS;
private List<SourceArrayOV> mTasktypeArrayOVS;
private List<SourceArrayOV> mApproversOVS;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_temporary_emergency_task);
mBind = ButterKnife.bind(this);
initView();
initData();
setListener();
}
private void setListener() {
mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
finish();
}
});
}
private void initView() {
}
@Override
protected void onDestroy() {
super.onDestroy();
mBind.unbind();
EventBus.getDefault().unregister(this);
}
private void initData() {
EventBus.getDefault().register(this);
mPublisherTv.setText(OkingContract.CURRENTUSER.getUserName());
BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.loadEmergencyName(OkingContract.CURRENTUSER.getDept_id())
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
String result = responseBody.string();
try {
JSONObject jsonObject = new JSONObject(result);
int code = jsonObject.getInt("code");
if (code == 400) {
JSONArray msg = jsonObject.getJSONArray("msg");
JSONObject jsonObject1 = msg.getJSONObject(0);
String rwmc = jsonObject1.getString("rwmc");
mEtTaskname.setText(rwmc);
TextUtil.setEditTextInhibitInputSpace(mEtTaskname);
TextUtil.setEditTextInhibitInputSpeChat(mEtTaskname);
}
} catch (JSONException e) {
e.printStackTrace();
}
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
}
});
initButtomDialog();
String[] sourceArray = getResources().getStringArray(R.array.spinner_source);
mSourceArrayOVS = new ArrayList<>();
for (String s : sourceArray) {
SourceArrayOV sourceArrayOV = new SourceArrayOV();
sourceArrayOV.setType(0);
sourceArrayOV.setSource(s);
mSourceArrayOVS.add(sourceArrayOV);
}
String[] tasknatureArray = getResources().getStringArray(R.array.spinner_tasknature);
mTasknatureArrayOVS = new ArrayList<>();
for (String s : tasknatureArray) {
SourceArrayOV sourceArrayOV = new SourceArrayOV();
sourceArrayOV.setType(1);
sourceArrayOV.setSource(s);
mTasknatureArrayOVS.add(sourceArrayOV);
}
String[] tasktypeArray = getResources().getStringArray(R.array.spinner_tasktype);
mTasktypeArrayOVS = new ArrayList<>();
for (String s : tasktypeArray) {
SourceArrayOV sourceArrayOV = new SourceArrayOV();
sourceArrayOV.setType(2);
sourceArrayOV.setSource(s);
mTasktypeArrayOVS.add(sourceArrayOV);
}
if (mRxDialogLoading == null) {
initWaitingDialog();
}
mRxDialogLoading.show();
BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.loadPersonnel("SZJC")
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
String result = responseBody.string();
mRxDialogLoading.cancel();
ApproverBean approverBean = gson.fromJson(result, ApproverBean.class);
mSzjc = approverBean.getSZJC();
Collections.sort(mSzjc, new ApproverPinyinComparator());
mApproversOVS = new ArrayList<>();
for (int i = 0; i < mSzjc.size(); i++) {
SourceArrayOV sourceArrayOV = new SourceArrayOV();
sourceArrayOV.setType(3);
sourceArrayOV.setSource(mSzjc.get(i).getUSERNAME());
mApproversOVS.add(sourceArrayOV);
}
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
Log.i("Oking", "失败:" + throwable.getMessage());
RxToast.error(BaseApplication.getApplictaion(), "数据获取失败", Toast.LENGTH_SHORT).show();
mRxDialogLoading.cancel();
}
});
}
private void initButtomDialog() {
if (mDialogUtil == null) {
mDialogUtil = new DialogUtil();
mButtonDailog = View.inflate(BaseApplication.getApplictaion(), R.layout.maptask_dialog, null);
mTv_title = mButtonDailog.findViewById(R.id.tv_title);
RecyclerView recyList = mButtonDailog.findViewById(R.id.recy_task);
recyList.setLayoutManager(new LinearLayoutManager(BaseApplication.getApplictaion(), LinearLayoutManager.VERTICAL, false));
recyList.addItemDecoration(new DividerItemDecoration(BaseApplication.getApplictaion(), 0, 1, Color.argb(255,223,223,223)));
mSourceArrayRecyAdapter = new SourceArrayRecyAdapter(R.layout.source_item, null);
mSourceArrayRecyAdapter.openLoadAnimation(BaseQuickAdapter.SLIDEIN_RIGHT);
recyList.setAdapter(mSourceArrayRecyAdapter);
mSourceArrayRecyAdapter.setOnItemClickListener(new BaseQuickAdapter.OnItemClickListener() {
@Override
public void onItemClick(BaseQuickAdapter adapter, View view, int position) {
List<SourceArrayOV> sourceArrayOVS = adapter.getData();
SourceArrayOV sourceArrayOV = sourceArrayOVS.get(position);
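// The numeric strings assigned below (mSource, mTasknature, mTasktype) appear to mirror
// server-side enum codes; the visible labels come from the spinner_* string arrays.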
switch (sourceArrayOV.getType()) {
case 0:
if ("上级交办".equals(sourceArrayOV.getSource())) {
mSource = "0";
} else if ("部门移送".equals(sourceArrayOV.getSource())) {
mSource = "1";
} else if ("系统报警".equals(sourceArrayOV.getSource())) {
mSource = "2";
} else if ("日常巡查".equals(sourceArrayOV.getSource())) {
mSource = "3";
} else if ("媒体披露".equals(sourceArrayOV.getSource())) {
mSource = "4";
} else if ("群众举报".equals(sourceArrayOV.getSource())) {
mSource = "5";
}
mSpSource.setText(sourceArrayOV.getSource());
break;
case 1:
if ("日常执法".equals(sourceArrayOV.getSource())) {
mTasknature = "0";
} else if ("联合执法".equals(sourceArrayOV.getSource())) {
mTasknature = "1";
} else if ("专项执法".equals(sourceArrayOV.getSource())) {
mTasknature = "2";
}
mSpTasknature.setText(sourceArrayOV.getSource());
break;
case 2:
if ("河道管理".equals(sourceArrayOV.getSource())) {
mTasktype = "0";
} else if ("河道采砂".equals(sourceArrayOV.getSource())) {
mTasktype = "1";
} else if ("水资源管理".equals(sourceArrayOV.getSource())) {
mTasktype = "2";
} else if ("水土保持管理".equals(sourceArrayOV.getSource())) {
mTasktype = "3";
} else if ("水利工程管理".equals(sourceArrayOV.getSource())) {
mTasktype = "4";
}
mSpTasktype.setText(sourceArrayOV.getSource());
break;
case 3:
mApproverId = mSzjc.get(position).getUSERID();
mApprover = sourceArrayOV.getSource();
mSpApprover.setText(sourceArrayOV.getSource());
break;
default:
break;
}
mDialogUtil.cancelDialog();
}
});
}
}
private void initWaitingDialog() {
mRxDialogLoading = new RxDialogLoading(this, false, new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialogInterface) {
dialogInterface.cancel();
}
});
mRxDialogLoading.setLoadingText("获取数据中,请稍等...");
}
@OnClick({R.id.sp_tasktype, R.id.sp_approver, R.id.sp_source, R.id.sp_tasknature,R.id.bt_select_begintime, R.id.bt_select_endtime, R.id.bt_ok, R.id.bt_select_members, R.id.ib_map})
public void onViewClicked(View view) {
switch (view.getId()) {
case R.id.sp_tasktype:
mTv_title.setText("任务类型");
mSourceArrayRecyAdapter.setNewData(mTasktypeArrayOVS);
mDialogUtil.showBottomDialog(TemporaryEmergencyTaskActivity.this, mButtonDailog, 300f);
break;
case R.id.sp_approver:
mTv_title.setText("审批领导");
mSourceArrayRecyAdapter.setNewData(mApproversOVS);
mDialogUtil.showBottomDialog(TemporaryEmergencyTaskActivity.this, mButtonDailog, 300f);
break;
case R.id.sp_source:
mTv_title.setText("线索来源");
mSourceArrayRecyAdapter.setNewData(mSourceArrayOVS);
mDialogUtil.showBottomDialog(TemporaryEmergencyTaskActivity.this, mButtonDailog, 300f);
break;
case R.id.sp_tasknature:
mTv_title.setText("任务性质");
mSourceArrayRecyAdapter.setNewData(mTasknatureArrayOVS);
mDialogUtil.showBottomDialog(TemporaryEmergencyTaskActivity.this, mButtonDailog, 300f);
break;
case R.id.bt_select_begintime: // pick the start time
if (mBeginDialogAll == null) {
initBeginWheelYearMonthDayDialog();
}
mBtSelectEndtime.setText("选择");
mBeginDialogAll.show(getSupportFragmentManager(), "beginTime");
break;
case R.id.bt_select_endtime: // pick the end time
initEndWheelYearMonthDayDialog();
mEndDialogAll.show(getSupportFragmentManager(), "endTime");
break;
case R.id.bt_ok: // submit
submitDataToServer();
break;
case R.id.bt_select_members: // pick team members
if (mRxDialogLoading == null) {
initWaitingDialog();
}
mRxDialogLoading.show();
httpGetCanSelectMember();
break;
case R.id.ib_map:
if (mIntent == null) {
mIntent = new Intent(TemporaryEmergencyTaskActivity.this, MapByPointActivity.class);
}
if (mLatLngListOV != null) {
mIntent.putExtra("centerPoint", mLatLngListOV.getCenterLatLng());
mIntent.putExtra("drawLaLoType", mLatLngListOV.getType());
mIntent.putExtra("left", mLatLngListOV.getLeft());
mIntent.putExtra("right", mLatLngListOV.getRight());
mIntent.putExtra("top", mLatLngListOV.getTop());
mIntent.putExtra("bottom", mLatLngListOV.getBottom());
mIntent.putExtra("zoom", mLatLngListOV.getZoom());
}
startActivity(mIntent);
break;
default:
break;
}
}
private void initEndWheelYearMonthDayDialog() {
long fiveYears = 5L * 365 * 1000 * 60 * 60 * 24L; // about five years in milliseconds
if (mBeginMillseconds == 0) {
mBeginMillseconds = System.currentTimeMillis();
}
mEndDialogAll = new TimePickerDialog.Builder()
.setCallBack(this)
.setCancelStringId("取消")
.setSureStringId("确认")
.setTitleStringId("请选择结束时间")
.setYearText("年")
.setMonthText("月")
.setDayText("日")
.setHourText("点")
.setMinuteText("分")
.setCyclic(false)
.setMinMillseconds(mBeginMillseconds)
.setMaxMillseconds(System.currentTimeMillis() + fiveYears)
.setCurrentMillseconds(Math.max(System.currentTimeMillis(), mBeginMillseconds)) // keep the initial value inside the [min, max] range
.setThemeColor(getResources().getColor(R.color.timepicker_dialog_bg))
.setType(Type.ALL)
.setWheelItemTextNormalColor(getResources().getColor(R.color.timetimepicker_default_text_color))
.setWheelItemTextSelectorColor(getResources().getColor(R.color.timepicker_toolbar_bg))
.setWheelItemTextSize(18)
.build();
}
private void initBeginWheelYearMonthDayDialog() {
long fiveYears = 5L * 365 * 1000 * 60 * 60 * 24L; // about five years in milliseconds
mBeginDialogAll = new TimePickerDialog.Builder()
.setCallBack(this)
.setCancelStringId("取消")
.setSureStringId("确认")
.setTitleStringId("请选择开始时间")
.setYearText("年")
.setMonthText("月")
.setDayText("日")
.setHourText("点")
.setMinuteText("分")
.setCyclic(false)
.setMinMillseconds(System.currentTimeMillis())
.setMaxMillseconds(System.currentTimeMillis() + fiveYears)
.setCurrentMillseconds(System.currentTimeMillis())
.setThemeColor(getResources().getColor(R.color.timepicker_dialog_bg))
.setType(Type.ALL)
.setWheelItemTextNormalColor(getResources().getColor(R.color.timetimepicker_default_text_color))
.setWheelItemTextSelectorColor(getResources().getColor(R.color.timepicker_toolbar_bg))
.setWheelItemTextSize(18)
.build();
}
private void httpGetCanSelectMember() {
if (mButtomContentView == null) {
mButtomContentView = LayoutInflater.from(TemporaryEmergencyTaskActivity.this).inflate(R.layout.dialog_content_circle, null);
mLv_members = mButtomContentView.findViewById(R.id.lv_members);
mBtOkselect = mButtomContentView.findViewById(R.id.bt_okselect);
mBtOkselect.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mCheckName = mEmergencyMemberAdapter.getCheckName();
String members = OkingContract.CURRENTUSER.getUserName();
mMembersid = "";
for (GreenMember m : mCheckName) {
members = members + "," + m.getUsername();
mMembersid = mMembersid + "," + m.getUserid();
}
mListItemMissionMember.setText(members);
mButtomDialogUtil.cancelDialog();
}
});
}
BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.loadPersonnel("SZJC,CBR")
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
String result = responseBody.string();
mRxDialogLoading.cancel();
if (mEmergencyMembers == null) {
mEmergencyMembers = new ArrayList<GreenMember>();
}
mEmergencyMembers.clear();
EmergencyMemberGson emergencyMemberGson = gson.fromJson(result, EmergencyMemberGson.class);
List<EmergencyMemberGson.CBRBean> cbrs = emergencyMemberGson.getCBR();
for (EmergencyMemberGson.CBRBean cbrBean : cbrs) {
if (cbrBean.getUSERNAME().equals(OkingContract.CURRENTUSER.getUserName())) {
continue;
}
GreenMember member = new GreenMember();
member.setUsername(cbrBean.getUSERNAME());
member.setDepatid(cbrBean.getDEPTID());
member.setDepatname(cbrBean.getDEPTNAME());
member.setRemark(cbrBean.getREMARK());
member.setUserid(cbrBean.getUSERID());
member.setZfzh(cbrBean.getZFZH());
mEmergencyMembers.add(member);
}
List<EmergencyMemberGson.SZJCBean> szjcs = emergencyMemberGson.getSZJC();
for (EmergencyMemberGson.SZJCBean szjcBean : szjcs) {
if (szjcBean.getUSERNAME().equals(OkingContract.CURRENTUSER.getUserName())) {
continue;
}
GreenMember member = new GreenMember();
member.setUsername(szjcBean.getUSERNAME());
member.setDepatid(szjcBean.getDEPTID());
member.setDepatname(szjcBean.getDEPTNAME());
member.setRemark(szjcBean.getREMARK());
member.setUserid(szjcBean.getUSERID());
member.setZfzh(szjcBean.getZFZH());
mEmergencyMembers.add(member);
}
Collections.sort(mEmergencyMembers, new EmergencyPinyinComparator());
if (mButtomDialogUtil == null) {
mButtomDialogUtil = new DialogUtil();
}
mButtomDialogUtil.showBottomDialog(TemporaryEmergencyTaskActivity.this, mButtomContentView, 400f);
mEmergencyMemberAdapter = new EmergencyMemberAdapter(TemporaryEmergencyTaskActivity.this, mEmergencyMembers);
mLv_members.setAdapter(mEmergencyMemberAdapter);
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
mRxDialogLoading.cancel();
if (mEmergencyMemberAdapter != null) {
mEmergencyMemberAdapter.notifyDataSetChanged();
}
}
});
}
private void submitDataToServer() {
if (mSubRxDialogLoading == null) {
mSubRxDialogLoading = new RxDialogLoading(this, false, new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialogInterface) {
dialogInterface.cancel();
}
});
mSubRxDialogLoading.setLoadingText("提交数据中,请稍等...");
}
final String taskName = mEtTaskname.getText().toString().trim();
String members = mListItemMissionMember.getText().toString().trim();
final String missionDetail = mListItemMissionDetail.getText().toString().trim();
final String description = mEtDescription.getText().toString().trim();
final String beginTime = mBtSelectBegintime.getText().toString();
final String endTime = mBtSelectEndtime.getText().toString();
if (mSpTasktype.getText().toString().trim().equals("*请选择")) {
RxToast.warning("请选择任务类型");
return;
}
if (mSpApprover.getText().toString().trim().equals("*请选择")) {
RxToast.warning("请选择审批领导");
return;
}
if (mSpSource.getText().toString().trim().equals("*请选择")) {
RxToast.warning("请选择线索来源");
return;
}
if (mSpTasknature.getText().toString().trim().equals("*请选择")) {
RxToast.warning("请选择任务性质");
return;
}
if (!TextUtils.isEmpty(taskName) && !TextUtils.isEmpty(members)
&& !TextUtils.isEmpty(missionDetail) && !TextUtils.isEmpty(description)
&& !"选择".equals(beginTime) && !"选择".equals(endTime)) {
mBtOk.setEnabled(false);
mSubRxDialogLoading.show();
HashMap<String, Object> stringObjectHashMap = new HashMap<>();
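// The request keys are pinyin abbreviations expected by the backend (inferred from the
// values assigned below): rwmc = task name, rwms = task description, sjq/sjz = start/end
// time, jsr/jsrid = receiver, spr/sprid = approver, fbr/fbdw = publisher and unit,
// rwly = clue source, rwlx = task nature, jjcd = urgency, zt = status,
// yxry = selected member ids.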
stringObjectHashMap.put("rwmc", taskName);
stringObjectHashMap.put("fid", "0");
stringObjectHashMap.put("rwms", description);
stringObjectHashMap.put("fbrid", OkingContract.CURRENTUSER.getUserid());
stringObjectHashMap.put("sjq", beginTime);
stringObjectHashMap.put("sjz", endTime);
stringObjectHashMap.put("jsrid", OkingContract.CURRENTUSER.getUserid());
stringObjectHashMap.put("rwlx", mTasknature);
stringObjectHashMap.put("sprid", mApproverId);
stringObjectHashMap.put("zt", "3");
stringObjectHashMap.put("jjcd", "1");
stringObjectHashMap.put("deptid", OkingContract.CURRENTUSER.getDept_id());
stringObjectHashMap.put("rwqyms", missionDetail);
stringObjectHashMap.put("rwly", mSource);
stringObjectHashMap.put("jsr", OkingContract.CURRENTUSER.getUserName());
stringObjectHashMap.put("jsdw", OkingContract.CURRENTUSER.getDeptname());
stringObjectHashMap.put("fbr", OkingContract.CURRENTUSER.getUserName());
stringObjectHashMap.put("fbdw", OkingContract.CURRENTUSER.getDeptname());
stringObjectHashMap.put("spr", mApprover);
stringObjectHashMap.put("rwcd", "1");
stringObjectHashMap.put("typeoftask", mTasktype);
stringObjectHashMap.put("yxry", mMembersid.substring(1, mMembersid.length()));
BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.addEmergencyRelease(stringObjectHashMap)
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
String result = responseBody.string();
mSubRxDialogLoading.cancel();
try {
JSONObject jsonObject = new JSONObject(result);
int code = jsonObject.getInt("code");
if (code == 400) {
final String taskid = jsonObject.getString("taskid");
Schedulers.io().createWorker().schedule(new Runnable() {
@Override
public void run() {
GreenMissionTask greenMissionTask = new GreenMissionTask();
// id returned by the server
greenMissionTask.setTaskid(taskid);
greenMissionTask.setStatus("3");
greenMissionTask.setTask_name(taskName);
greenMissionTask.setJjcd("1");
greenMissionTask.setTask_content(description);
greenMissionTask.setRwqyms(missionDetail);
greenMissionTask.setBegin_time(mBeginMillseconds);
greenMissionTask.setEnd_time(mEndMillseconds);
greenMissionTask.setUserid(OkingContract.CURRENTUSER.getUserid());
greenMissionTask.setTypeoftask(mTasktype);
greenMissionTask.setRwly(mSource);
greenMissionTask.setExamine_status(-1);
greenMissionTask.setTypename(mSpTasknature.getText().toString());
greenMissionTask.setApproved_person_name(mApprover);
greenMissionTask.setApproved_person(mApproverId);
greenMissionTask.setPublisher_name(OkingContract.CURRENTUSER.getUserName());
greenMissionTask.setFbdw(OkingContract.CURRENTUSER.getDeptname());
greenMissionTask.setTask_area(missionDetail);
if (mLatLngListOV != null) {
greenMissionTask.setDrawLaLoType(mLatLngListOV.getType());
greenMissionTask.setMcoordinateJson(gson.toJson(mLatLngListOV.getLatLngs()));
}
long insert = GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao().insert(greenMissionTask);
GreenMember greenMember = new GreenMember();
greenMember.setGreenMemberId(insert);
greenMember.setUsername(OkingContract.CURRENTUSER.getUserName());
greenMember.setUserid(OkingContract.CURRENTUSER.getUserid());
greenMember.setPost("负责人");
greenMember.setAccount(OkingContract.CURRENTUSER.getAcount());
GreenDAOManager.getInstence().getDaoSession().getGreenMemberDao().insert(greenMember);
for (GreenMember checkName : mCheckName) {
checkName.setPost("组员");
checkName.setGreenMemberId(insert);
GreenDAOManager.getInstence().getDaoSession().getGreenMemberDao().insert(checkName);
}
Intent intent = new Intent(BaseApplication.getApplictaion(), MissionActivity.class);
intent.putExtra("id", insert);
PendingIntent pendingIntent = PendingIntent.getActivity(BaseApplication.getApplictaion(), 0, intent, PendingIntent.FLAG_UPDATE_CURRENT);
OkingNotificationManager.getInstence().showTaskNotification(greenMissionTask, pendingIntent);
EmergencyTaskOV emergencyTaskOV = new EmergencyTaskOV();
emergencyTaskOV.setType(0);
emergencyTaskOV.setGreenMissionTask(greenMissionTask);
EventBus.getDefault().post(emergencyTaskOV);
}
});
if (mainHandler == null) {
mainHandler = new Handler();
}
mainHandler.postDelayed(new Runnable() {
@Override
public void run() {
finish();
}
}, 100);
RxToast.success(BaseApplication.getApplictaion(), "紧急任务发布成功", Toast.LENGTH_LONG).show();
} else {
RxToast.error(BaseApplication.getApplictaion(), "服务器系统内部出错了", Toast.LENGTH_LONG).show();
}
} catch (JSONException e) {
e.printStackTrace();
RxToast.error(BaseApplication.getApplictaion(), "服务器系统内部出错了", Toast.LENGTH_LONG).show();
}
mBtOk.setEnabled(true);
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
mBtOk.setEnabled(true);
mSubRxDialogLoading.cancel();
RxToast.error(BaseApplication.getApplictaion(), "紧急任务发布失败" + throwable.getMessage(), Toast.LENGTH_SHORT).show();
}
});
} else {
RxToast.warning(BaseApplication.getApplictaion(), "提交内容不能有空", Toast.LENGTH_LONG).show();
}
}
@Override
public void onDateSet(TimePickerDialog timePickerView, long millseconds) {
String tag = timePickerView.getTag();
if ("beginTime".equals(tag)) {
mBeginMillseconds = millseconds;
mBtSelectBegintime.setText(getDateToString(millseconds));
} else {
mEndMillseconds = millseconds;
mBtSelectEndtime.setText(getDateToString(millseconds));
}
}
@Subscribe(threadMode = ThreadMode.MAIN)
public void handleEvent(LatLngListOV latLngListOV) {
this.mLatLngListOV = latLngListOV;
mMcoordinateJson = gson.toJson(latLngListOV.getLatLngs());
Log.i("Oking", latLngListOV.getLatLngs().size() + "" + latLngListOV.getLatLngs().toString());
}
private String getDateToString(long millseconds) {
Date d = new Date(millseconds);
return sf.format(d);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.zhang.baselib.BaseApplication;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
public class BinnerContentActivity extends BaseActivity {
@BindView(R.id.tv_title)
TextView mTvTitle;
@BindView(R.id.toolbar)
Toolbar mToolbar;
@BindView(R.id.rl_web)
RelativeLayout mRlWeb;
private Unbinder mBind;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_binner_content);
mBind = ButterKnife.bind(this);
initData();
setListener();
}
private void setListener() {
mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
finish();
}
});
}
private void initData() {
Intent intent = getIntent();
final String title = intent.getStringExtra("title");
String toContent = intent.getStringExtra("toContent");
mTvTitle.setText(title);
WebView webView = new WebView(BaseApplication.getApplictaion());
webView.setLayoutParams(new RelativeLayout.LayoutParams(-1, -1));
WebSettings settings = webView.getSettings();
settings.setSupportZoom(true);
settings.setMediaPlaybackRequiresUserGesture(true);
settings.setUseWideViewPort(true);
settings.setDefaultFontSize(16);
settings.setLayoutAlgorithm(WebSettings.LayoutAlgorithm.NARROW_COLUMNS);
settings.setLoadWithOverviewMode(true);
settings.setJavaScriptEnabled(true);
webView.loadUrl(toContent);
webView.setWebViewClient(new WebViewClient());
mRlWeb.addView(webView);
}
@Override
protected void onPause() {
super.onPause();
}
@Override
protected void onResume() {
super.onResume();
}
@Override
protected void onDestroy() {
super.onDestroy();
mRlWeb.removeAllViews();
mBind.unbind();
}
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.support.annotation.Nullable;
import android.text.Html;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.chad.library.adapter.base.BaseViewHolder;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.GreenCase;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.List;
/**
* Created by zhao on 2016/9/12.
*/
public class CaseListAdapter extends BaseQuickAdapter<GreenCase, BaseViewHolder> {
private DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm");
public CaseListAdapter(int layoutResId, @Nullable List<GreenCase> data) {
super(layoutResId, data);
}
@Override
protected void convert(BaseViewHolder helper, GreenCase item) {
helper.setText(R.id.tv_caseid, Html.fromHtml("<font color=\"#98CF60\">案件编号:</font>"+item.getAJID()));
helper.setText(R.id.tv_casename, Html.fromHtml("<font color=\"#98CF60\">案件名称:</font>"+item.getAJMC()));
helper.setText(R.id.tv_casetime, Html.fromHtml("<font color=\"#98CF60\">受理时间:</font>"+dateFormat.format(item.getSLRQ())));
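// Status codes handled below (the labels shown to the user are Chinese): SL = accepted,
// CBBDCQZ = undertaken with evidence collection, ZB = transferred, LA = case filed,
// AJSC = case review, BYCF = no penalty, WSZL = supplement materials, YS = referred,
// CFGZHTZ = penalty notice or hearing, TZ = hearing, FH = re-examination,
// CFJD = penalty decision, ZX = enforcement, JABGD = closed and archived.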
switch (item.getSLXX_ZT()) {
case "SL":
helper.setText(R.id.tv_casestate, Html.fromHtml("<font color=\"#98CF60\">状态:</font>受理"));
break;
case "CBBDCQZ":
helper.setText(R.id.tv_casestate, Html.fromHtml("<font color=\"#98CF60\">状态:</font>承办并调查取证"));
break;
case "ZB":
helper.setText(R.id.tv_casestate, Html.fromHtml("<font color=\"#98CF60\">状态:</font>转办"));
break;
case "LA":
helper.setText(R.id.tv_casestate, Html.fromHtml("<font color=\"#98CF60\">状态:</font>立案"));
break;
case "AJSC":
helper.setText(R.id.tv_casestate, Html.fromHtml("<font color=\"#98CF60\">状态:</font>案件审查"));
break;
case "BYCF":
helper.setText(R.id.tv_casestate, Html.fromHtml("<font color=\"#98CF60\">状态:</font>不予处罚"));
break;
case "WSZL":
helper.setText(R.id.tv_casestate, Html.fromHtml("<font color=\"#98CF60\">状态:</font>完善资料"));
break;
case "YS":
helper.setText(R.id.tv_casestate, Html.fromHtml("<font color=\"#98CF60\">状态:</font>移送"));
break;
case "CFGZHTZ":
helper.setText(R.id.tv_casestate, Html.fromHtml("<font color=\"#98CF60\">状态:</font>处罚告知或听证"));
break;
case "TZ":
helper.setText(R.id.tv_casestate, Html.fromHtml("<font color=\"#98CF60\">状态:</font>听证"));
break;
case "FH":
helper.setText(R.id.tv_casestate, Html.fromHtml("<font color=\"#98CF60\">状态:</font>复核"));
break;
case "CFJD":
helper.setText(R.id.tv_casestate, Html.fromHtml("<font color=\"#98CF60\">状态:</font>处罚决定"));
break;
case "ZX":
helper.setText(R.id.tv_casestate, Html.fromHtml("<font color=\"#98CF60\">状态:</font>执行"));
break;
case "JABGD":
helper.setText(R.id.tv_casestate, Html.fromHtml("<font color=\"#98CF60\">状态:</font>结案并归档"));
break;
default:
helper.setText(R.id.tv_casestate, Html.fromHtml("<font color=\"#98CF60\">状态:</font>未知"));
break;
}
}
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.support.annotation.Nullable;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.chad.library.adapter.base.BaseViewHolder;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.SourceArrayOV;
import java.util.List;
/**
* Created by Administrator on 2018/5/25/025.
*/
public class SourceArrayRecyAdapter extends BaseQuickAdapter<SourceArrayOV, BaseViewHolder> {
public SourceArrayRecyAdapter(int layoutResId, @Nullable List<SourceArrayOV> data) {
super(layoutResId, data);
}
@Override
protected void convert(BaseViewHolder helper, SourceArrayOV item) {
helper.setText(R.id.tv_sub, item.getSource());
}
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.support.annotation.Nullable;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.chad.library.adapter.base.BaseViewHolder;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.ProblemBean;
import java.util.List;
/**
* Created by Administrator on 2017/11/14.
*/
public class AskAdapter extends BaseQuickAdapter<ProblemBean, BaseViewHolder> {
public AskAdapter(int layoutResId, @Nullable List<ProblemBean> data) {
super(layoutResId, data);
// micImages = new Drawable[] { MyApp.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_01),
// MyApp.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_02),
// MyApp.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_03),
// MyApp.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_04),
// MyApp.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_05),
// MyApp.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_06),
// MyApp.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_07),
// MyApp.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_08),
// MyApp.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_09),
// MyApp.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_10),
// MyApp.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_11),
// MyApp.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_12),
// MyApp.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_13),
// MyApp.getApplictaion().getResources().getDrawable(com.hyphenate.easeui.R.drawable.ease_record_animate_14), };
//
// voiceRecorder = new EaseVoiceRecorder(micImageHandler);
}
// private PowerManager.WakeLock wakeLock;
// private EaseVoiceRecorder voiceRecorder;
// private Drawable[] micImages;
// private ImageView mMic_image;
// private Handler micImageHandler = new Handler() {
// @Override
// public void handleMessage(android.os.Message msg) {
// // change image
// mMic_image.setImageDrawable(micImages[msg.what]);
// }
// };
@Override
protected void convert(BaseViewHolder helper, ProblemBean item) {
helper.setText(R.id.tv_ask_content,"问:" + item.getAsk());
helper.setText(R.id.et_answer_content,"");
}
// @Override
// public View getView( int position, View contentView, ViewGroup viewGroup) {
// ViewHolder viewHolder;
// if (contentView == null) {
// viewHolder = new ViewHolder();
// contentView = View.inflate(BaseApplication.getApplictaion(), R.layout.survey_ask_item, null);
// viewHolder.tv_ask_content = contentView.findViewById(R.id.tv_ask_content);
// viewHolder.et_answer_content = contentView.findViewById(R.id.et_answer_content);
// contentView.setTag(viewHolder);
// }
//
// viewHolder = (ViewHolder) contentView.getTag();
// viewHolder.et_answer_content.setText("");
//// ImageView iv_record = contentView.findViewById(R.id.iv_record);
// final ProblemBean problemBean = problemContent.get(position);
// viewHolder.tv_ask_content.setText("问:" + problemBean.getAsk());
//// iv_record.setOnClickListener(new View.OnClickListener() {
//// @Override
//// public void onClick(View view) {
//// AlertDialog.Builder builder = new AlertDialog.Builder(mActivity);
//// View inflate = View.inflate(mActivity, R.layout.voice_recorder_dialog, null);
//// mMic_image = inflate.findViewById(R.id.mic_image);
//// builder.setView(inflate);
//// builder.setCancelable(false);
//// builder.setNegativeButton("取消", new DialogInterface.OnClickListener() {
//// @Override
//// public void onClick(DialogInterface dialogInterface, int i) {
//// stopRecoding();
//// dialogInterface.dismiss();
//// }
//// });
//// builder.setPositiveButton("停止", new DialogInterface.OnClickListener() {
//// @Override
//// public void onClick(DialogInterface dialogInterface, int i) {
////
//// try {
//// int length = stopRecoding();
//// if (length > 0) {
//// System.out.println(getVoiceFilePath() + "<<<<<<<<<<<<<<<");
////
//// } else if (length == EMError.FILE_INVALID) {
//// RxToast.error(MyApp.getApplictaion(), "录音失败", Toast.LENGTH_SHORT).show();
//// } else {
//// RxToast.warning(MyApp.getApplictaion(), "录音时间太短", Toast.LENGTH_SHORT).show();
//// }
//// } catch (Exception e) {
//// e.printStackTrace();
//// RxToast.error(MyApp.getApplictaion(), "录音失败", Toast.LENGTH_SHORT).show();
//// }
////
////
//// dialogInterface.dismiss();
//// }
//// });
////
//// AlertDialog alertDialog = builder.create();
//// alertDialog.show();
//// WindowManager.LayoutParams params =
//// alertDialog.getWindow().getAttributes();
//// params.width = 450;
//// params.height = 400;
//// alertDialog.getWindow().setAttributes(params);
//// startRecording();
//// }
//// });
//
// return contentView;
// }
// private int stopRecoding() {
//// if (wakeLock.isHeld()){
////
//// wakeLock.release();
//// }
// return voiceRecorder.stopRecoding();
// }
// private void startRecording() {
// if (!EaseCommonUtils.isSdcardExist()) {
// RxToast.error(BaseApplication.getApplictaion(), "请插上sd卡", Toast.LENGTH_SHORT).show();
// return;
// }
// try {
//// wakeLock.acquire();
// voiceRecorder.startRecording(BaseApplication.getApplictaion());
// } catch (Exception e) {
// e.printStackTrace();
//// if (wakeLock.isHeld()){
////
//// wakeLock.release();
//// }
// if (voiceRecorder != null) {
//
// voiceRecorder.discardRecording();
// }
// RxToast.error(BaseApplication.getApplictaion(), "录音失败,请重试!", Toast.LENGTH_SHORT).show();
// return;
// }
// }
// public String getVoiceFilePath() {
// return voiceRecorder.getVoiceFilePath();
// }
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.fragments;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.zhang.baselib.ui.views.RxDialogLoading;
import com.zhang.baselib.ui.views.RxDialogSure;
import com.zhang.baselib.ui.views.RxDialogSureCancel;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.adapter.DocumentaryEvidenceListRecyAdapter;
import com.zhang.okinglawenforcementphone.beans.GreenCase;
import com.zhang.okinglawenforcementphone.beans.GreenEvidence;
import com.zhang.okinglawenforcementphone.beans.GreenEvidenceMedia;
import com.zhang.okinglawenforcementphone.beans.GreenEvidenceSZOV;
import com.zhang.okinglawenforcementphone.beans.GreenMedia;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.mvp.contract.UploadEvidenceContract;
import com.zhang.okinglawenforcementphone.mvp.presenter.UploadEvidencePresenter;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Documentary (written) evidence list.
* A simple {@link Fragment} subclass.
*/
public class DocumentaryEvidenceListFragment extends Fragment {
private GreenCase mycase;
private RecyclerView ryMain;
private ArrayList<GreenEvidence> evidences = new ArrayList<>();
private SimpleDateFormat mSimpleDateFormat;
private DocumentaryEvidenceListRecyAdapter mDocumentaryEvidenceListRecyAdapter;
private RxDialogLoading mRxDialogLoading;
private static final String ARG_PARAM1 = "param1";
private static final String ARG_PARAM2 = "param2";
private String mParam2;
private RxDialogSureCancel mRxDialogSureCancel;
private View mInflate;
private List<GreenEvidenceMedia> mPicGreenMedias = new ArrayList<>();
private UploadEvidencePresenter mUploadEvidencePresenter;
private DocumentaryEvidenceFragment mDocumentaryEvidenceFragment;
public DocumentaryEvidenceListFragment() {
// Required empty public constructor
}
public static DocumentaryEvidenceListFragment newInstance(String param2) {
DocumentaryEvidenceListFragment fragment = new DocumentaryEvidenceListFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM2, param2);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mParam2 = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
mSimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm");
if (mInflate == null) {
mInflate = inflater.inflate(R.layout.fragment_documentary_evidence_list, container, false);
}
EventBus.getDefault().register(this);
initView(mInflate);
return mInflate;
}
@Subscribe(threadMode = ThreadMode.MAIN)
public void handleEvent1(GreenEvidenceSZOV event) {
Log.i("Oking", event.toString());
if (event.getType() == 2) { // a new evidence item was added
evidences.add(event.getGreenEvidence());
mDocumentaryEvidenceListRecyAdapter.setNewData(evidences);
} else {
loadEvidence();
}
}
@Override
public void onDestroyView() {
super.onDestroyView();
EventBus.getDefault().unregister(this);
}
public void initView(View rootView) {
ryMain = rootView.findViewById(R.id.ry_main);
ryMain.setLayoutManager(new LinearLayoutManager(getContext(), LinearLayoutManager.VERTICAL, false));
if (mDocumentaryEvidenceListRecyAdapter == null) {
mDocumentaryEvidenceListRecyAdapter = new DocumentaryEvidenceListRecyAdapter(R.layout.list_item_documentaryevidence, null);
ryMain.setAdapter(mDocumentaryEvidenceListRecyAdapter);
}
loadEvidence();
mDocumentaryEvidenceListRecyAdapter.setOnItemChildClickListener(new BaseQuickAdapter.OnItemChildClickListener() {
@Override
public void onItemChildClick(final BaseQuickAdapter adapter, View view, final int position) {
final List<GreenEvidence> datas = adapter.getData();
final GreenEvidence greenEvidence = datas.get(position);
switch (view.getId()) {
case R.id.upload_button: // upload
Log.i("Oking", "点击的:position:" + position + "ID:" + greenEvidence.getId() + "###" + greenEvidence.toString());
if (mRxDialogLoading == null) {
mRxDialogLoading = new RxDialogLoading(getActivity(), false, new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialogInterface) {
dialogInterface.cancel();
}
});
mRxDialogLoading.setLoadingText("上传数据中...");
}
mRxDialogLoading.show();
saveEvidence(position, greenEvidence);
break;
case R.id.delete_button: // delete
if (mRxDialogSureCancel == null) {
mRxDialogSureCancel = new RxDialogSureCancel(getActivity());
mRxDialogSureCancel.setTitle("提示");
mRxDialogSureCancel.setContent("是否删除证据?");
}
mRxDialogSureCancel.getTvSure().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
GreenDAOManager.getInstence().getDaoSession().getGreenEvidenceDao().delete(greenEvidence);
mDocumentaryEvidenceListRecyAdapter.remove(position);
mRxDialogSureCancel.cancel();
}
});
mRxDialogSureCancel.getTvCancel().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mRxDialogSureCancel.cancel();
}
});
mRxDialogSureCancel.show();
break;
case R.id.edit_button: // view / edit
if ("查看".contentEquals(((Button) view).getText())) { // compare as CharSequence; getText() may not return a String
FragmentTransaction ft = getFragmentManager().beginTransaction();
mDocumentaryEvidenceFragment = DocumentaryEvidenceFragment.newInstance(0);
mDocumentaryEvidenceFragment.setGreenCase(mycase, evidences.get(position));
ft.add(R.id.rl_sub_content, mDocumentaryEvidenceFragment, "documentaryEvidenceFragment").commit();
} else {
FragmentTransaction ft = getFragmentManager().beginTransaction();
mDocumentaryEvidenceFragment = DocumentaryEvidenceFragment.newInstance(1);
mDocumentaryEvidenceFragment.setGreenCase(mycase, evidences.get(position));
ft.add(R.id.rl_sub_content, mDocumentaryEvidenceFragment, "documentaryEvidenceFragment").commit();
}
break;
default:
break;
}
}
});
}
/**
* Upload the evidence record and its picture attachments to the server.
*
* @param position position of the item in the adapter
* @param evidence the evidence record to upload
*/
private void saveEvidence(final int position, final GreenEvidence evidence) {
Map<String, Object> fields = new HashMap<>();
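// Field keys are pinyin abbreviations matching the GreenEvidence getters used below,
// e.g. zjid = evidence id, ajid = case id, zjlx = evidence type, zjmc = evidence name,
// zjly = evidence source, zjnr = evidence content, cjsj/cjr/cjdd = collection time,
// collector and place, jzr = witness, bz = remark, scr/scsj = uploader and upload time,
// zt = status, wsid = document id; values are copied only when non-null.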
if (evidence.getZJID() != null) {
fields.put("zjid", evidence.getZJID());
}
if (evidence.getAJID() != null) {
fields.put("ajid", evidence.getAJID());
}
if (evidence.getZJLX() != null) {
fields.put("zjlx", evidence.getZJLX());
}
if (evidence.getZJMC() != null) {
fields.put("zjmc", evidence.getZJMC());
}
if (evidence.getZJLY() != null) {
fields.put("zjly", evidence.getZJLY());
}
if (evidence.getZJNR() != null) {
fields.put("zjnr", evidence.getZJNR());
}
if (evidence.getSL() != null) {
fields.put("sl", evidence.getSL());
}
if (((Long) evidence.getCJSJ()) != null) {
fields.put("cjsj", mSimpleDateFormat.format(evidence.getCJSJ()));
}
if (evidence.getCJR() != null) {
fields.put("cjr", evidence.getCJR());
}
if (evidence.getCJDD() != null) {
fields.put("cjdd", evidence.getCJDD());
}
if (evidence.getJZR() != null) {
fields.put("jzr", evidence.getJZR());
}
if (evidence.getDW() != null) {
fields.put("dw", evidence.getDW());
}
if (evidence.getBZ() != null) {
fields.put("bz", evidence.getBZ());
}
if (OkingContract.CURRENTUSER != null) {
fields.put("scr", OkingContract.CURRENTUSER.getUserName());
}
fields.put("scsj", mSimpleDateFormat.format(System.currentTimeMillis()));
if (evidence.getZT() != null) {
fields.put("zt", evidence.getZT());
}
if (evidence.getWSID() != null) {
fields.put("wsid", evidence.getWSID());
}
if (evidence.getLXMC() != null) {
fields.put("lxmc", evidence.getLXMC());
}
if (evidence.getZJLYMC() != null) {
fields.put("zjlymc", evidence.getZJLYMC());
}
if (evidence.getYS() != null) {
fields.put("ys", evidence.getYS());
}
List<GreenEvidenceMedia> greenMedias = evidence.getGreenMedia();
for (GreenEvidenceMedia greenMedia : greenMedias) {
if (greenMedia.getType() == 1) {
mPicGreenMedias.add(greenMedia);
}
}
if (mUploadEvidencePresenter == null) {
mUploadEvidencePresenter = new UploadEvidencePresenter(new UploadEvidenceContract.View() {
@Override
public void uploadEvidenceSucc(String result) {
mRxDialogLoading.cancel();
checkChangeState(position, evidence);
}
@Override
public void uploadEvidenceFail(Throwable ex) {
mRxDialogLoading.cancel();
}
});
}
mUploadEvidencePresenter.uploadEvidence(fields, evidence, mPicGreenMedias);
}
public void loadEvidence() {
evidences.clear();
if (mycase != null) {
mycase.resetGreenEvidence();
for (int i = 0; i < mycase.getGreenEvidence().size(); i++) {
if ("SZ".equals(mycase.getGreenEvidence().get(i).getZJLX())) {
evidences.add(mycase.getGreenEvidence().get(i));
}
}
mDocumentaryEvidenceListRecyAdapter.setNewData(evidences);
}
}
private void checkChangeState(final int position, final GreenEvidence evidence) {
evidence.setIsUpload(true);
GreenDAOManager.getInstence().getDaoSession().getGreenEvidenceDao().update(evidence);
GreenDAOManager.getInstence().getDaoSession().getGreenCaseDao().update(mycase);
mRxDialogLoading.cancel();
getActivity().finish();
}
public void setGreenCase(GreenCase greenCase) {
this.mycase = greenCase;
}
}
<file_sep>package com.zhang.baselib.http.interceptor;
import android.util.Log;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.utils.NetUtil;
import java.io.IOException;
import okhttp3.CacheControl;
import okhttp3.Interceptor;
import okhttp3.Request;
import okhttp3.Response;
/**
* Created by Administrator on 2018/3/6.
* Cache-control interceptor.
*/
public class CacheControlInterceptor implements Interceptor {
// short cache: one minute
public static final int CACHE_AGE_SHORT = 60;
// long cache: valid for one day
public static final int CACHE_STALE_LONG = 60 * 60 * 24;
@Override
public Response intercept(Chain chain) throws IOException {
Request request = chain.request();
if (!NetUtil.isConnected(BaseApplication.getApplictaion())) {
// no network: read from the cache only
// a custom request header is attached so the response side can inspect it and apply a
// different cache strategy per request
request = request.newBuilder()
.header("head-request", request.toString())
.cacheControl(CacheControl.FORCE_CACHE)
.build();
}
Response response = chain.proceed(request);
if (NetUtil.isConnected(BaseApplication.getApplictaion())) {
// network available: cache responses briefly; per-interface @Headers settings could
// also be read and unified here
Log.e("Interceptor", "response: " + response.toString());
// add the Cache-Control header, and removeHeader("Pragma") so the cache takes effect
return response.newBuilder()
.header("Cache-Control", "public, max-age=" + CACHE_AGE_SHORT)
.removeHeader("Pragma")
.build();
} else {
Log.e("Interceptor", "net not connect");
return response.newBuilder()
.header("Cache-Control", "public,only-if-cached, max-stale=" + CACHE_STALE_LONG)
.removeHeader("Pragma")
.build();
}
}
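// A minimal wiring sketch (an assumption for illustration, not code from this repo):
// for the Cache-Control rewriting above to influence OkHttp's cache, the interceptor is
// typically registered as a network interceptor together with a Cache, for example:
//
// Cache cache = new Cache(new File(context.getCacheDir(), "http"), 10 * 1024 * 1024);
// OkHttpClient client = new OkHttpClient.Builder()
// .cache(cache)
// .addInterceptor(new CacheControlInterceptor()) // rewrites the request when offline
// .addNetworkInterceptor(new CacheControlInterceptor()) // rewrites the response headers
// .build();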
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.fragments;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.Switch;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.GreenMissionLog;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
public class TaskPatrolFragment extends Fragment {
// TODO: Rename parameter arguments, choose names that match
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
private static final String ARG_PARAM1 = "param1";
private static final String ARG_PARAM2 = "param2";
@BindView(R.id.sw_summary)
Switch mSwSummary;
@BindView(R.id.summary_editText)
EditText mSummaryEditText;
Unbinder unbinder;
// TODO: Rename and change types of parameters
private String mParam1;
private String mParam2;
private View mInflate;
private GreenMissionLog mGreenMissionLog;
private GreenMissionTask mGreenMissionTask;
private boolean mSummarySwisopen = false;
public TaskPatrolFragment() {
// Required empty public constructor
}
public static TaskPatrolFragment newInstance(String param1, String param2) {
TaskPatrolFragment fragment = new TaskPatrolFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM1, param1);
args.putString(ARG_PARAM2, param2);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mParam1 = getArguments().getString(ARG_PARAM1);
mParam2 = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
if (mInflate == null) {
mInflate = inflater.inflate(R.layout.fragment_task_patrol, container, false);
}
unbinder = ButterKnife.bind(this, mInflate);
initData();
        setListener();
return mInflate;
}
    private void setListener() {
mSwSummary.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean isChecked) {
mSummarySwisopen = isChecked;
if (isChecked) {
mSummaryEditText.setEnabled(false);
mSummaryEditText.setText("");
} else {
mSummaryEditText.setEnabled(true);
}
}
});
if (mGreenMissionTask.getStatus().equals("5")) {
mSwSummary.setEnabled(false);
mSummaryEditText.setFocusable(false);
} else if (mGreenMissionTask.getStatus().equals("9")) {
mSwSummary.setEnabled(true);
}
}
private void initData() {
mSummaryEditText.setText(mGreenMissionLog.getPatrol());
}
public void setMission(GreenMissionTask mission) {
mGreenMissionTask = mission;
}
public void setGreenMissionLog(GreenMissionLog greenMissionLog) {
mGreenMissionLog = greenMissionLog;
}
@Override
public void onDestroyView() {
super.onDestroyView();
unbinder.unbind();
}
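    /**
     * Persists the summary switch state and the patrol summary text back into the
     * GreenDAO session. Presumably invoked by the hosting activity when the log is
     * saved; that calling convention is an assumption, since no caller appears in
     * this class.
     */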
public void savePatrol() {
mGreenMissionLog.setSummarySwisopen(mSummarySwisopen);
mGreenMissionLog.setPatrol(mSummaryEditText.getText().toString().trim());
GreenDAOManager.getInstence().getDaoSession().getGreenMissionLogDao().update(mGreenMissionLog);
}
}
<file_sep>package com.zhang.baselib;
import android.app.Application;
/**
* Created by Administrator on 2018/3/14.
*/
public class BaseApplication extends Application{
private static BaseApplication baseApplication;
@Override
public void onCreate() {
super.onCreate();
baseApplication = this;
}
public static Application getApplictaion() {
return baseApplication;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.presenter;
import com.zhang.okinglawenforcementphone.mvp.contract.TaskBackContract;
import com.zhang.okinglawenforcementphone.mvp.model.TaskBackModel;
import java.util.Map;
import retrofit2.http.FieldMap;
/**
* Created by Administrator on 2018/6/7/007.
*/
public class TaskBackPresenter implements TaskBackContract.Presenter {
private TaskBackContract.Model mModel;
private TaskBackContract.View mView;
public TaskBackPresenter(TaskBackContract.View view) {
mView = view;
mModel = new TaskBackModel(this);
}
@Override
public void taskBack(Map<String, Object> params) {
mModel.taskBack(params);
}
@Override
public void taskBackSucc(String result) {
mView.taskBackSucc(result);
}
@Override
public void taskBackFail(Throwable ex) {
mView.taskBackFail(ex);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.support.design.widget.TextInputEditText;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.widget.Button;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.DefaultContants;
import com.zhang.baselib.ui.views.RxToast;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTaskDao;
import com.zhang.okinglawenforcementphone.beans.NewsTaskOV;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.beans.UpdateGreenMissionTaskOV;
import com.zhang.okinglawenforcementphone.mvp.contract.UpdateMissionStateContract;
import com.zhang.okinglawenforcementphone.mvp.presenter.UpdateMissionStatePresenter;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;
import org.json.JSONException;
import org.json.JSONObject;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import butterknife.Unbinder;
public class MissionActivity extends BaseActivity {
private DateFormat mDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm");
@BindView(R.id.toolbar)
Toolbar mToolbar;
@BindView(R.id.list_item_missionTitle)
TextInputEditText tv_taskname;
@BindView(R.id.list_item_missionDate)
TextInputEditText tv_entime;
@BindView(R.id.list_item_missionState)
TextInputEditText tv_state;
@BindView(R.id.publisher_tv)
TextInputEditText tv_sued_people;
@BindView(R.id.approved_person_tv)
TextInputEditText tv_approver_people;
@BindView(R.id.begin_time_tv)
TextInputEditText tv_statime;
@BindView(R.id.mission_type_tv)
TextInputEditText tv_type;
@BindView(R.id.list_item_missionMember)
TextInputEditText tv_members;
@BindView(R.id.list_item_missionDetail)
TextInputEditText tv_patrol_area;
@BindView(R.id.list_item_missionRecord)
Button list_item_missionRecord;
private GreenMissionTask mGreenMissionTask;
private Unbinder mBind;
private int mPosition;
private UpdateMissionStatePresenter mUpdateMissionStatePresenter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_mission);
mBind = ButterKnife.bind(this);
EventBus.getDefault().register(this);
initView();
initData();
}
private void initView() {
Intent intent = getIntent();
mPosition = intent.getIntExtra("position", -1);
long id = intent.getLongExtra("id", -1L);
if (id != -1L) {
mGreenMissionTask = GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao().queryBuilder().where(GreenMissionTaskDao.Properties.Id.eq(id)).unique();
}
mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
finish();
}
});
String groupName = mGreenMissionTask.getTask_name();
String endTime = mDateFormat.format(mGreenMissionTask.getEnd_time());
tv_taskname.setText(groupName);
tv_entime.setText(endTime);
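        // Task status codes as used on this screen:
        // "0"/"1"/"2" = no personnel assigned, "3" = assigned and waiting to start,
        // "4" = patrol in progress, "100" = patrol finished, "5" = reported, "9" = returned for revision.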
switch (mGreenMissionTask.getStatus()) {
case "0":
case "1":
case "2":
list_item_missionRecord.setText("NONE");
list_item_missionRecord.setEnabled(false);
tv_state.setText("未安排人员");
tv_state.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.colorMain8));
break;
case "3":
list_item_missionRecord.setEnabled(true);
list_item_missionRecord.setText("开始任务");
tv_state.setText("已安排,待执行");
tv_state.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.colorMain7));
break;
case "4":
list_item_missionRecord.setEnabled(true);
list_item_missionRecord.setText("任务日志");
tv_state.setText("巡查中");
tv_state.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.colorMain5));
break;
case "100":
list_item_missionRecord.setEnabled(true);
list_item_missionRecord.setText("任务日志");
tv_state.setText("巡查结束");
tv_state.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.law_enforcement_bt_enable));
break;
case "5":
list_item_missionRecord.setEnabled(true);
list_item_missionRecord.setText("任务日志");
tv_state.setText("已上报");
tv_state.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.task_other_tx));
break;
case "9":
list_item_missionRecord.setEnabled(true);
list_item_missionRecord.setText("任务日志");
tv_state.setText("退回修改");
tv_state.setTextColor(Color.GRAY);
break;
default:
break;
}
tv_sued_people.setText(mGreenMissionTask.getPublisher_name());
tv_approver_people.setText(mGreenMissionTask.getApproved_person_name());
tv_statime.setText(mDateFormat.format(mGreenMissionTask.getBegin_time()));
tv_type.setText(mGreenMissionTask.getTypename());
String memberStr = "";
        // Clear the DAO cache so members are re-read from the database.
mGreenMissionTask.resetMembers();
if (mGreenMissionTask.getMembers() != null) {
for (int j = 0; j < mGreenMissionTask.getMembers().size(); j++) {
memberStr += mGreenMissionTask.getMembers().get(j).getUsername() + ",";
}
}
if (!"".equals(memberStr)) {
memberStr = memberStr.substring(0, memberStr.length() - 1);
}
tv_members.setText(memberStr);
if (mGreenMissionTask.getRwqyms() != null) {
tv_patrol_area.setText(mGreenMissionTask.getRwqyms());
}
}
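    // Refreshes the status button and label when another component posts an
    // UpdateGreenMissionTaskOV event; mirrors the status switch in initView().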
@Subscribe(threadMode = ThreadMode.MAIN)
public void handleEvent1(UpdateGreenMissionTaskOV event) {
switch (event.getMissionTask().getStatus()) {
case "0":
case "1":
case "2":
list_item_missionRecord.setText("NONE");
list_item_missionRecord.setEnabled(false);
tv_state.setText("未安排人员");
tv_state.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.colorMain8));
break;
case "3":
list_item_missionRecord.setEnabled(true);
list_item_missionRecord.setText("开始任务");
tv_state.setText("已安排,待执行");
tv_state.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.colorMain7));
break;
case "4":
list_item_missionRecord.setEnabled(true);
list_item_missionRecord.setText("任务日志");
tv_state.setText("巡查中");
tv_state.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.colorMain5));
break;
case "100":
list_item_missionRecord.setEnabled(true);
list_item_missionRecord.setText("任务日志");
tv_state.setText("巡查结束");
tv_state.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.law_enforcement_bt_enable));
break;
case "5":
list_item_missionRecord.setEnabled(true);
list_item_missionRecord.setText("任务日志");
tv_state.setText("已上报");
tv_state.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.task_other_tx));
break;
case "9":
list_item_missionRecord.setEnabled(true);
list_item_missionRecord.setText("任务日志");
tv_state.setText("退回修改");
tv_state.setTextColor(Color.GRAY);
break;
default:
break;
}
}
private void initData() {
}
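    // Button behaviour: "开始任务" first validates the planned time window, then (when the
    // HTTP session is active) reports status "4" to the server before updating the local
    // record; "任务日志" simply opens MissionRecorActivity for this task.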
@OnClick(R.id.list_item_missionRecord)
public void onClick(View view) {
if ("开始任务".equals(list_item_missionRecord.getText().toString())) {
if (mGreenMissionTask.getBegin_time() > System.currentTimeMillis()) {
RxToast.warning("未到达任务开始时间,不能开始任务!");
return;
}
Calendar c = Calendar.getInstance();
c.setTime(new Date(mGreenMissionTask.getEnd_time()));
if (c.getTime().getTime() < System.currentTimeMillis()) {
RxToast.warning("超出任务的预计结束时间,不能开始任务!");
return;
}
if (DefaultContants.ISHTTPLOGIN) {
if (mUpdateMissionStatePresenter ==null){
mUpdateMissionStatePresenter = new UpdateMissionStatePresenter(new UpdateMissionStateContract.View() {
@Override
public void updateMissionStateSucc(String result) {
try {
JSONObject object = new JSONObject(result);
int code = object.getInt("code");
if (code == 0) {
mGreenMissionTask.setExecute_start_time(System.currentTimeMillis());
mGreenMissionTask.setStatus("4");
GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao().update(mGreenMissionTask);
list_item_missionRecord.setText("任务日志");
tv_state.setText("巡查中");
tv_state.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.colorMain5));
if (mPosition != -1) {
UpdateGreenMissionTaskOV updateGreenMissionTaskOV = new UpdateGreenMissionTaskOV();
updateGreenMissionTaskOV.setType(200);
updateGreenMissionTaskOV.setPosition(mPosition);
updateGreenMissionTaskOV.setMissionTask(mGreenMissionTask);
EventBus.getDefault().post(updateGreenMissionTaskOV);
}
} else {
RxToast.error(object.getString("msg"));
}
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public void updateMissionStateFail(Throwable ex) {
mGreenMissionTask.setExecute_start_time(System.currentTimeMillis());
mGreenMissionTask.setStatus("4");
GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao().update(mGreenMissionTask);
list_item_missionRecord.setText("任务日志");
tv_state.setText("巡查中");
tv_state.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.colorMain5));
}
});
}
mUpdateMissionStatePresenter.updateMissionState(mGreenMissionTask.getTaskid(), OkingContract.SDF.format(System.currentTimeMillis())
, "", 4);
EventBus.getDefault().post(new NewsTaskOV(0,null,mGreenMissionTask));
} else {
mGreenMissionTask.setExecute_start_time(System.currentTimeMillis());
mGreenMissionTask.setStatus("4");
GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao().update(mGreenMissionTask);
}
} else if ("任务日志".equals(list_item_missionRecord.getText().toString())) {
Intent intent = new Intent(MissionActivity.this, MissionRecorActivity.class);
intent.putExtra("position", mPosition);
intent.putExtra("taskId", mGreenMissionTask.getTaskid());
startActivity(intent);
}
}
@Override
protected void onDestroy() {
super.onDestroy();
mBind.unbind();
EventBus.getDefault().unregister(this);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.app.Activity;
import android.net.Uri;
import android.widget.ImageView;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.chad.library.adapter.base.BaseViewHolder;
import com.zhang.baselib.GlideApp;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.GreenMedia;
import java.util.List;
/**
* Created by zhao on 2016/10/9.
*/
public class PicSimpleAdapter extends BaseQuickAdapter<GreenMedia, BaseViewHolder> {
private Activity activity;
private boolean canAdd;
private String typeName;
public PicSimpleAdapter(int layoutResId, List<GreenMedia> data, Activity recorActivity, boolean canAdd, String typeName) {
super(layoutResId, data);
this.activity = recorActivity;
this.canAdd = canAdd;
this.typeName = typeName;
}
@Override
protected void convert(BaseViewHolder helper, final GreenMedia item) {
ImageView sdv = helper.getView(R.id.sdv);
String path = item.getPath();
final Uri uri = Uri.parse(path);
GlideApp.with(activity)
.load(uri)
.placeholder(R.mipmap.ic_launcher_logo)
.error(R.drawable.loadfail)
.into(sdv);
helper.addOnClickListener(R.id.sdv);
if (canAdd) {
helper.addOnLongClickListener(R.id.sdv);
}
String picName = uri.getPath();
String s = picName.substring(picName.lastIndexOf("/") + 1, picName.length());
helper.setText(R.id.tv, s.split("_")[0] + typeName);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.beans;
import com.amap.api.maps.model.LatLng;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
/**
* Created by Administrator on 2018/4/18.
*/
public class OkingContract {
public static SimpleDateFormat SDF = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
public static GreenUser CURRENTUSER = null;
public static String[] LOCATIONRESULT = new String[8];
public static final String UPDATE_GPS_STATE_UI = "oking.updategpsstate";
public static List<String> MARQUEEVIEWINFO = new ArrayList<>();
static {
MARQUEEVIEWINFO.add("");
MARQUEEVIEWINFO.add("");
MARQUEEVIEWINFO.add("");
}
    public static final int PICTYPE = 1;          // picture
    public static final int RECORDSCREENTYPE = 2; // video
    public static final int STARTADNEDD = 0;      // start point and end point
public static final LatLng PAD_1 = new LatLng(22.545173, 113.360352);
public static final LatLng PAD_2 = new LatLng(22.5225, 113.384385);
public static final LatLng PAD_3 = new LatLng(22.481822, 113.403293);
public static final LatLng PAD_4 = new LatLng(22.45648, 113.411084);
public static final LatLng PAD_5 = new LatLng(23.146249, 113.333418);
public static final LatLng PAD_6 = new LatLng(23.145603, 113.334106);
public static final LatLng PAD_7 = new LatLng(23.145509, 113.333462);
public static final LatLng PAD_8 = new LatLng(23.146984, 113.332738);
public static final LatLng PAD_9 = new LatLng(23.146436, 113.334202);
public static final LatLng MOVECENTER = new LatLng(23.216240864201108, 112.80308326186321);
public static final LatLng STARTLATLNG = new LatLng(23.234459161444516, 112.8114729970563);
public static final LatLng ENDLATLNG = new LatLng(23.216927695817933, 112.80293250504201);
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.os.Build;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.view.View;
import android.widget.EditText;
import android.widget.TextView;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.ui.views.RxToast;
import com.zhang.baselib.utils.DeviceUtil;
import com.zhang.baselib.utils.NetUtil;
import com.zhang.baselib.utils.RegUtil;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.SendEmailManager;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import com.zhy.view.flowlayout.FlowLayout;
import com.zhy.view.flowlayout.TagAdapter;
import com.zhy.view.flowlayout.TagFlowLayout;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Set;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import butterknife.Unbinder;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.schedulers.Schedulers;
/**
 * Feedback (user suggestions) screen.
*/
public class FeedbackActivity extends BaseActivity {
@BindView(R.id.tv_title)
TextView mTvTitle;
@BindView(R.id.toolbar)
Toolbar mToolbar;
@BindView(R.id.type_flow_layout)
TagFlowLayout mTypeFlowLayout;
@BindView(R.id.et_contet)
EditText mEtContet;
@BindView(R.id.et_phone)
EditText mEtPhone;
@BindView(R.id.bt_submit)
TextView mBtSubmit;
private Unbinder mBind;
private ArrayList<String> mPartList;
private String mAdviceType = "无";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_feedback);
mBind = ButterKnife.bind(this);
initData();
setListener();
}
private void setListener() {
mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
finish();
}
});
}
private void initData() {
mPartList = new ArrayList<>();
mPartList.add("bug");
mPartList.add("产品建议");
mPartList.add("吐槽");
mPartList.add("其他");
TagAdapter<String> tagAdapter = new TagAdapter<String>(mPartList) {
@Override
public View getView(FlowLayout parent, int position, String parts) {
View inflate = View.inflate(BaseApplication.getApplictaion(), R.layout.feedback_tag_item, null);
TextView tv_tag = inflate.findViewById(R.id.tv_tag);
tv_tag.setText(parts);
return inflate;
}
};
mTypeFlowLayout.setAdapter(tagAdapter);
}
@Override
protected void onDestroy() {
super.onDestroy();
mBind.unbind();
}
@OnClick(R.id.bt_submit)
public void onViewClicked(View view) {
String advice = mEtContet.getText().toString().trim();
String addr = mEtPhone.getText().toString().trim();
if (!TextUtils.isEmpty(advice) && NetUtil.isConnected(BaseApplication.getApplictaion())) {
if (!TextUtils.isEmpty(addr)) {
                if (!RegUtil.isEmail(addr) && !RegUtil.isMobileSimple(addr)) {
                    RxToast.warning("请输入正确的联系方式");
                    return;
                }
}
mBtSubmit.setEnabled(false);
Set<Integer> selectedList = mTypeFlowLayout.getSelectedList();
Iterator<Integer> it = selectedList.iterator();
while (it.hasNext()) {
Integer next = it.next();
mAdviceType = mPartList.get(next);
}
final String content = "用户名:" + OkingContract.CURRENTUSER.getUserName() + "\n"
+ "设备厂商:" + Build.MANUFACTURER + "\n"
+ "设备型号:" + Build.MODEL + "\n"
+ "系统版本:" + Build.VERSION.RELEASE + "\n"
+ "程序版本:" + DeviceUtil.getAppVersionName(BaseApplication.getApplictaion()) + "\n"
+ "意见类型:" + mAdviceType + "\n"
+ "意见内容:" + advice + "\n"
+ "联系方式:" + addr;
Schedulers.io().createWorker().schedule(new Runnable() {
@Override
public void run() {
boolean succ = SendEmailManager.send("<EMAIL>", "水政执法意见反馈", content);
if (succ) {
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
RxToast.success("您的反馈我们已经收到~~");
mBtSubmit.setEnabled(true);
}
});
}
}
});
}
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.app.PendingIntent;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.os.Handler;
import android.os.Parcel;
import android.support.design.widget.TextInputEditText;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import com.google.gson.Gson;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.ui.views.RxDialogLoading;
import com.zhang.baselib.ui.views.RxToast;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.OkingJPushManager;
import com.zhang.okinglawenforcementphone.OkingNotificationManager;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTaskDao;
import com.zhang.okinglawenforcementphone.beans.InspectTaskBean;
import com.zhang.okinglawenforcementphone.beans.JPushMessageBean;
import com.zhang.okinglawenforcementphone.beans.NewsTaskOV;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.beans.UpdateGreenMissionTaskOV;
import com.zhang.okinglawenforcementphone.mvp.contract.JPushMessageContract;
import com.zhang.okinglawenforcementphone.mvp.contract.TaskBackContract;
import com.zhang.okinglawenforcementphone.mvp.contract.TaskReviewContract;
import com.zhang.okinglawenforcementphone.mvp.presenter.TaskBackPresenter;
import com.zhang.okinglawenforcementphone.mvp.presenter.TaskReviewPresenter;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import org.greenrobot.eventbus.EventBus;
import org.json.JSONException;
import org.json.JSONObject;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import butterknife.Unbinder;
/**
 * Task review (approval) screen.
*/
public class AuditActivity extends BaseActivity {
@BindView(R.id.toolbar)
Toolbar mToolbar;
@BindView(R.id.list_item_missionTitle)
TextInputEditText mListItemMissionTitle;
@BindView(R.id.list_item_missionState)
TextInputEditText mListItemMissionState;
@BindView(R.id.publisher_tv)
TextInputEditText mPublisherTv;
@BindView(R.id.approved_person_tv)
TextInputEditText mApprovedPersonTv;
@BindView(R.id.begin_time_tv)
TextInputEditText mBeginTimeTv;
@BindView(R.id.list_endDate)
TextInputEditText mEtEndTime;
@BindView(R.id.mission_type_tv)
TextInputEditText mMissionTypeTv;
@BindView(R.id.list_item_missionDetail)
TextInputEditText tvPatrolAarea;
@BindView(R.id.ed_approval_opinions)
TextInputEditText mEdApprovalOpinions;
@BindView(R.id.bt_ok)
Button mBtOk;
@BindView(R.id.bt_modify)
Button mBtModify;
private Unbinder mBind;
private DateFormat mDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm");
private TaskReviewPresenter mTaskReviewPresenter;
private GreenMissionTask mUnique;
private RxDialogLoading mRxDialogLoading;
private int mPosition;
private TaskBackPresenter mTaskBackPresenter;
private Handler mainHandler;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_audit);
mBind = ButterKnife.bind(this);
initData();
setListener();
}
private void setListener() {
mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
finish();
}
});
}
private void initData() {
Intent intent = getIntent();
mPosition = intent.getIntExtra("position", -1);
long id = intent.getLongExtra("id", -1L);
if (id != -1L) {
mUnique = GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao()
.queryBuilder()
.where(GreenMissionTaskDao.Properties.Id.eq(id)).unique();
mListItemMissionTitle.setText(mUnique.getTask_name());
switch (mUnique.getStatus()) {
case "0":
case "1":
mListItemMissionState.setText("待审核");
mListItemMissionState.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.colorMain8));
break;
case "2":
mListItemMissionState.setText("未安排人员");
mListItemMissionState.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.colorMain8));
break;
case "3":
mListItemMissionState.setText("已安排,待执行");
mListItemMissionState.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.colorMain7));
break;
case "4":
mListItemMissionState.setText("巡查中");
mListItemMissionState.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.colorMain5));
break;
case "100":
mListItemMissionState.setText("巡查结束");
mListItemMissionState.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.colorMain6));
break;
case "5":
mListItemMissionState.setText("已上报");
mListItemMissionState.setTextColor(BaseApplication.getApplictaion().getResources().getColor(R.color.colorMain4));
break;
case "9":
mListItemMissionState.setText("退回修改");
mListItemMissionState.setTextColor(Color.GRAY);
break;
default:
break;
}
mPublisherTv.setText(mUnique.getPublisher_name());
mApprovedPersonTv.setText(mUnique.getApproved_person_name());
mBeginTimeTv.setText(mDateFormat.format(mUnique.getBegin_time()));
mEtEndTime.setText(mDateFormat.format(mUnique.getEnd_time()));
mMissionTypeTv.setText(mUnique.getTypename());
tvPatrolAarea.setText(mUnique.getRwqyms());
if (mUnique.getStatus().equals("7")) {
mEdApprovalOpinions.setText(mUnique.getSpyj());
mEdApprovalOpinions.setEnabled(false);
mBtOk.setText("重新发布");
mBtModify.setVisibility(View.GONE);
} else {
if (mUnique.getApproved_person().equals(OkingContract.CURRENTUSER.getUserid())) {
                // The approver is the current user, so the approval controls stay enabled.
} else {
mBtOk.setEnabled(false);
mEdApprovalOpinions.setEnabled(false);
mBtOk.setText("等候" + mUnique.getApproved_person_name() + "审批");
mBtModify.setVisibility(View.GONE);
}
}
}
}
@Override
protected void onDestroy() {
super.onDestroy();
mBind.unbind();
}
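    // bt_ok either re-publishes a returned task (status "7") via PatrolsToReleaseActivity,
    // or approves the task (status -> "2") and notifies the receiver through JPush;
    // bt_modify returns the task for revision (status -> "7") and notifies the publisher.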
@OnClick({R.id.bt_ok, R.id.bt_modify})
public void onViewClicked(View view) {
final String approvalOpinions = mEdApprovalOpinions.getText().toString().trim();
switch (view.getId()) {
case R.id.bt_ok:
if (mBtOk.getText().toString().trim().equals("重新发布")) {
Intent intent = new Intent(AuditActivity.this, PatrolsToReleaseActivity.class);
InspectTaskBean fromParcel = InspectTaskBean.CREATOR.createFromParcel(Parcel.obtain());
fromParcel.setAPPROVED_PERSON(mUnique.getApproved_person());
fromParcel.setBEGIN_TIME(mUnique.getBegin_time());
fromParcel.setCREATE_TIME(mUnique.getCreate_time());
fromParcel.setDELIVERY_TIME(mUnique.getDelivery_time());
fromParcel.setDEPT_ID(OkingContract.CURRENTUSER.getDept_id());
fromParcel.setEND_TIME(mUnique.getEnd_time());
fromParcel.setFBDW(mUnique.getFbdw());
fromParcel.setFBR(mUnique.getFbr());
fromParcel.setFBRID(OkingContract.CURRENTUSER.getUserid());
String taskid = mUnique.getTaskid();
fromParcel.setID(taskid);
if ("0".equals(mUnique.getJjcd())) {
fromParcel.setJJCD("特急");
} else if ("1".equals(mUnique.getJjcd())) {
fromParcel.setJJCD("紧急");
} else if ("2".equals(mUnique.getJjcd())) {
fromParcel.setJJCD("一般");
}
fromParcel.setJSDW(mUnique.getJsdw());
fromParcel.setJSR(mUnique.getJsr());
fromParcel.setPUBLISHER(mUnique.getPublisher());
fromParcel.setRECEIVER(mUnique.getReceiver());
if ("0".equals(mUnique.getTypeoftask())) {
fromParcel.setRWLX("日常执法");
} else if ("1".equals(mUnique.getTask_type())) {
fromParcel.setRWLX("联合执法");
} else if ("2".equals(mUnique.getTask_type())) {
fromParcel.setRWLX("专项执法");
} else if ("3".equals(mUnique.getTask_type())) {
fromParcel.setRWLX("目标核查");
}
if ("0".equals(mUnique.getRwly())) {
fromParcel.setRWLY("上级交办");
} else if ("1".equals(mUnique.getRwly())) {
fromParcel.setRWLY("部门移送");
} else if ("2".equals(mUnique.getRwly())) {
fromParcel.setRWLY("系统报警");
} else if ("3".equals(mUnique.getRwly())) {
fromParcel.setRWLY("日常巡查");
} else if ("4".equals(mUnique.getRwly())) {
fromParcel.setRWLY("媒体披露");
} else if ("5".equals(mUnique.getRwly())) {
fromParcel.setRWLY("群众举报");
}
fromParcel.setRWMC(mUnique.getTask_name());
fromParcel.setRWMS(mUnique.getTask_content());
fromParcel.setRWQYMS(mUnique.getRwqyms());
fromParcel.setSPR(mUnique.getApproved_person_name());
fromParcel.setSPRID(mUnique.getApproved_person());
if ("0".equals(mUnique.getStatus())) {
fromParcel.setSTATUS("未发布");
} else if ("1".equals(mUnique.getStatus())) {
fromParcel.setSTATUS("已发布待审核");
} else if ("2".equals(mUnique.getStatus())) {
fromParcel.setSTATUS("审核通过");
} else if ("3".equals(mUnique.getStatus())) {
fromParcel.setSTATUS("接收并已分配队员");
} else if ("4".equals(mUnique.getStatus())) {
fromParcel.setSTATUS("任务开始");
} else if ("5".equals(mUnique.getStatus())) {
fromParcel.setSTATUS("任务完成");
} else if ("7".equals(mUnique.getStatus())) {
fromParcel.setSTATUS("退回修改");
}
if ("0".equals(mUnique.getTypeoftask())) {
fromParcel.setTYPEOFTASK("河道管理");
} else if ("1".equals(mUnique.getTypeoftask())) {
fromParcel.setTYPEOFTASK("河道采砂");
} else if ("2".equals(mUnique.getTypeoftask())) {
fromParcel.setTYPEOFTASK("水资源管理");
} else if ("3".equals(mUnique.getTypeoftask())) {
fromParcel.setTYPEOFTASK("水土保持管理");
} else if ("4".equals(mUnique.getTypeoftask())) {
fromParcel.setTYPEOFTASK("水利工程管理");
}
fromParcel.setTASK_NAME(mUnique.getTask_name());
intent.putExtra("inspectTaskBean", fromParcel);
intent.putExtra("position", mPosition);
intent.putExtra("id", mUnique.getId());
startActivity(intent);
finish();
} else {
if (!TextUtils.isEmpty(approvalOpinions)) {
if (mTaskReviewPresenter == null) {
mTaskReviewPresenter = new TaskReviewPresenter(new TaskReviewContract.View() {
@Override
public void taskReviewSucc(final String result) {
Log.i("Oking", "成功:" + result);
mRxDialogLoading.cancel();
try {
JSONObject jsonObject = new JSONObject(result);
String status = jsonObject.getString("status");
String msg = jsonObject.getString("msg");
if (status.equals("1")) {
                                        // Send a remote push notification to the task receiver.
JPushMessageBean jPushMessageBean = new JPushMessageBean();
JPushMessageBean.AudienceBean audienceBean = new JPushMessageBean.AudienceBean();
ArrayList<String> alias = new ArrayList<>();
alias.add(mUnique.getReceiver());
mUnique.setStatus("2");
audienceBean.setAlias(alias);
jPushMessageBean.setAudience(audienceBean);
JPushMessageBean.NotificationBean notificationBean = new JPushMessageBean.NotificationBean();
notificationBean.setAlert("新消息:"+mUnique.getTask_name());
JPushMessageBean.NotificationBean.AndroidBean androidBean = new JPushMessageBean.NotificationBean.AndroidBean();
JPushMessageBean.NotificationBean.AndroidBean.ExtrasBean extrasBean = new JPushMessageBean.NotificationBean.AndroidBean.ExtrasBean();
extrasBean.setOpenType("1");
extrasBean.setTaskid(mUnique.getTaskid());
androidBean.setExtras(extrasBean);
notificationBean.setAndroid(androidBean);
ArrayList<String> platforms = new ArrayList<>();
platforms.add("android");
jPushMessageBean.setPlatform(platforms);
jPushMessageBean.setNotification(notificationBean);
OkingJPushManager.getInstence().pushMessage(jPushMessageBean, new JPushMessageContract.View() {
@Override
public void pushMessageSucc(String result) {
}
@Override
public void pushMessageFail(Throwable ex) {
RxToast.error(ex.getMessage());
}
});
UpdateGreenMissionTaskOV updateGreenMissionTaskOV = new UpdateGreenMissionTaskOV();
updateGreenMissionTaskOV.setType(100);
updateGreenMissionTaskOV.setMissionTask(mUnique);
updateGreenMissionTaskOV.setPosition(mPosition);
EventBus.getDefault().post(updateGreenMissionTaskOV);
GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao().delete(mUnique);
if (mainHandler == null) {
mainHandler = new Handler();
}
mainHandler.postDelayed(new Runnable() {
@Override
public void run() {
finish();
}
}, 100);
RxToast.success(msg);
} else {
RxToast.error(msg);
}
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public void taskReviewFail(Throwable ex) {
Log.i("Oking", "失败:" + ex.toString());
mRxDialogLoading.cancel();
}
});
}
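                    // Server-side form fields use abbreviated pinyin keys; the mapping below is
                    // inferred from the getters: rwmc=task name, fbr/fbrid=publisher, fbdw=publishing
                    // unit, jsr/jsrid=receiver, jsdw=receiving unit, rwms=task description,
                    // rwqyms=task area, sjq/sjz=start/end time, rwlx=task type, jjcd=urgency,
                    // zt=status, rwly=task source, spr/sprid=approver, spyj=approval opinion.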
Map<String, Object> params = new HashMap<>();
params.put("lx", "updatesp");
// params.put("coordinateJson", "");
params.put("rwmc", mUnique.getTask_name());
params.put("fbr", mUnique.getPublisher_name());
params.put("fbdw", mUnique.getFbdw());
params.put("jsr", mUnique.getJsr());
params.put("jsdw", mUnique.getJsdw());
params.put("rwms", mUnique.getTask_content());
params.put("rwqyms", mUnique.getTask_area());
params.put("sjq", mBeginTimeTv.getText().toString().trim());
params.put("sjz", mEtEndTime.getText().toString().trim());
params.put("rwlx", mUnique.getTask_type());
params.put("jjcd", mUnique.getJjcd());
params.put("zt", "2");
params.put("rwly", mUnique.getRwly());
params.put("fbrid", mUnique.getPublisher());
params.put("jsrid", mUnique.getReceiver());
params.put("sprid", mUnique.getApproved_person());
params.put("spr", mUnique.getApproved_person_name());
params.put("deptid", OkingContract.CURRENTUSER.getDept_id());
params.put("id", mUnique.getTaskid());
params.put("spyj", approvalOpinions);
params.put("typeoftask", mUnique.getTypeoftask());
if (mRxDialogLoading == null) {
mRxDialogLoading = new RxDialogLoading(AuditActivity.this, false, new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialog) {
dialog.cancel();
}
});
mRxDialogLoading.setLoadingText("正在提交数据请稍候...");
}
mRxDialogLoading.show();
mTaskReviewPresenter.taskReview(params);
} else {
RxToast.warning("请填入审批意见");
}
}
break;
case R.id.bt_modify:
if (!TextUtils.isEmpty(approvalOpinions)) {
if (mTaskBackPresenter == null) {
mTaskBackPresenter = new TaskBackPresenter(new TaskBackContract.View() {
@Override
public void taskBackSucc(String result) {
mRxDialogLoading.cancel();
try {
JSONObject jsonObject = new JSONObject(result);
String status = jsonObject.getString("status");
String msg = jsonObject.getString("msg");
if (status.equals("1")) {
GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao().delete(mUnique);
                                        // Send a remote push notification to the task publisher.
JPushMessageBean jPushMessageBean = new JPushMessageBean();
JPushMessageBean.AudienceBean audienceBean = new JPushMessageBean.AudienceBean();
ArrayList<String> alias = new ArrayList<>();
alias.add(mUnique.getPublisher());
mUnique.setStatus("7");
audienceBean.setAlias(alias);
jPushMessageBean.setAudience(audienceBean);
JPushMessageBean.NotificationBean notificationBean = new JPushMessageBean.NotificationBean();
notificationBean.setAlert(mUnique.getTask_name());
JPushMessageBean.NotificationBean.AndroidBean androidBean = new JPushMessageBean.NotificationBean.AndroidBean();
JPushMessageBean.NotificationBean.AndroidBean.ExtrasBean extrasBean = new JPushMessageBean.NotificationBean.AndroidBean.ExtrasBean();
extrasBean.setData(new Gson().toJson(mUnique));
androidBean.setExtras(extrasBean);
notificationBean.setAndroid(androidBean);
ArrayList<String> platforms = new ArrayList<>();
platforms.add("android");
jPushMessageBean.setPlatform(platforms);
jPushMessageBean.setNotification(notificationBean);
OkingJPushManager.getInstence().pushMessage(jPushMessageBean, new JPushMessageContract.View() {
@Override
public void pushMessageSucc(String result) {
}
@Override
public void pushMessageFail(Throwable ex) {
RxToast.error(ex.getMessage());
}
});
if (mainHandler == null) {
mainHandler = new Handler();
}
mainHandler.postDelayed(new Runnable() {
@Override
public void run() {
finish();
}
}, 100);
RxToast.success("退回成功");
if (mPosition != -1) {
UpdateGreenMissionTaskOV updateGreenMissionTaskOV = new UpdateGreenMissionTaskOV();
updateGreenMissionTaskOV.setType(100);
updateGreenMissionTaskOV.setPosition(mPosition);
EventBus.getDefault().post(updateGreenMissionTaskOV);
}
finish();
} else {
RxToast.error("退回失败!");
}
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public void taskBackFail(Throwable ex) {
mRxDialogLoading.cancel();
}
});
}
Map<String, Object> params = new HashMap<>();
params.put("lx", "updatesp");
// params.put("coordinateJson", "");
params.put("rwmc", mUnique.getTask_name());
params.put("fbr", mUnique.getPublisher_name());
params.put("fbdw", mUnique.getFbdw());
params.put("jsr", mUnique.getJsr());
params.put("jsdw", mUnique.getJsdw());
params.put("rwms", mUnique.getTask_content());
params.put("rwqyms", mUnique.getTask_area());
params.put("sjq", mBeginTimeTv.getText().toString().trim());
params.put("sjz", mEtEndTime.getText().toString().trim());
params.put("rwlx", mUnique.getTask_type());
params.put("jjcd", mUnique.getJjcd());
params.put("zt", "7");
params.put("rwly", mUnique.getRwly());
params.put("fbrid", mUnique.getPublisher());
params.put("jsrid", mUnique.getReceiver());
params.put("sprid", mUnique.getApproved_person());
params.put("spr", mUnique.getApproved_person_name());
params.put("deptid", OkingContract.CURRENTUSER.getDept_id());
params.put("id", mUnique.getTaskid());
params.put("spyj", approvalOpinions);
params.put("typeoftask", mUnique.getTypeoftask());
if (mRxDialogLoading == null) {
mRxDialogLoading = new RxDialogLoading(AuditActivity.this, false, new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialog) {
dialog.cancel();
}
});
mRxDialogLoading.setLoadingText("正在提交数据请稍候...");
}
mRxDialogLoading.show();
mTaskBackPresenter.taskBack(params);
} else {
RxToast.warning("请填入审批意见");
}
break;
default:
break;
}
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.widget.TextView;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.ui.views.RxDialogSureCancel;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.adapter.AllMenuRecyAdapter;
import com.zhang.okinglawenforcementphone.beans.AllMenuItemBean;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import com.zhang.okinglawenforcementphone.views.DividerItemDecoration;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
public class AllActivity extends BaseActivity {
@BindView(R.id.tv_title)
TextView tvTitle;
@BindView(R.id.toolbar)
Toolbar toolbar;
@BindView(R.id.rcy_all)
RecyclerView rcyAll;
private Unbinder mBind;
private AllMenuItemBean mAllMenuItemBean;
private List<String> mMenusSub;
private RxDialogSureCancel mRxDialogSureCancel;
private Intent intent;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_all);
mBind = ButterKnife.bind(this);
initData();
setListener();
}
private void setListener() {
toolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
finish();
}
});
}
private void initData() {
rcyAll.setLayoutManager(new LinearLayoutManager(BaseApplication.getApplictaion(), LinearLayoutManager.VERTICAL, false));
rcyAll.addItemDecoration(new DividerItemDecoration(BaseApplication.getApplictaion(), 0, 20, getResources().getColor(R.color.activity_bg)));
        AllMenuRecyAdapter allMenuRecyAdapter = new AllMenuRecyAdapter(R.layout.all_menu_item, getMenu(), new AllMenuRecyAdapter.OnItemClickListener() {
@Override
public void setOnItemClickListener(BaseQuickAdapter adapter, View view, int groupPosition, int chilPosition) {
switch (groupPosition) {
                    case 0: // Task management
switch (chilPosition) {
case 0:
if (mRxDialogSureCancel == null) {
mRxDialogSureCancel = new RxDialogSureCancel(AllActivity.this);
}
mRxDialogSureCancel.setContent("请选择发布任务类型");
mRxDialogSureCancel.getTvSure().setText("一般任务");
mRxDialogSureCancel.getTvCancel().setText("紧急任务");
mRxDialogSureCancel.getTvCancel().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mRxDialogSureCancel.cancel();
Intent intent = new Intent(AllActivity.this, TemporaryEmergencyTaskActivity.class);
startActivity(intent);
}
});
mRxDialogSureCancel.getTvSure().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mRxDialogSureCancel.cancel();
Intent intent = new Intent(AllActivity.this, PatrolsToReleaseActivity.class);
startActivity(intent);
}
});
mRxDialogSureCancel.show();
break;
case 1:
intent = new Intent(AllActivity.this, TaskMissionProjectActivity.class);
intent.putExtra("activity", "ArrangeMissionActivity");
startActivity(intent);
break;
case 2:
intent = new Intent(AllActivity.this, TaskMissionProjectActivity.class);
intent.putExtra("activity", "TaskExecutionActivity");
startActivity(intent);
break;
case 3:
intent = new Intent(AllActivity.this, TaskMissionProjectActivity.class);
intent.putExtra("activity", "ReportTaskActivity");
startActivity(intent);
break;
case 4:
intent = new Intent(AllActivity.this, TaskMissionProjectActivity.class);
intent.putExtra("activity", "CompleteListActivity");
startActivity(intent);
break;
default:
break;
}
break;
                    case 1: // Law-enforcement management
switch (chilPosition) {
case 0:
intent = new Intent(AllActivity.this, WrittenRecordActivity.class);
startActivity(intent);
break;
case 1:
intent = new Intent(AllActivity.this, FromAllLawEnforcementActivity.class);
startActivity(intent);
break;
case 2:
intent = new Intent(AllActivity.this, FromAllPenaltyTheSpotActivity.class);
startActivity(intent);
break;
case 3:
intent = new Intent(AllActivity.this, SceneInquestActivity.class);
startActivity(intent);
break;
default:
break;
}
break;
                    case 2: // Enforcement guidance
switch (chilPosition) {
case 0:
intent = new Intent(AllActivity.this, FromAllLawsAndRegulationsActivity.class);
startActivity(intent);
break;
case 1:
intent = new Intent(AllActivity.this, FromAllLawEnforcementSpecificationActivity.class);
startActivity(intent);
break;
case 2:
intent = new Intent(AllActivity.this, PuttedForwardConActivity.class);
startActivity(intent);
break;
default:
break;
}
break;
                    case 3: // Statistics and queries
switch (chilPosition) {
case 0:
intent = new Intent(AllActivity.this, StatisticalActivity.class);
startActivity(intent);
break;
case 1:
intent = new Intent(AllActivity.this, MapQueryActivity.class);
startActivity(intent);
break;
case 2:
intent = new Intent(AllActivity.this, MapTaskActivity.class);
startActivity(intent);
break;
case 3:
intent = new Intent(AllActivity.this, RegionalHistoryEnforcementActivity.class);
startActivity(intent);
break;
case 4:
intent = new Intent(AllActivity.this, TaskMissionProjectActivity.class);
intent.putExtra("activity", "TrajectoryListActivity");
startActivity(intent);
break;
default:
break;
}
break;
                    case 4: // Case management
switch (chilPosition) {
case 0:
intent = new Intent(AllActivity.this, FromAllCaseRegistrationActivity.class);
startActivity(intent);
break;
case 1:
intent = new Intent(AllActivity.this, FromAllOpenCasesActivity.class);
startActivity(intent);
break;
case 2:
intent = new Intent(AllActivity.this, FromAllCaseProcessingListActivity.class);
startActivity(intent);
break;
case 3:
intent = new Intent(AllActivity.this, FromAllCaseComplaintActivity.class);
startActivity(intent);
break;
case 4:
intent = new Intent(AllActivity.this, FromAllCaseInAdvanceActivity.class);
startActivity(intent);
break;
default:
break;
}
break;
default:
break;
}
}
});
allMenuRecyAdapter.openLoadAnimation(BaseQuickAdapter.SLIDEIN_RIGHT);
rcyAll.setAdapter(allMenuRecyAdapter);
}
@Override
protected void onDestroy() {
super.onDestroy();
mBind.unbind();
}
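    // Builds the grouped menu shown in the RecyclerView. The group/child order here has to
    // stay in sync with the position-based dispatch in initData()'s click listener.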
public List<AllMenuItemBean> getMenu() {
List<AllMenuItemBean> allMenuItemBeans = new ArrayList<>();
mAllMenuItemBean = new AllMenuItemBean();
mAllMenuItemBean.setTitle("任务管理");
mMenusSub = new ArrayList<>();
mMenusSub.add("巡查任务发布");
mMenusSub.add("巡查任接收安排");
mMenusSub.add("任务执行");
mMenusSub.add("任务上报");
mMenusSub.add("日志(已完成)");
mAllMenuItemBean.setSubList(mMenusSub);
allMenuItemBeans.add(mAllMenuItemBean);
mAllMenuItemBean = new AllMenuItemBean();
mAllMenuItemBean.setTitle("执法管理");
mMenusSub = new ArrayList<>();
mMenusSub.add("调查笔录");
mMenusSub.add("责令停止违法行为通知");
mMenusSub.add("水行政当场除非决定书");
mMenusSub.add("现场勘验");
mAllMenuItemBean.setSubList(mMenusSub);
allMenuItemBeans.add(mAllMenuItemBean);
mAllMenuItemBean = new AllMenuItemBean();
mAllMenuItemBean.setTitle("执法指导");
mMenusSub = new ArrayList<>();
mMenusSub.add("法律法规库");
mMenusSub.add("执法规范");
mMenusSub.add("案例库");
mAllMenuItemBean.setSubList(mMenusSub);
allMenuItemBeans.add(mAllMenuItemBean);
mAllMenuItemBean = new AllMenuItemBean();
mAllMenuItemBean.setTitle("统计查询");
mMenusSub = new ArrayList<>();
mMenusSub.add("日志统计");
mMenusSub.add("地图查询");
mMenusSub.add("地图任务展示");
mMenusSub.add("区域执法记录查询");
mMenusSub.add("轨迹管理");
mAllMenuItemBean.setSubList(mMenusSub);
allMenuItemBeans.add(mAllMenuItemBean);
mAllMenuItemBean = new AllMenuItemBean();
mAllMenuItemBean.setTitle("案件管理");
mMenusSub = new ArrayList<>();
mMenusSub.add("案件登记");
mMenusSub.add("案件受理");
mMenusSub.add("案件处理");
mMenusSub.add("案件转办");
mMenusSub.add("预立案");
mAllMenuItemBean.setSubList(mMenusSub);
allMenuItemBeans.add(mAllMenuItemBean);
return allMenuItemBeans;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.model;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.http.BaseHttpFactory;
import com.zhang.baselib.utils.DeviceUtil;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.http.service.GDWaterService;
import com.zhang.okinglawenforcementphone.mvp.contract.AppVersionContract;
import org.json.JSONArray;
import org.json.JSONObject;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.functions.Consumer;
import io.reactivex.schedulers.Schedulers;
import okhttp3.ResponseBody;
/**
* Created by Administrator on 2018/4/18.
*/
public class AppVersionModel implements AppVersionContract.Model {
private AppVersionContract.Presenter mPresenter;
public AppVersionModel(AppVersionContract.Presenter presenter) {
mPresenter = presenter;
}
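    // Fetches the "app_version" configuration rows and compares each VALUE against the
    // installed version name; when they differ, the presenter is notified with the BZ
    // field (presumably a remark such as a download note; the exact meaning of BZ is
    // not defined in this file).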
@Override
public void reqAppVersion() {
GDWaterService service = BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL);
service.reqAppVersion("app_version")
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
JSONArray jsonArray = new JSONArray(responseBody.string());
for (int i = 0; i < jsonArray.length(); i++) {
JSONObject jsonObject = jsonArray.getJSONObject(i);
String value = jsonObject.getString("VALUE");
String bz = jsonObject.getString("BZ");
if (!value.equals(DeviceUtil.getAppVersionName(BaseApplication.getApplictaion()))) {
mPresenter.reqSucc(bz);
}
}
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
mPresenter.reqFail(throwable);
}
});
}
}
<file_sep>package com.zhang.okinglawenforcementphone.views;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.hardware.Camera;
import android.os.Handler;
import android.view.SurfaceHolder;
import android.widget.Toast;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.ui.views.RxToast;
import com.zhang.baselib.utils.PicUtil;
import com.zhang.okinglawenforcementphone.mvp.ui.activitys.ShootActivity;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ScheduledThreadPoolExecutor;
@SuppressWarnings("deprecation")
public class MyCamera implements Camera.PictureCallback, Camera.ShutterCallback {
private Camera mCamera;
private ScheduledThreadPoolExecutor mTimerShootingExecutor;
private ArrayList<String> paths = new ArrayList<String>();
private ShootActivity mShootActivity;
private Handler mHandler = new Handler();
public MyCamera(ShootActivity shootActivity) {
this.mShootActivity = shootActivity;
}
public void openCamera() {
if (null == mCamera) {
mCamera = Camera.open();
mCamera.setDisplayOrientation(90);
}
}
public void releaseCamera() {
if (null != mCamera) {
if (isTimerShootingStart()) {
stopTimerShooting();
}
mCamera.release();
mCamera = null;
}
}
public void takePicture() {
mCamera.takePicture(this, null, this);
}
public synchronized void stopTimerShooting() {
if (null != mTimerShootingExecutor) {
mTimerShootingExecutor.shutdown();
mTimerShootingExecutor = null;
}
}
public synchronized boolean isTimerShootingStart() {
if (null != mTimerShootingExecutor) {
return true;
} else {
return false;
}
}
public void onSurfaceCreated(SurfaceHolder holder) {
try {
            // The callback holder is available once the surface has been created;
            // it wraps the successfully created Surface,
            // which is handed to the camera for preview.
mCamera.setPreviewDisplay(holder);
} catch (IOException e) {
e.printStackTrace();
}
}
public void onSurfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        // The surface size has changed: configure preview parameters such as the resolution.
        // For simplicity the first supported preview size is used here;
        // a smarter size-selection algorithm could be substituted.
Camera.Parameters parameters = mCamera.getParameters();
List<Camera.Size> sizes = parameters.getSupportedPreviewSizes();
Camera.Size selected = sizes.get(0);
parameters.setPreviewSize(selected.width, selected.height);
parameters.setPictureSize(selected.width, selected.height);
        // Apply the parameters to the camera and start the preview.
mCamera.setParameters(parameters);
mCamera.startPreview();
}
public void onSurfaceDestroyed(SurfaceHolder holder) {
}
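    // JPEG callback: decode the bytes, rotate 90 degrees to match the portrait preview,
    // and save the result under /storage/emulated/0/oking/mission_pic/ with a random
    // UUID file name before restarting the preview.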
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Bitmap bmp = BitmapFactory.decodeByteArray(data, 0, data.length);
bmp = PicUtil.rotateBitmapByDegree(bmp, 90);
String filename = UUID.randomUUID().toString();
String filePathname = "/storage/emulated/0/oking/mission_pic/" + filename+".jpg";
save(bmp, filePathname, filename);
}
private void save(Bitmap bitmap, String filePath, String fileName) {
File file = new File(filePath);
if (!file.getParentFile().exists()) {
            file.getParentFile().mkdirs(); // create the directory
}
try {
BufferedOutputStream bos = new BufferedOutputStream(
new FileOutputStream(file));
            bitmap.compress(Bitmap.CompressFormat.JPEG, 80, bos); // compress the bitmap into the output buffer
bos.flush();
bos.close();
paths.add(filePath);
RxToast.success(BaseApplication.getApplictaion(), "拍照成功", Toast.LENGTH_SHORT).show();
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
mShootActivity.notyEnablestate(true);
}
}, 300);
            // Restart the preview.
mCamera.startPreview();
        } catch (Exception e) {
            // Don't crash on I/O failures, but keep a trace for debugging.
            e.printStackTrace();
        }
}
@Override
public void onShutter() {
}
public ArrayList<String> completePhotos() {
return paths;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseExpandableListAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import com.zhang.baselib.BaseApplication;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.ChapterDomain;
import com.zhang.okinglawenforcementphone.beans.LawsRegulation;
import java.util.ArrayList;
/**
* Created by Administrator on 2017/10/30.
*/
public class RegulationsExListViewAdapter extends BaseExpandableListAdapter {
private ArrayList<LawsRegulation> lawChapter;
public RegulationsExListViewAdapter(ArrayList<LawsRegulation> lawChapter) {
this.lawChapter = lawChapter;
}
public void setDataList(ArrayList dataList) {
}
@Override
public int getGroupCount() {
return lawChapter.size();
}
@Override
public int getChildrenCount(int i) {
return lawChapter.get(i).getChapterDirectory().get(i).getSection().size();
}
@Override
public Object getGroup(int i) {
return null;
}
@Override
public Object getChild(int i, int i1) {
return null;
}
@Override
public long getGroupId(int i) {
return 0;
}
@Override
public long getChildId(int i, int i1) {
return 0;
}
@Override
public boolean hasStableIds() {
return false;
}
@Override
public View getGroupView(int groupPosition, boolean isExpanded, View convertView, ViewGroup parent) {
GroupHold groupHold;
if (convertView==null){
groupHold = new GroupHold();
convertView = View.inflate(BaseApplication.getApplictaion(), R.layout.gegulation_group_item,null);
groupHold.title = convertView.findViewById(R.id.tv_title);
groupHold.ivGoToChildLv = convertView.findViewById(R.id.iv_goToChildLV);
convertView.setTag(groupHold);
}
groupHold = (GroupHold) convertView.getTag();
ChapterDomain chapterDomain = lawChapter.get(groupPosition).getChapterDirectory().get(groupPosition);
groupHold.title.setText(chapterDomain.getChapterDirectory());
        // The default groupIndicator is disabled, so use isExpanded to render a custom indicator.
        if (isExpanded) { // group is expanded
groupHold.ivGoToChildLv.setImageResource(R.mipmap.arrow_down);
} else {
groupHold.ivGoToChildLv.setImageResource(R.mipmap.arrow_right);
}
return convertView;
}
@Override
public View getChildView(final int groupPosition, final int childPosition, boolean isLastChild, View convertView,
ViewGroup parent) {
ChildHold childHold;
if (convertView==null){
childHold = new ChildHold();
convertView = View.inflate(BaseApplication.getApplictaion(),R.layout.gegylation_child_item,null);
childHold.itemTitle = convertView.findViewById(R.id.tv_title);
convertView.setTag(childHold);
}
childHold = (ChildHold) convertView.getTag();
String itemTitle = lawChapter.get(groupPosition).getChapterDirectory().get(groupPosition).getSection().get(childPosition).getItemTitle();
childHold.itemTitle.setText(itemTitle);
return convertView;
}
@Override
public boolean isChildSelectable(int groupPosition, int childPosition) {
return true;
}
class GroupHold {
TextView title;
ImageView ivGoToChildLv;
}
class ChildHold {
TextView itemTitle;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.views;
import android.content.Context;
import android.support.annotation.Nullable;
import android.support.v7.widget.RecyclerView;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.widget.GridView;
/**
* Created by Administrator on 2018/4/24/024.
*/
public class MyRecycelerView extends RecyclerView{
public MyRecycelerView(android.content.Context context, android.util.AttributeSet attrs){
super(context, attrs);
}
public MyRecycelerView(android.content.Context context){
super(context);
}
@Override
public boolean dispatchTouchEvent(MotionEvent ev) {
        // Ask all parent and ancestor views not to intercept touch events.
getParent().requestDisallowInterceptTouchEvent(true);
return super.dispatchTouchEvent(ev);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.presenter;
import com.zhang.okinglawenforcementphone.mvp.contract.AddMemberContract;
import com.zhang.okinglawenforcementphone.mvp.model.AddMemberModel;
/**
* Created by Administrator on 2018/4/20.
*/
public class AddMemberPresenter implements AddMemberContract.Presenter {
private AddMemberContract.Model mModel;
private AddMemberContract.View mView;
public AddMemberPresenter(AddMemberContract.View view) {
mView = view;
mModel = new AddMemberModel(this);
}
@Override
public void addMember(String userid, String mtaskId, String userids) {
mModel.addMember(userid, mtaskId, userids);
}
@Override
public void addMemberSucc(String result) {
mView.addMemberSucc(result);
}
@Override
public void addMemberFail(Throwable ex) {
mView.addMemberFail(ex);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.widget.CompoundButton;
import com.chad.library.adapter.base.BaseMultiItemQuickAdapter;
import com.chad.library.adapter.base.BaseViewHolder;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.GreenEquipment;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
* Created by Administrator on 2018/4/10.
*/
public class EquipmentRecyAdapter extends BaseMultiItemQuickAdapter<GreenEquipment, BaseViewHolder> {
private List<GreenEquipment> checkItems = new ArrayList<GreenEquipment>();
    private Map<Integer, Boolean> map = new HashMap<>();// positions of the CheckBoxes that are currently checked
public EquipmentRecyAdapter(List<GreenEquipment> data) {
super(data);
addItemType(0, R.layout.equipment_item_head);
addItemType(1, R.layout.equipment_item_layout);
}
@Override
protected void convert(final BaseViewHolder helper, GreenEquipment item) {
if (item.getItemType() == 1) {
helper.setText(R.id.type_textView, item.getMc1());
helper.setText(R.id.value_textView, item.getValue());
switch (item.getLy()) {
case "0":
helper.setText(R.id.tv_attribute, "自有");
break;
case "1":
helper.setText(R.id.tv_attribute, "租借");
break;
default:
break;
}
helper.setText(R.id.remarks_textView, item.getRemarks());
helper.setText(R.id.tv_mc, item.getMc2());
helper.setOnCheckedChangeListener(R.id.cb, new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean b) {
if (b) {
map.put(helper.getLayoutPosition(), true);
} else {
map.remove(helper.getLayoutPosition());
}
}
});
if (map != null && map.containsKey(helper.getLayoutPosition())) {
helper.setChecked(R.id.cb, true);
} else {
helper.setChecked(R.id.cb, false);
}
}
}
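    /**
     * Returns the equipment entries whose checkboxes are currently ticked,
     * based on the adapter positions recorded in {@code map}.
     */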
public List<GreenEquipment> getCheckItem() {
List<GreenEquipment> equipmentArrayList = getData();
checkItems.clear();
Iterator<Integer> iter = map.keySet().iterator();
while (iter.hasNext()) {
int key = iter.next();
checkItems.add(equipmentArrayList.get(key));
}
return checkItems;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.model;
import android.net.Uri;
import android.util.Log;
import com.zhang.baselib.http.BaseHttpFactory;
import com.zhang.baselib.http.schedulers.RxSchedulersHelper;
import com.zhang.okinglawenforcementphone.beans.GreenMember;
import com.zhang.okinglawenforcementphone.beans.GreenMissionLog;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.http.service.GDWaterService;
import com.zhang.okinglawenforcementphone.mvp.contract.UploadSignaturePicContract;
import org.json.JSONObject;
import java.io.File;
import java.util.Map;
import io.reactivex.Observable;
import io.reactivex.ObservableSource;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.functions.Consumer;
import io.reactivex.functions.Function;
import io.reactivex.functions.Predicate;
import io.reactivex.schedulers.Schedulers;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
/**
* Created by Administrator on 2018/4/26/026.
*/
public class UploadSignaturePicModel implements UploadSignaturePicContract.Model {
private UploadSignaturePicContract.Presenter mPresenter;
private String memberResult;
private int uploadSignaturePicCount=0;
private String mLastPathSegment;
public UploadSignaturePicModel(UploadSignaturePicContract.Presenter presenter) {
mPresenter = presenter;
}
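    // Overall flow of uploadSignaturePic(): first fetch the signature pictures already stored on
    // the server for this mission log, then iterate the task members. Members whose signature file
    // name already appears in the server response are only counted; the remaining signatures are
    // uploaded one by one, with up to 5 retries on failure.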
@Override
public void uploadSignaturePic(final GreenMissionLog mGreenMissionLog, final GreenMissionTask missionTask, final Map<String, RequestBody> photoParams) {
BaseHttpFactory
.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.getMissionRecordPicPath(mGreenMissionLog.getServer_id(), 1)
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.observeOn(Schedulers.io())
.concatMap(new Function<ResponseBody, ObservableSource<GreenMember>>() {
@Override
public Observable<GreenMember> apply(ResponseBody responseBody) throws Exception {
memberResult = responseBody.string();
uploadSignaturePicCount=0;
Log.i("Oking1",memberResult+">>>>服务器返回member>>");
Log.i("Oking1","需要签名人员:"+missionTask.getMembers().size()+missionTask.getMembers().toString());
return Observable.fromIterable(missionTask.getMembers());
}
})
.concatMap(new Function<GreenMember, ObservableSource<ResponseBody>>() {
@Override
public ObservableSource<ResponseBody> apply(GreenMember greenMember) throws Exception {
mLastPathSegment = Uri.parse(greenMember.getSignPic()).getLastPathSegment();
Log.i("Oking1","遍历签名"+ mLastPathSegment);
if (memberResult.contains(mLastPathSegment)) {
                                // Already present on the server
Log.i("Oking1",mLastPathSegment+"签名在服务器存在》》》》》》》》》"+memberResult);
uploadSignaturePicCount++;
mPresenter.uploadIsCount(uploadSignaturePicCount);
} else {
if (greenMember.getSignPic() != null) {
photoParams.clear();
File file = new File(greenMember.getSignPic());
photoParams.put("logId", RequestBody.create(MediaType.parse("text/plain;charset=UTF-8"), mGreenMissionLog.getServer_id()));
photoParams.put("type", RequestBody.create(MediaType.parse("text/plain;charset=UTF-8"), "1"));
photoParams.put("smallImg", RequestBody.create(MediaType.parse("text/plain;charset=UTF-8"), ""));
photoParams.put("user_id", RequestBody.create(MediaType.parse("text/plain;charset=UTF-8"), greenMember.getUserid()!=null?greenMember.getUserid():"880088"));
String fileName = file.getName();
photoParams.put("files" + "\"; filename=\"" + fileName, RequestBody.create(MediaType.parse("image/png"), file));
return BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.uploadFiles(photoParams);
}
}
return Observable.empty();
}
}).observeOn(AndroidSchedulers.mainThread())
.retry(5, new Predicate<Throwable>() {
@Override
public boolean test(Throwable throwable) throws Exception {
                                // Let the upstream re-emit (retry) at most 5 times; the return value decides what happens.
                                // Returning false stops the retries and the observer's onError terminates the stream.
                                // Returning true asks the upstream to re-emit the request.
Log.i("Oking1","签名图片上传异常,重试");
mPresenter.uploadRetry(throwable);
return true;
}
})
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
uploadSignaturePicCount++;
String result = responseBody.string();
JSONObject jsonObject = new JSONObject(result);
String path = jsonObject.getString("path");
memberResult = memberResult+","+path;
mPresenter.uploadSignaturePicSucc(result);
Log.i("Oking1","签名上传成功"+result);
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
Log.i("Oking1","签名上传失败"+throwable.toString());
mPresenter.uploadSignatureFail(throwable);
}
});
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.fragments;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.http.BaseHttpFactory;
import com.zhang.baselib.http.schedulers.RxSchedulersHelper;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.adapter.CaseListAdapter;
import com.zhang.okinglawenforcementphone.beans.GreenCase;
import com.zhang.okinglawenforcementphone.beans.GreenCaseDao;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.http.service.GDWaterService;
import com.zhang.okinglawenforcementphone.mvp.ui.activitys.CaseDealActivity;
import com.zhang.okinglawenforcementphone.mvp.ui.activitys.CaseManagerActivity;
import com.zhang.okinglawenforcementphone.views.DividerItemDecoration;
import org.json.JSONArray;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
import io.reactivex.functions.Consumer;
import okhttp3.ResponseBody;
/**
 * Case processing list
*/
public class CaseProcessingListFragment extends Fragment {
// TODO: Rename parameter arguments, choose names that match
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
private static final String ARG_PARAM1 = "param1";
private static final String ARG_PARAM2 = "param2";
@BindView(R.id.case_ry)
RecyclerView mCaseRy;
@BindView(R.id.layoutSwipeRefresh)
SwipeRefreshLayout mLayoutSwipeRefresh;
Unbinder unbinder;
@BindView(R.id.tv)
TextView mTv;
private CaseManagerActivity mActivity;
// TODO: Rename and change types of parameters
private String mParam1;
private String mParam2;
private View mInflate;
private CaseListAdapter mCaseListAdapter;
private List<GreenCase> mCaseList = new ArrayList<>();
public CaseProcessingListFragment() {
// Required empty public constructor
}
public static CaseProcessingListFragment newInstance(String param1, String param2) {
CaseProcessingListFragment fragment = new CaseProcessingListFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM1, param1);
args.putString(ARG_PARAM2, param2);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mParam1 = getArguments().getString(ARG_PARAM1);
mParam2 = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
if (mInflate == null) {
mInflate = inflater.inflate(R.layout.fragment_caseprocessing_list, container, false);
}
unbinder = ButterKnife.bind(this, mInflate);
initData();
setListener();
return mInflate;
}
private void setListener() {
mLayoutSwipeRefresh.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
@Override
public void onRefresh() {
getHttpCaseList();
}
});
mCaseListAdapter.setOnItemClickListener(new BaseQuickAdapter.OnItemClickListener() {
@Override
public void onItemClick(BaseQuickAdapter adapter, View view, int position) {
List<GreenCase> data = adapter.getData();
Intent intent = new Intent(getActivity(), CaseDealActivity.class);
intent.putExtra("AJID",data.get(position).getAJID());
startActivity(intent);
}
});
}
private void initData() {
mLayoutSwipeRefresh.setRefreshing(true);
mLayoutSwipeRefresh.setColorSchemeColors(getResources().getColor(R.color.refresh_color));
getHttpCaseList();
mCaseRy.setLayoutManager(new LinearLayoutManager(getContext(), LinearLayoutManager.VERTICAL, false));
mCaseRy.addItemDecoration(new DividerItemDecoration(BaseApplication.getApplictaion(), 0, 10, BaseApplication.getApplictaion().getResources().getColor(R.color.activity_bg)));
mCaseListAdapter = new CaseListAdapter(R.layout.list_item_case, null);
mCaseRy.setAdapter(mCaseListAdapter);
}
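    /**
     * Loads the case list from the server, inserts or updates each record in the local
     * GreenDAO database, and falls back to the locally cached cases if the request fails.
     */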
private void getHttpCaseList() {
BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.loadCaseList(OkingContract.CURRENTUSER.getUserid())
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody requestBody) throws Exception {
String result = requestBody.string();
JSONArray jsonArray = new JSONArray(result);
mLayoutSwipeRefresh.setRefreshing(false);
                            if (jsonArray.length() > 0) {
                                mCaseList.clear();
                                for (int i = 0; i < jsonArray.length(); i++) {
                                    JSONObject jsonObject = jsonArray.getJSONObject(i);
                                    String ajid = jsonObject.getString("AJID");
                                    GreenCase unique = GreenDAOManager.getInstence().getDaoSession().getGreenCaseDao().queryBuilder().where(GreenCaseDao.Properties.AJID.eq(ajid)).unique();
                                    boolean isNew = unique == null;
                                    if (isNew) {
                                        unique = new GreenCase();
                                    }
                                    unique.setAJID(ajid);
                                    unique.setAFDD(jsonObject.getString("AFDD"));
                                    unique.setAFSJ(jsonObject.optLong("AFSJ"));
                                    unique.setAJLX(jsonObject.getString("AJLX"));
                                    unique.setAJLXID(jsonObject.getString("AJLXID"));
                                    unique.setAJLY(jsonObject.getString("AJLY"));
                                    unique.setAJMC(jsonObject.getString("AJMC"));
                                    unique.setAQJY(jsonObject.getString("AQJY"));
                                    unique.setAY(jsonObject.getString("AY"));
                                    unique.setCBR1(jsonObject.getString("CBR1"));
                                    unique.setCBR2(jsonObject.getString("CBR2"));
                                    unique.setCBRDW1(jsonObject.getString("CBRDW1"));
                                    unique.setCBRDW2(jsonObject.getString("CBRDW2"));
                                    unique.setCBRID1(jsonObject.getString("CBRID1"));
                                    unique.setCBRID2(jsonObject.getString("CBRID2"));
                                    unique.setCFNR(jsonObject.getString("CFNR"));
                                    unique.setCFYJ(jsonObject.getString("CFYJ"));
                                    unique.setDSRQK(jsonObject.getString("DSRQK"));
                                    unique.setFLYJ(jsonObject.getString("FLYJ"));
                                    unique.setJGD(jsonObject.getString("JGD"));
                                    unique.setSLR(jsonObject.getString("SLR"));
                                    unique.setSLRQ(jsonObject.optLong("SLRQ"));
                                    unique.setSLXX_ZT(jsonObject.getString("SLXX_ZT"));
                                    unique.setSQWTR(jsonObject.getString("SQWTR"));
                                    unique.setSSD(jsonObject.getString("SSD"));
                                    unique.setWHJGFSD(jsonObject.getString("WHJGFSD"));
                                    unique.setXWZSD(jsonObject.getString("XWZSD"));
                                    unique.setZFBM(jsonObject.getString("ZFBM"));
                                    unique.setZFZH1(jsonObject.getString("ZFZH1"));
                                    unique.setZFZH2(jsonObject.getString("ZFZH2"));
                                    unique.setZT(jsonObject.getString("ZT"));
                                    if (isNew) {
                                        GreenDAOManager.getInstence().getDaoSession().getGreenCaseDao().insert(unique);
                                    } else {
                                        GreenDAOManager.getInstence().getDaoSession().getGreenCaseDao().update(unique);
                                    }
                                    mCaseList.add(unique);
                                }
                                mCaseListAdapter.setNewData(mCaseList);
} else {
mLayoutSwipeRefresh.setVisibility(View.GONE);
mTv.setVisibility(View.VISIBLE);
}
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
mLayoutSwipeRefresh.setRefreshing(false);
List<GreenCase> greenCases = GreenDAOManager.getInstence().getDaoSession().getGreenCaseDao().queryBuilder().where(GreenCaseDao.Properties.CBRID1.eq(OkingContract.CURRENTUSER.getUserid())).list();
mCaseListAdapter.setNewData(greenCases);
}
});
}
@Override
public void onDestroyView() {
super.onDestroyView();
unbinder.unbind();
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context != null) {
mActivity = (CaseManagerActivity) getActivity();
}
}
}
<file_sep>package com.zhang.baselib.utils;
import android.content.Context;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.Environment;
import android.os.SystemClock;
import android.util.Log;
import com.zhang.okinglawenforcementphone.SendEmailManager;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import io.reactivex.schedulers.Schedulers;
/**
* Created by Administrator on 2018/5/23/023.
*/
public class CrashUtil implements Thread.UncaughtExceptionHandler {
private volatile static CrashUtil mInstance;
private Thread.UncaughtExceptionHandler mHandler;
private boolean mInitialized;
private String crashDir;
private String versionName;
private int versionCode;
private Context context;
private CrashUtil(Context context) {
this.context = context;
}
/**
     * Get the singleton instance.
     * <p>Initialize it in the Application class: {@code RxCrashUtils.getInstance().init(this);}</p>
     *
     * @return the singleton instance
*/
public static CrashUtil getInstance(Context context) {
if (mInstance == null) {
synchronized (CrashUtil.class) {
if (mInstance == null) {
mInstance = new CrashUtil(context);
}
}
}
return mInstance;
}
/**
     * Initialize the crash handler.
     *
     * @return {@code true}: success<br>{@code false}: failure
*/
public boolean init() {
if (mInitialized) return true;
if (Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())) {
crashDir = context.getExternalCacheDir().getPath() + File.separator + "crash" + File.separator;
} else {
crashDir = context.getCacheDir().getPath() + File.separator + "crash" + File.separator;
}
try {
PackageInfo pi = context.getPackageManager().getPackageInfo(context.getPackageName(), 0);
versionName = pi.versionName;
versionCode = pi.versionCode;
} catch (PackageManager.NameNotFoundException e) {
e.printStackTrace();
return false;
}
mHandler = Thread.getDefaultUncaughtExceptionHandler();
Thread.setDefaultUncaughtExceptionHandler(this);
return mInitialized = true;
}
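    /**
     * Writes the crash head and the full stack trace (including nested causes) to a
     * timestamped file on a background scheduler, then delegates to the previously
     * installed default handler.
     */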
@Override
public void uncaughtException(Thread thread, final Throwable throwable) {
String now = OkingContract.SDF.format(System.currentTimeMillis());
final String fullPath = crashDir + now + ".txt";
if (!FileUtil.createOrExistsFile(fullPath)) return;
Schedulers.io().createWorker().schedule(new Runnable() {
@Override
public void run() {
PrintWriter pw = null;
try {
pw = new PrintWriter(new FileWriter(fullPath, false));
pw.write(getCrashHead());
throwable.printStackTrace(pw);
Throwable cause = throwable.getCause();
while (cause != null) {
cause.printStackTrace(pw);
cause = cause.getCause();
}
} catch (IOException e) {
e.printStackTrace();
} finally {
FileUtil.closeIO(pw);
}
}
});
if (mHandler != null) {
mHandler.uncaughtException(thread, throwable);
}
}
/**
     * Build the header written at the top of every crash log file.
     *
     * @return the crash log header
*/
private String getCrashHead() {
return "\n************* Crash Log Head ****************" +
"\n设备厂商 : " + Build.MANUFACTURER +// 设备厂商
"\n设备型号 : " + Build.MODEL +// 设备型号
"\n系统版本 : " + Build.VERSION.RELEASE +// 系统版本
"\nSDK版本 : " + Build.VERSION.SDK_INT +// SDK版本
"\n程序版本名称 : " + versionName +
"\n程序版本号 : " + versionCode +
"\n************* Crash Log Head ****************\n\n";
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.fragments;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.zhang.okinglawenforcementphone.R;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
public class RegulationsDetailFragment extends Fragment {
// TODO: Rename parameter arguments, choose names that match
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
private static final String ARG_PARAM1 = "param1";
private static final String ARG_PARAM2 = "param2";
@BindView(R.id.tv_detail)
TextView tvDetail;
@BindView(R.id.tv_itemdetail)
TextView tvItemdetail;
Unbinder unbinder;
// TODO: Rename and change types of parameters
private String articlesContent;
private String rulesContent;
private View mInflate;
public RegulationsDetailFragment() {
// Required empty public constructor
}
public static RegulationsDetailFragment newInstance(String articlesContent, String rulesContent) {
RegulationsDetailFragment fragment = new RegulationsDetailFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM1, articlesContent);
args.putString(ARG_PARAM2, rulesContent);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
articlesContent = getArguments().getString(ARG_PARAM1);
rulesContent = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
if (mInflate == null) {
mInflate = inflater.inflate(R.layout.fragment_regulations_detail, container, false);
}
unbinder = ButterKnife.bind(this, mInflate);
initData();
setListener();
return mInflate;
}
private void initData() {
tvDetail.setText(rulesContent);
tvItemdetail.setText(articlesContent);
}
private void setListener() {
}
@Override
public void onDestroyView() {
super.onDestroyView();
unbinder.unbind();
}
public void setRulesContent(String articlesContent, String rulesContent) {
this.articlesContent = articlesContent;
this.rulesContent = rulesContent;
if (tvItemdetail!=null){
tvDetail.setText(rulesContent);
tvItemdetail.setText(articlesContent);
}
}
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.view.View;
import com.chad.library.adapter.base.BaseMultiItemQuickAdapter;
import com.chad.library.adapter.base.BaseViewHolder;
import com.chad.library.adapter.base.entity.MultiItemEntity;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.WrittenItemBean;
import com.zhang.okinglawenforcementphone.beans.WrittenRecordLevel0;
import java.util.List;
/**
* Created by Administrator on 2018/5/7/007.
*/
public class ExpandableItemCaseRegistAdapter extends BaseMultiItemQuickAdapter<MultiItemEntity, BaseViewHolder> {
public static final int TYPE_LEVEL_0 = 0;
public static final int TYPE_LEVEL_1 = 1;
public static final int TYPE_LEVEL_2 = 2;
public static final int TYPE_LEVEL_3 = 3;
public ExpandableItemCaseRegistAdapter(List<MultiItemEntity> data) {
super(data);
addItemType(TYPE_LEVEL_0, R.layout.activity_mission_recor_level0);
addItemType(TYPE_LEVEL_1, R.layout.case_regist_info1);
addItemType(TYPE_LEVEL_2, R.layout.case_regist_info2);
addItemType(TYPE_LEVEL_3, R.layout.case_regist_info3);
}
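    // convert() below only binds the level-0 header rows and toggles expand/collapse on click;
    // the three detail item types need no extra binding here.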
@Override
protected void convert(final BaseViewHolder helper, MultiItemEntity item) {
switch (helper.getItemViewType()) {
case TYPE_LEVEL_0:
final WrittenRecordLevel0 lv0 = (WrittenRecordLevel0) item;
helper.setText(R.id.title, lv0.subTitle);
helper.itemView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
int adapterPosition = helper.getAdapterPosition();
List<WrittenItemBean> subItems = lv0.getSubItems();
int subItemType = subItems.get(0).getItemType();
if (lv0.isExpanded()) {
collapse(adapterPosition);
} else {
expand(adapterPosition);
}
}
});
break;
case TYPE_LEVEL_1:
break;
case TYPE_LEVEL_2:
break;
case TYPE_LEVEL_3:
break;
default:
break;
}
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.fragments;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentTransaction;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.GreenCase;
import com.zhang.okinglawenforcementphone.mvp.ui.activitys.CaseDealActivity;
import com.zhang.okinglawenforcementphone.mvp.ui.activitys.EvidenceManagerActivity;
import java.text.SimpleDateFormat;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import butterknife.Unbinder;
public class CaseDealFragment extends Fragment {
// TODO: Rename parameter arguments, choose names that match
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
private static final String ARG_PARAM1 = "param1";
private static final String ARG_PARAM2 = "param2";
@BindView(R.id.ajid_tv)
TextView mAjidTv;
@BindView(R.id.ajlx_tv)
TextView mAjlxTv;
@BindView(R.id.ajly_tv)
TextView mAjlyTv;
@BindView(R.id.ajmc_tv)
TextView mAjmcTv;
@BindView(R.id.slsj_tv)
TextView mSlsjTv;
@BindView(R.id.dsr_tv)
TextView mDsrTv;
@BindView(R.id.afdd_tv)
TextView mAfddTv;
@BindView(R.id.aqjy_tv)
TextView mAqjyTv;
@BindView(R.id.ajzt_tv)
TextView mAjztTv;
@BindView(R.id.change_btn)
Button mChangeBtn;
Unbinder unbinder;
private GreenCase mUnique;
// TODO: Rename and change types of parameters
private String mParam1;
private String mParam2;
private View mInflate;
private SimpleDateFormat mSimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm");
private CaseDealActivity mCaseDealActivity;
public CaseDealFragment() {
// Required empty public constructor
}
public static CaseDealFragment newInstance(String param1, String param2) {
CaseDealFragment fragment = new CaseDealFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM1, param1);
args.putString(ARG_PARAM2, param2);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mParam1 = getArguments().getString(ARG_PARAM1);
mParam2 = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
if (mInflate == null) {
mInflate = inflater.inflate(R.layout.fragment_case_deal, container, false);
}
unbinder = ButterKnife.bind(this, mInflate);
initData();
return mInflate;
}
private void initData() {
if (mUnique != null) {
mAjlxTv.setText(mUnique.getAJLX());
mAjlyTv.setText(mUnique.getAJLY());
mAjmcTv.setText(mUnique.getAJMC());
mSlsjTv.setText(mSimpleDateFormat.format(mUnique.getSLRQ()));
mDsrTv.setText(mUnique.getDSRQK());
mAfddTv.setText(mUnique.getAFDD());
mAqjyTv.setText(mUnique.getAQJY());
mAjidTv.setText(mUnique.getAJID());
switch (mUnique.getSLXX_ZT()) {
case "SL":
mAjztTv.setText("受理");
break;
case "CBBDCQZ":
mAjztTv.setText("承办并调查取证");
break;
case "ZB":
mAjztTv.setText("转办");
break;
case "LA":
mAjztTv.setText("立案");
break;
case "AJSC":
mAjztTv.setText("案件审查");
break;
case "BYCF":
mAjztTv.setText("不予处罚");
break;
case "WSZL":
mAjztTv.setText("完善资料");
break;
case "YS":
mAjztTv.setText("移送");
break;
case "CFGZHTZ":
mAjztTv.setText("处罚告知或听证");
break;
case "TZ":
mAjztTv.setText("听证");
break;
case "FH":
mAjztTv.setText("复核");
break;
case "CFJD":
mAjztTv.setText("处罚决定");
break;
case "ZX":
mAjztTv.setText("执行");
break;
case "JABGD":
mAjztTv.setText("结案并归档");
break;
default:
mAjztTv.setText("");
break;
}
}
}
@Override
public void onDestroyView() {
super.onDestroyView();
unbinder.unbind();
}
@OnClick({R.id.change_btn, R.id.evidence_btn})
public void onViewClicked(View view) {
switch (view.getId()) {
case R.id.change_btn:
getActivity().finish();
break;
case R.id.evidence_btn:
String ajid = mCaseDealActivity.getAJID();
Intent intent = new Intent(mCaseDealActivity, EvidenceManagerActivity.class);
intent.putExtra("AJID",ajid);
startActivity(intent);
// mCaseDealActivity.setToolbarText("证据管理");
// FragmentTransaction fragmentTransaction = getFragmentManager().beginTransaction();
// fragmentTransaction.hide(CaseDealFragment.this);
// if (mCaseEvidenceFragment == null) {
// mCaseEvidenceFragment = CaseEvidenceFragment.newInstance(null);
// fragmentTransaction.add(R.id.sub_fragment_root, mCaseEvidenceFragment, "caseEvidenceFragment").commit();
// }else {
// fragmentTransaction.show(mCaseEvidenceFragment).commit();
//
// }
break;
default:
break;
}
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
mCaseDealActivity = (CaseDealActivity) context;
}
public void setGreenCase(GreenCase greenCase) {
this.mUnique = greenCase;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.fragments;
import android.content.Context;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.zhang.baselib.BaseApplication;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.adapter.UserRecyAdaper;
import com.zhang.okinglawenforcementphone.beans.UserItemOV;
import com.zhang.okinglawenforcementphone.mvp.ui.activitys.CaseManagerActivity;
import com.zhang.okinglawenforcementphone.views.DividerItemDecoration;
import com.zhang.okinglawenforcementphone.views.GridDivider;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
public class HandlingMenuFragment extends Fragment {
// TODO: Rename parameter arguments, choose names that match
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
private static final String ARG_PARAM1 = "param1";
private static final String ARG_PARAM2 = "param2";
@BindView(R.id.recy_law_menu)
RecyclerView mRecyLawMenu;
Unbinder unbinder;
// TODO: Rename and change types of parameters
private String mParam1;
private String mParam2;
private View mInflate;
private Fragment mHandlingMenuFragment;
private UserItemOV mUserItemOV;
private UserRecyAdaper mHandingMenuRecyAdaper;
private CaseManagerActivity mCaseManagerActivity;
private CaseRegistrationFragment mCaseRegistrationFragment;
private OpenCasesFragment mOpenCasesFragment;
private CaseProcessingListFragment mCaseProcessingListFragment;
private CaseComplaintFragment mCaseComplaintFragment;
private CaseInAdvanceFragment mCaseInAdvanceFragment;
public HandlingMenuFragment() {
// Required empty public constructor
}
public static HandlingMenuFragment newInstance(String param1, String param2) {
HandlingMenuFragment fragment = new HandlingMenuFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM1, param1);
args.putString(ARG_PARAM2, param2);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mParam1 = getArguments().getString(ARG_PARAM1);
mParam2 = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
if (mInflate == null) {
mInflate = inflater.inflate(R.layout.fragment_handling_menu, container, false);
}
unbinder = ButterKnife.bind(this, mInflate);
initData();
setListenner();
return mInflate;
}
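    // Each menu item click swaps the content fragment: the other case fragments are hidden
    // first, then the selected one is shown (or created and added on first use).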
private void setListenner() {
mHandingMenuRecyAdaper.setOnItemClickListener(new BaseQuickAdapter.OnItemClickListener() {
@Override
public void onItemClick(BaseQuickAdapter adapter, View view, int position) {
FragmentTransaction fragmentTransaction = getFragmentManager().beginTransaction();
if (!mHandlingMenuFragment.isHidden()) {
fragmentTransaction.hide(mHandlingMenuFragment);
}
switch (position) {
case 0:
mCaseManagerActivity.setTitleText("案件登记");
if (mOpenCasesFragment != null) {
fragmentTransaction.hide(mOpenCasesFragment);
}
if (mCaseProcessingListFragment != null) {
fragmentTransaction.hide(mCaseProcessingListFragment);
}
if (mCaseComplaintFragment != null) {
fragmentTransaction.hide(mCaseComplaintFragment);
}
if (mCaseInAdvanceFragment != null) {
fragmentTransaction.hide(mCaseInAdvanceFragment);
}
if (mCaseRegistrationFragment == null) {
mCaseRegistrationFragment = CaseRegistrationFragment.newInstance(null, null);
fragmentTransaction.add(R.id.rl_case_content, mCaseRegistrationFragment, "CaseRegistrationFragment").commit();
} else {
if (mCaseRegistrationFragment.isAdded()) {
fragmentTransaction.show(mCaseRegistrationFragment).commit();
}
}
break;
case 1:
mCaseManagerActivity.setTitleText("案件受理");
if (mCaseRegistrationFragment != null) {
fragmentTransaction.hide(mCaseRegistrationFragment);
}
if (mCaseProcessingListFragment != null) {
fragmentTransaction.hide(mCaseProcessingListFragment);
}
if (mCaseComplaintFragment != null) {
fragmentTransaction.hide(mCaseComplaintFragment);
}
if (mCaseInAdvanceFragment != null) {
fragmentTransaction.hide(mCaseInAdvanceFragment);
}
if (mOpenCasesFragment == null) {
mOpenCasesFragment = OpenCasesFragment.newInstance(null, null);
fragmentTransaction.add(R.id.rl_case_content, mOpenCasesFragment, "OpenCasesFragment").commit();
} else {
if (mOpenCasesFragment.isAdded()) {
fragmentTransaction.show(mOpenCasesFragment).commit();
}
}
break;
case 2:
mCaseManagerActivity.setTitleText("案件处理");
if (mCaseRegistrationFragment != null) {
fragmentTransaction.hide(mCaseRegistrationFragment);
}
if (mOpenCasesFragment != null) {
fragmentTransaction.hide(mOpenCasesFragment);
}
if (mCaseComplaintFragment != null) {
fragmentTransaction.hide(mCaseComplaintFragment);
}
if (mCaseInAdvanceFragment != null) {
fragmentTransaction.hide(mCaseInAdvanceFragment);
}
if (mCaseProcessingListFragment == null) {
mCaseProcessingListFragment = CaseProcessingListFragment.newInstance(null, null);
fragmentTransaction.add(R.id.rl_case_content, mCaseProcessingListFragment, "CaseProcessingListFragment").commit();
} else {
if (mCaseProcessingListFragment.isAdded()) {
fragmentTransaction.show(mCaseProcessingListFragment).commit();
}
}
break;
case 3:
mCaseManagerActivity.setTitleText("案件转办");
if (mCaseRegistrationFragment != null) {
fragmentTransaction.hide(mCaseRegistrationFragment);
}
if (mOpenCasesFragment != null) {
fragmentTransaction.hide(mOpenCasesFragment);
}
if (mCaseProcessingListFragment != null) {
fragmentTransaction.hide(mCaseProcessingListFragment);
}
if (mCaseInAdvanceFragment != null) {
fragmentTransaction.hide(mCaseInAdvanceFragment);
}
if (mCaseComplaintFragment == null) {
mCaseComplaintFragment = CaseComplaintFragment.newInstance(null, null);
fragmentTransaction.add(R.id.rl_case_content, mCaseComplaintFragment, "CaseComplaintFragment").commit();
} else {
if (mCaseComplaintFragment.isAdded()) {
fragmentTransaction.show(mCaseComplaintFragment).commit();
}
}
break;
case 4:
mCaseManagerActivity.setTitleText("预立案");
if (mCaseRegistrationFragment != null) {
fragmentTransaction.hide(mCaseRegistrationFragment);
}
if (mOpenCasesFragment != null) {
fragmentTransaction.hide(mOpenCasesFragment);
}
if (mCaseComplaintFragment != null) {
fragmentTransaction.hide(mCaseComplaintFragment);
}
if (mCaseProcessingListFragment != null) {
fragmentTransaction.hide(mCaseProcessingListFragment);
}
if (mCaseInAdvanceFragment == null) {
mCaseInAdvanceFragment = CaseInAdvanceFragment.newInstance(null, null);
fragmentTransaction.add(R.id.rl_case_content, mCaseInAdvanceFragment, "CaseInAdvanceFragment").commit();
} else {
if (mCaseInAdvanceFragment.isAdded()) {
fragmentTransaction.show(mCaseInAdvanceFragment).commit();
}
}
break;
default:
break;
}
}
});
}
private void initData() {
mHandlingMenuFragment = getFragmentManager().findFragmentByTag("HandlingMenuFragment");
mRecyLawMenu.setLayoutManager(new GridLayoutManager(BaseApplication.getApplictaion(),3));
mRecyLawMenu.addItemDecoration(new GridDivider(BaseApplication.getApplictaion(), 10, getResources().getColor(R.color.line)));
List<UserItemOV> userItemOVS = new ArrayList<>();
mUserItemOV = new UserItemOV();
mUserItemOV.setTitle("案件登记");
mUserItemOV.setIcon(R.mipmap.ajdj);
userItemOVS.add(mUserItemOV);
mUserItemOV = new UserItemOV();
mUserItemOV.setTitle("案件受理");
mUserItemOV.setIcon(R.mipmap.ajsl);
userItemOVS.add(mUserItemOV);
mUserItemOV = new UserItemOV();
mUserItemOV.setTitle("案件处理");
mUserItemOV.setIcon(R.mipmap.ajcl);
userItemOVS.add(mUserItemOV);
mUserItemOV = new UserItemOV();
mUserItemOV.setTitle("案件转办");
mUserItemOV.setIcon(R.mipmap.ajzb);
userItemOVS.add(mUserItemOV);
mUserItemOV = new UserItemOV();
mUserItemOV.setTitle("预立案");
mUserItemOV.setIcon(R.mipmap.yla);
userItemOVS.add(mUserItemOV);
mHandingMenuRecyAdaper = new UserRecyAdaper(R.layout.handling_menu_item, userItemOVS);
mHandingMenuRecyAdaper.openLoadAnimation(BaseQuickAdapter.SLIDEIN_RIGHT);
mRecyLawMenu.setAdapter(mHandingMenuRecyAdaper);
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
mCaseManagerActivity = (CaseManagerActivity) context;
}
@Override
public void onDestroyView() {
super.onDestroyView();
unbinder.unbind();
}
}
<file_sep>package com.zhang.okinglawenforcementphone.utils;
import android.app.Dialog;
import android.content.Context;
import android.util.Log;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.zhang.baselib.utils.DensityUtil;
import com.zhang.okinglawenforcementphone.R;
/**
* Created by Administrator on 2018/4/19.
*/
public class DialogUtil {
private Dialog bottomDialog;
private ViewGroup.MarginLayoutParams mParams;
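    /**
     * Shows a bottom-aligned dialog wrapping {@code contentView}. The dialog is created
     * only once; later calls just adjust its height and show it again.
     */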
public void showBottomDialog(Context context, View contentView,float height) {
if (bottomDialog==null){
bottomDialog = new Dialog(context, R.style.BottomDialog);
bottomDialog.setContentView(contentView);
mParams = (ViewGroup.MarginLayoutParams) contentView.getLayoutParams();
mParams.width = context.getResources().getDisplayMetrics().widthPixels - DensityUtil.dp2px(context, 16f);
mParams.bottomMargin = DensityUtil.dp2px(context, 8f);
contentView.setLayoutParams(mParams);
bottomDialog.setCanceledOnTouchOutside(true);
bottomDialog.getWindow().setGravity(Gravity.BOTTOM);
bottomDialog.getWindow().setWindowAnimations(R.style.BottomDialog_Animation);
}
mParams.height = DensityUtil.dp2px(context,height);
bottomDialog.show();
}
    public void cancelDialog() {
        if (bottomDialog != null) {
            bottomDialog.dismiss();
        }
    }
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import com.zhang.baselib.BaseApplication;
import com.zhang.okinglawenforcementphone.R;
/**
* Created by Administrator on 2017/10/20.
*/
public class SpinnerArrayAdapter extends ArrayAdapter<String> {
private String [] mStringArray;
public SpinnerArrayAdapter(String[] stringArray) {
super(BaseApplication.getApplictaion(), R.layout.sp_item_auto, stringArray);
mStringArray=stringArray;
}
@Override
public View getDropDownView(int position, View convertView, ViewGroup parent) {
        // Change the text color of the drop-down items shown after the Spinner expands
if (convertView == null) {
LayoutInflater inflater = LayoutInflater.from(BaseApplication.getApplictaion());
convertView = inflater.inflate(R.layout.spinner_item, parent,false);
}
        // text1 here refers to the TextView the Spinner uses by default to display its text
TextView tv = (TextView) convertView.findViewById(R.id.tv);
tv.setText(mStringArray[position]);
// tv.setTextSize(5f);
// tv.setTextColor(Color.argb(255,161,168,174));
return convertView;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
        // Change the text color of the selected result shown in the Spinner
if (convertView == null) {
LayoutInflater inflater = LayoutInflater.from(BaseApplication.getApplictaion());
convertView = inflater.inflate(R.layout.sp_item_auto, parent, false);
// AutoUtils.autoSize(convertView);
}
        // text1 here refers to the TextView the Spinner uses by default to display its text
TextView tv = (TextView) convertView.findViewById(R.id.tv);
tv.setText(mStringArray[position]);
// tv.setTextSize(AutoUtils.getPercentWidthSize(21));
// tv.setTextColor(Color.argb(255,161,168,174));
return convertView;
}
}<file_sep>package com.zhang.okinglawenforcementphone.mvp.model;
import android.text.format.DateFormat;
import com.google.gson.Gson;
import com.zhang.baselib.http.BaseHttpFactory;
import com.zhang.baselib.http.schedulers.RxSchedulersHelper;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.beans.Point;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.http.service.GDWaterService;
import com.zhang.okinglawenforcementphone.mvp.contract.UploadLocationToServerContract;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import io.reactivex.functions.Consumer;
import okhttp3.ResponseBody;
/**
* Created by Administrator on 2018/4/20.
*/
public class UploadLocationToServerModel implements UploadLocationToServerContract.Model {
private UploadLocationToServerContract.Presenter mPresenter;
private Point mLocation;
public UploadLocationToServerModel(UploadLocationToServerContract.Presenter presenter) {
mPresenter = presenter;
}
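    // Builds a Point from the latest fix cached in OkingContract.LOCATIONRESULT (latitude,
    // longitude, timestamp) and posts it to the server with the login time, user id and IMEI.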
@Override
public void upploadLocationToServer(Long loginTime, SimpleDateFormat sdf, String imei, Gson gson) {
if (mLocation==null){
mLocation = new Point();
}
mLocation.setLatitude(Double.parseDouble(OkingContract.LOCATIONRESULT[1]));
mLocation.setLongitude(Double.parseDouble(OkingContract.LOCATIONRESULT[2]));
try {
mLocation.setDatetime(sdf.parse(OkingContract.LOCATIONRESULT[3]).getTime());
} catch (ParseException e) {
e.printStackTrace();
}
BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.uploadLocation(loginTime,
OkingContract.CURRENTUSER.getUserid(),
imei,gson.toJson(mLocation), DateFormat.format("yyyy-MM-dd HH:mm:ss", System.currentTimeMillis()).toString())
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
String result = responseBody.string();
mPresenter.uploadSucc(result);
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
mPresenter.uploadFail(throwable);
}
});
}
}
<file_sep>package com.zhang.baselib.utils;
import android.content.Context;
import android.hardware.Camera;
import android.os.Build;
import java.util.Comparator;
import java.util.List;
/**
* Created by Administrator on 2018/4/25/025.
*/
public class Util {
public static List<Camera.Size> getResolutionList(Camera camera) {
Camera.Parameters parameters = camera.getParameters();
return parameters.getSupportedPreviewSizes();
}
public static boolean hasGingerbread() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD;
}
public static boolean hasHoneycomb() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB;
}
public static boolean hasHoneycombMR1() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1;
}
public static boolean hasJellyBean() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN;
}
public static boolean hasKitKat() {
return Build.VERSION.SDK_INT >= 19;
}
public static boolean hasFroyo() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO;
}
public static class ResolutionComparator implements Comparator<Camera.Size> {
@Override
public int compare(Camera.Size lhs, Camera.Size rhs) {
if (lhs.height != rhs.height) {
return lhs.height - rhs.height;
} else {
return lhs.width - rhs.width;
}
}
}
/**
     * Get the status bar height in pixels.
* @param context
* @return
*/
public static int getStatusBarHeight(Context context) {
int result = 0;
int resourceId = context.getResources().getIdentifier("status_bar_height", "dimen", "android");
if (resourceId > 0) {
result = context.getResources().getDimensionPixelSize(resourceId);
}
return result;
}
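    /**
     * Number of whole days between two timestamps (time2 - time1), truncated towards zero.
     */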
public static int getDatePoor(long time1, long time2) {
long nd = 1000 * 24 * 60 * 60;
// long ns = 1000;
        // Difference between the two timestamps in milliseconds
        long diff = time2 - time1;
        // Convert the difference to whole days
int day = (int) (diff / nd);
return day;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.model;
import com.zhang.baselib.http.BaseHttpFactory;
import com.zhang.baselib.http.schedulers.RxSchedulersHelper;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.http.service.GDWaterService;
import com.zhang.okinglawenforcementphone.mvp.contract.AddMemberContract;
import io.reactivex.functions.Consumer;
import okhttp3.ResponseBody;
/**
* Created by Administrator on 2018/4/20.
*/
public class AddMemberModel implements AddMemberContract.Model {
private AddMemberContract.Presenter mPresenter;
public AddMemberModel(AddMemberContract.Presenter presenter) {
mPresenter = presenter;
}
@Override
public void addMember(String userid, String mtaskId, String userids) {
BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.addMember(userid,mtaskId,userids)
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
mPresenter.addMemberSucc(responseBody.string());
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
mPresenter.addMemberFail(throwable);
}
});
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.contract;
import java.util.Map;
/**
* Created by Administrator on 2018/4/28/028.
*/
public interface UploadRecordContract {
interface Model {
void uploadRecord(Map<String, Object> params);
}
interface View {
void uploadRecordSucc(String result);
void uploadRecordFail(Throwable ex);
}
interface Presenter {
void uploadRecord(Map<String, Object> params);
void uploadRecordSucc(String result);
void uploadRecordFail(Throwable ex);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.fragments;
import android.content.DialogInterface;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.Toast;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.http.BaseHttpFactory;
import com.zhang.baselib.http.schedulers.RxSchedulersHelper;
import com.zhang.baselib.ui.views.RxDialogLoading;
import com.zhang.baselib.ui.views.RxDialogSure;
import com.zhang.baselib.ui.views.RxDialogSureCancel;
import com.zhang.baselib.ui.views.RxToast;
import com.zhang.baselib.utils.NetUtil;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.adapter.CaseAudioVideoEvidenceListRecyAdapter;
import com.zhang.okinglawenforcementphone.beans.GreenCase;
import com.zhang.okinglawenforcementphone.beans.GreenEvidence;
import com.zhang.okinglawenforcementphone.beans.GreenEvidenceMedia;
import com.zhang.okinglawenforcementphone.beans.GreenEvidenceSTZJOV;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.http.service.GDWaterService;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import io.reactivex.Observable;
import io.reactivex.ObservableEmitter;
import io.reactivex.ObservableOnSubscribe;
import io.reactivex.Observer;
import io.reactivex.disposables.Disposable;
import io.reactivex.functions.Consumer;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
/**
* A simple {@link Fragment} subclass.
*/
public class CaseAudioVideoEvidenceListFragment extends Fragment {
private static final String ARG_PARAM2 = "param2";
private String mParam2;
private GreenCase mycase;
private RecyclerView ryMain;
private ArrayList<GreenEvidence> evidences = new ArrayList<>();
private boolean uploadSound, uploadVideo;
private int uploadSoundCount, uploadVideoCount;
private SimpleDateFormat mSimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm");
private CaseAudioVideoEvidenceListRecyAdapter mCaseAudioVideoEvidenceListRecyAdapter;
private RxDialogSureCancel mRxDialogSureCancel;
private RxDialogLoading mRxDialogLoading;
private ArrayList<GreenEvidenceMedia> mPicGreenMediaList = new ArrayList<>();
private ArrayList<GreenEvidenceMedia> mVoiceGreenMediaList = new ArrayList<>();
private ArrayList<GreenEvidenceMedia> mVideoGreenMediaList = new ArrayList<>();
private View mInflate;
public CaseAudioVideoEvidenceListFragment() {
// Required empty public constructor
}
public static CaseAudioVideoEvidenceListFragment newInstance( String param2) {
CaseAudioVideoEvidenceListFragment fragment = new CaseAudioVideoEvidenceListFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM2, param2);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mParam2 = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
if (mInflate ==null){
mInflate = inflater.inflate(R.layout.fragment_case_audio_video_evidence_list, container, false);
}
EventBus.getDefault().register(this);
initView(mInflate);
return mInflate;
}
@Override
public void onDestroyView() {
EventBus.getDefault().unregister(this);
super.onDestroyView();
}
public void initView(View rootView) {
ryMain = rootView.findViewById(R.id.ry_main);
ryMain.setLayoutManager(new LinearLayoutManager(BaseApplication.getApplictaion(), LinearLayoutManager.VERTICAL, false));
mCaseAudioVideoEvidenceListRecyAdapter = new CaseAudioVideoEvidenceListRecyAdapter(R.layout.list_item_audiovideoevidence, null);
ryMain.setAdapter(mCaseAudioVideoEvidenceListRecyAdapter);
loadEvidence();
mCaseAudioVideoEvidenceListRecyAdapter.setOnItemChildClickListener(new BaseQuickAdapter.OnItemChildClickListener() {
@Override
public void onItemChildClick(final BaseQuickAdapter adapter, View view, final int position) {
switch (view.getId()) {
                    case R.id.upload_button: // upload
if (NetUtil.isConnected(BaseApplication.getApplictaion())) {
GreenEvidence greenEvidence = evidences.get(position);
List<GreenEvidenceMedia> greenMedias = greenEvidence.getGreenMedia();
for (GreenEvidenceMedia greenMedia : greenMedias) {
if (greenMedia.getType()==1){
mPicGreenMediaList.clear();
mPicGreenMediaList.add(greenMedia);
}else if (greenMedia.getType()==2){
mVideoGreenMediaList.clear();
mVideoGreenMediaList.add(greenMedia);
}else if (greenMedia.getType()==3){
mVoiceGreenMediaList.clear();
mVoiceGreenMediaList.add(greenMedia);
}
}
saveEvidence(evidences.get(position));
} else {
RxToast.warning(BaseApplication.getApplictaion(), "网络无连接", Toast.LENGTH_SHORT).show();
}
break;
                    case R.id.delete_button: // delete
if (mRxDialogSureCancel == null) {
mRxDialogSureCancel = new RxDialogSureCancel(getActivity());
mRxDialogSureCancel.setTitle("提示");
mRxDialogSureCancel.setContent("是否删除证据");
}
mRxDialogSureCancel.getTvSure().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
GreenDAOManager.getInstence().getDaoSession().getGreenEvidenceDao().delete(evidences.get(position));
evidences.remove(position);
mCaseAudioVideoEvidenceListRecyAdapter.setNewData(evidences);
mRxDialogSureCancel.cancel();
}
});
mRxDialogSureCancel.getTvCancel().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mRxDialogSureCancel.cancel();
}
});
mRxDialogSureCancel.show();
break;
                    case R.id.edit_button: // view / edit
if (((Button) view).getText().equals("查看")) {
FragmentTransaction ft = getFragmentManager().beginTransaction();
CaseAudioVideoEvidenceFragment caseAudioVideoEvidenceFragment = CaseAudioVideoEvidenceFragment.newInstance(0);
caseAudioVideoEvidenceFragment.setGreenCase(mycase, evidences.get(position));
ft.add(R.id.rl_sub_content, caseAudioVideoEvidenceFragment).commit();
} else {
FragmentTransaction ft = getFragmentManager().beginTransaction();
CaseAudioVideoEvidenceFragment caseAudioVideoEvidenceFragment = CaseAudioVideoEvidenceFragment.newInstance(1);
caseAudioVideoEvidenceFragment.setGreenCase(mycase, evidences.get(position));
ft.add(R.id.rl_sub_content, caseAudioVideoEvidenceFragment).commit();
}
break;
default:
break;
}
}
});
}
@Subscribe(threadMode = ThreadMode.MAIN)
public void handleEvent1(GreenEvidenceSTZJOV event) {
        if (event.getType() == 2) { // a piece of evidence was added
evidences.add(event.getGreenEvidence());
mCaseAudioVideoEvidenceListRecyAdapter.setNewData(evidences);
} else {
loadEvidence();
}
}
/**
     * Upload the evidence record data to the server.
* @param evidence
*/
private void saveEvidence(final GreenEvidence evidence) {
if (mRxDialogLoading == null) {
mRxDialogLoading = new RxDialogLoading(getActivity(), false, new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialogInterface) {
mRxDialogLoading.cancel();
}
});
}
mRxDialogLoading.setLoadingText("上传数据中...");
mRxDialogLoading.show();
Map<String, Object> fields = new HashMap<>();
if (evidence.getZJID() != null) {
fields.put("zjid", evidence.getZJID());
}
if (evidence.getAJID() != null) {
fields.put("ajid", evidence.getAJID());
}
if (evidence.getZJLX() != null) {
fields.put("zjlx", evidence.getZJLX());
}
if (evidence.getZJMC() != null) {
fields.put("zjmc", evidence.getZJMC());
}
if (evidence.getZJLY() != null) {
fields.put("zjly", evidence.getZJLY());
}
if (evidence.getZJNR() != null) {
fields.put("zjnr", evidence.getZJNR());
}
if (evidence.getSL() != null) {
fields.put("sl", evidence.getSL());
}
if (((Long) evidence.getCJSJ()) != null) {
fields.put("cjsj", mSimpleDateFormat.format(evidence.getCJSJ()));
}
if (evidence.getCJR() != null) {
fields.put("cjr", evidence.getCJR());
}
if (evidence.getCJDD() != null) {
fields.put("cjdd", evidence.getCJDD());
}
if (evidence.getJZR() != null) {
fields.put("jzr", evidence.getJZR());
}
if (evidence.getDW() != null) {
fields.put("dw", evidence.getDW());
}
if (evidence.getBZ() != null) {
fields.put("bz", evidence.getBZ());
}
if (OkingContract.CURRENTUSER != null) {
fields.put("scr", OkingContract.CURRENTUSER.getUserName());
}
fields.put("scsj", mSimpleDateFormat.format(System.currentTimeMillis()));
if (evidence.getZT() != null) {
fields.put("zt", evidence.getZT());
}
if (evidence.getWSID() != null) {
fields.put("wsid", evidence.getWSID());
}
if (evidence.getLXMC() != null) {
fields.put("lxmc", evidence.getLXMC());
}
if (evidence.getZJLYMC() != null) {
fields.put("zjlymc", evidence.getZJLYMC());
}
if (evidence.getYS() != null) {
fields.put("ys", evidence.getYS());
}
BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.uploadEvidence(fields)
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
String result = responseBody.string();
if ("success".equals(result)) {
uploadEvidenceFile(evidence);
} else {
RxToast.error(BaseApplication.getApplictaion(), "上传证据失败", Toast.LENGTH_SHORT).show();
}
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
}
});
}
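    /**
     * Uploads the media attachments of the evidence: every collected video (mp4) and
     * audio (m4a) file is posted separately, then the overall upload state is re-checked.
     */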
private void uploadEvidenceFile(final GreenEvidence evidence) {
uploadSound = true;
uploadVideo = true;
uploadVideoCount = 0;
uploadSoundCount = 0;
Observable.create(new ObservableOnSubscribe<Integer>() {
@Override
public void subscribe(ObservableEmitter<Integer> e) throws Exception {
for (int i = 0; i < mVideoGreenMediaList.size(); i++) {
uploadVideo = false;
String videoPath = mVideoGreenMediaList.get(i).getPath();
uploadFile(videoPath, "mp4", evidence);
}
for (int i = 0; i < mVoiceGreenMediaList.size(); i++) {
uploadSound = false;
String voicePath = mVoiceGreenMediaList.get(i).getPath();
uploadFile(voicePath, "m4a", evidence);
}
e.onNext(1);
}
}).subscribe(new Observer<Integer>() {
@Override
public void onSubscribe(Disposable d) {
}
@Override
public void onNext(Integer value) {
if (value == 1) {
checkChangeState(evidence);
}
}
@Override
public void onError(Throwable e) {
}
@Override
public void onComplete() {
}
});
}
private void uploadFile(String path, final String type, final GreenEvidence evidence) {
Map<String, RequestBody> fileParams = new HashMap<>();
fileParams.put("zjid", RequestBody.create(MediaType.parse("text/plain;charset=UTF-8"), evidence.getZJID()));
fileParams.put("type", RequestBody.create(MediaType.parse("text/plain;charset=UTF-8"), type));
fileParams.put("ajid", RequestBody.create(MediaType.parse("text/plain;charset=UTF-8"), evidence.getAJID()));
fileParams.put("userid", RequestBody.create(MediaType.parse("text/plain;charset=UTF-8"), OkingContract.CURRENTUSER.getUserid()));
File file = new File(Uri.parse(path).getPath());
fileParams.put("files" + "\"; filename=\"" + file.getName(), RequestBody.create(MediaType.parse("video/mp4"), file));
BaseHttpFactory.getInstence().createService(GDWaterService.class,Api.BASE_URL)
.uploadEvidenceFiles(fileParams)
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
String result = responseBody.string();
if ("success".equals(result)) {
switch (type) {
case "mp4":
uploadVideoCount++;
if (uploadVideoCount == mVideoGreenMediaList.size()) {
uploadVideo = true;
checkChangeState(evidence);
}
break;
case "m4a":
uploadSoundCount++;
if (uploadSoundCount == mVoiceGreenMediaList.size()) {
uploadSound = true;
checkChangeState(evidence);
}
break;
default:
break;
}
} else {
mRxDialogLoading.cancel();
RxToast.error(BaseApplication.getApplictaion(), "上传证据附件失败!", Toast.LENGTH_SHORT).show();
}
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
mRxDialogLoading.cancel();
RxToast.error(BaseApplication.getApplictaion(), "上传附件失败!2"+throwable.getMessage(), Toast.LENGTH_SHORT).show();
}
});
}
public void notifyDataSetChanged() {
if (mCaseAudioVideoEvidenceListRecyAdapter != null) {
mCaseAudioVideoEvidenceListRecyAdapter.notifyDataSetChanged();
}
}
private void loadEvidence() {
evidences.clear();
if (mycase != null) {
for (int i = 0; i < mycase.getGreenEvidence().size(); i++) {
if ("YYSP".equals(mycase.getGreenEvidence().get(i).getOtype()) || "SP".equals(mycase.getGreenEvidence().get(i).getOtype())
|| "YY".equals(mycase.getGreenEvidence().get(i).getOtype())) {
evidences.add(mycase.getGreenEvidence().get(i));
}
}
mCaseAudioVideoEvidenceListRecyAdapter.setNewData(evidences);
}
}
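/**
 * Called after each attachment batch finishes; only when both the audio and the video uploads are
 * complete is the evidence flagged as uploaded, persisted via GreenDAO and the success dialog shown.
 */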
private void checkChangeState(GreenEvidence evidence) {
if (uploadSound && uploadVideo) {
mRxDialogLoading.cancel();
evidence.setIsUpload(true);
GreenDAOManager.getInstence().getDaoSession().getGreenEvidenceDao().update(evidence);
mCaseAudioVideoEvidenceListRecyAdapter.notifyDataSetChanged();
GreenDAOManager.getInstence().getDaoSession().getGreenCaseDao().update(mycase);
final RxDialogSure rxDialogSure = new RxDialogSure(getActivity());
rxDialogSure.setTitle("提示");
rxDialogSure.setContent("上传成功!");
rxDialogSure.show();
rxDialogSure.getTvSure().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
rxDialogSure.cancel();
}
});
}
}
public void setGreenCase(GreenCase greenCase){
this.mycase = greenCase;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.fragments;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.design.widget.TextInputEditText;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.chad.library.adapter.base.BaseViewHolder;
import com.zhang.baselib.ui.views.RxDialogLoading;
import com.zhang.baselib.ui.views.RxToast;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.GreenMissionLog;
import com.zhang.okinglawenforcementphone.beans.GreenMissionLogDao;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import com.zhang.okinglawenforcementphone.mvp.contract.LoadBasicLogContract;
import com.zhang.okinglawenforcementphone.mvp.presenter.LoadBasicLogPresenter;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
public class ApprovalTheLogFragment extends Fragment {
// TODO: Rename parameter arguments, choose names that match
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
private static final String ARG_PARAM1 = "param1";
private static final String ARG_PARAM2 = "param2";
@BindView(R.id.type_nature)
TextInputEditText mTypeNature;
@BindView(R.id.tv_tasktype)
TextInputEditText mTvTasktype;
@BindView(R.id.plan_spinner)
TextInputEditText mPlanSpinner;
@BindView(R.id.item_spinner)
TextInputEditText mItemSpinner;
@BindView(R.id.equipment_textView)
TextInputEditText mEquipmentTextView;
@BindView(R.id.tv_patrol)
TextInputEditText mTvPatrol;
@BindView(R.id.tv_result)
TextInputEditText mTvResult;
@BindView(R.id.tv_law_enforcement)
TextInputEditText mTvLawEnforcement;
Unbinder unbinder;
// TODO: Rename and change types of parameters
private String mParam1;
private String mParam2;
private View mInflate;
private GreenMissionLog mGreenMissionLog;
private RxDialogLoading mRxDialogLoading;
private LoadBasicLogPresenter mLoadBasicLogPresenter;
private GreenMissionTask mGreenMissionTask;
public ApprovalTheLogFragment() {
// Required empty public constructor
}
public static ApprovalTheLogFragment newInstance(String param1, String param2) {
ApprovalTheLogFragment fragment = new ApprovalTheLogFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM1, param1);
args.putString(ARG_PARAM2, param2);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mParam1 = getArguments().getString(ARG_PARAM1);
mParam2 = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
if (mInflate == null) {
mInflate = inflater.inflate(R.layout.fragment_approval_the_log, container, false);
}
unbinder = ButterKnife.bind(this, mInflate);
initData();
return mInflate;
}
private void initData() {
if (mGreenMissionLog == null) {
if (mRxDialogLoading == null) {
mRxDialogLoading = new RxDialogLoading(getActivity(), false, new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialog) {
dialog.cancel();
}
});
}
mRxDialogLoading.setLoadingText("正在获取数据中请稍候...");
mRxDialogLoading.show();
//{"msg":"查询成功!","datas":[{"OTHER_PART":"交通,城管","EQUIPMENT":"交通工具:001003,001001,001001 ","PLAN":"0","TYPE":"0"}],"status":"1"}
//
if (mLoadBasicLogPresenter == null) {
mLoadBasicLogPresenter = new LoadBasicLogPresenter(new LoadBasicLogContract.View() {
@Override
public void getBasicLogSucc(String result) {
mRxDialogLoading.cancel();
//{"msg":"查询成功!","datas":[{"OTHER_PART":"交通,城管","EQUIPMENT":"交通工具:001003,001001,001001 ","PLAN":"0","TYPE":"0"}],"status":"1"}
try {
JSONObject jsonObject = new JSONObject(result);
String status = jsonObject.getString("status");
if (status.equals("1")) {
JSONArray datas = jsonObject.getJSONArray("datas");
mGreenMissionLog = new GreenMissionLog();
if (datas.length()>0){
JSONObject object = datas.getJSONObject(0);
mGreenMissionLog.setEquipment(object.getString("EQUIPMENT"));
mGreenMissionLog.setServer_id(object.getString("LOG_ID"));
mGreenMissionLog.setTask_id(mGreenMissionTask.getTaskid());
mGreenMissionLog.setOther_part(object.getString("OTHER_PART"));
mGreenMissionLog.setPlan(Integer.parseInt(object.getString("PLAN")));
mGreenMissionLog.setPatrol(object.getString("PATROL"));
mGreenMissionLog.setDzyj(object.getString("DZYJ"));
Log.i("Oking5",">>>>>>>>>>>"+mGreenMissionLog.toString());
GreenDAOManager.getInstence().getDaoSession().getGreenMissionLogDao().insert(mGreenMissionLog);
setTaskLogData();
}
}
} catch (JSONException e) {
e.printStackTrace();
}
//
}
@Override
public void getBasicLogFail(Throwable ex) {
mRxDialogLoading.cancel();
Log.i("Oking5","获取日志失败"+ex.toString());
RxToast.error("获取日志失败");
}
});
}
mLoadBasicLogPresenter.getBasicLog(mGreenMissionTask.getTaskid());
} else {
setTaskLogData();
}
}
@Override
public void onDestroyView() {
super.onDestroyView();
unbinder.unbind();
}
public void setMissionLog(GreenMissionLog greenMissionLog) {
mGreenMissionLog = greenMissionLog;
}
private void setTaskLogData() {
mTypeNature.setText(mGreenMissionTask.getTypename());
if ("0".equals(mGreenMissionTask.getTypeoftask())) {
mTvTasktype.setText("河道管理");
} else if ("1".equals(mGreenMissionTask.getTypeoftask())) {
mTvTasktype.setText("河道采砂");
} else if ("2".equals(mGreenMissionTask.getTypeoftask())) {
mTvTasktype.setText("水资源管理");
} else if ("3".equals(mGreenMissionTask.getTypeoftask())) {
mTvTasktype.setText("水土保持管理");
} else if ("4".equals(mGreenMissionTask.getTypeoftask())) {
mTvTasktype.setText("水利工程管理");
}
if (0 == mGreenMissionLog.getPlan()) {
mPlanSpinner.setText("月计划");
} else if (1 == mGreenMissionLog.getPlan()) {
mPlanSpinner.setText("季度计划");
} else if (2 == mGreenMissionLog.getPlan()) {
mPlanSpinner.setText("年计划");
}
if (0 == mGreenMissionLog.getItem()) {
mItemSpinner.setText("河道管理执法巡查");
} else if (1 == mGreenMissionLog.getItem()) {
mItemSpinner.setText("河道采砂管理执法巡查");
} else if (2 == mGreenMissionLog.getItem()) {
mItemSpinner.setText("水资源管理执法巡查");
} else if (3 == mGreenMissionLog.getItem()) {
mItemSpinner.setText("水土保持管理执法巡查");
} else if (4 == mGreenMissionLog.getItem()) {
mItemSpinner.setText("水利工程管理执法巡查");
} else if (5 == mGreenMissionLog.getItem()) {
mItemSpinner.setText("举报案件核查巡查");
} else if (6 == mGreenMissionLog.getItem()) {
mItemSpinner.setText("其他");
}
String part = mGreenMissionLog.getOther_part();
mTvLawEnforcement.setText(part);
mEquipmentTextView.setText(mGreenMissionLog.getEquipment());
mTvPatrol.setText(mGreenMissionLog.getPatrol());
mTvResult.setText(mGreenMissionLog.getDzyj());
}
public void setMissionTask(GreenMissionTask greenMissionTask) {
mGreenMissionTask = greenMissionTask;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.model;
import android.os.Environment;
import android.util.Log;
import com.google.gson.Gson;
import com.zhang.baselib.http.BaseHttpFactory;
import com.zhang.baselib.http.schedulers.RxSchedulersHelper;
import com.zhang.baselib.utils.FileUtil;
import com.zhang.baselib.utils.Util;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.beans.Point;
import com.zhang.okinglawenforcementphone.beans.RecordLogOV;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.http.service.GDWaterService;
import com.zhang.okinglawenforcementphone.mvp.contract.UploadJobLogContract;
import org.json.JSONArray;
import org.json.JSONObject;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import io.reactivex.Observable;
import io.reactivex.ObservableSource;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.functions.Consumer;
import io.reactivex.functions.Function;
import io.reactivex.schedulers.Schedulers;
import okhttp3.ResponseBody;
/**
* Created by Administrator on 2018/4/26/026.
*/
public class UploadJobLogModel implements UploadJobLogContract.Model {
private UploadJobLogContract.Presenter mPresenter;
private int mDatePoor;
private long mBeforTime;
private String mLocJson;
private SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
public UploadJobLogModel(UploadJobLogContract.Presenter presenter) {
mPresenter = presenter;
}
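/**
 * Upload flow: first ask the server whether a log already exists for this task (mode 1 = update the
 * existing record by id, mode 0 = insert a new one), then post the text fields together with the
 * serialized location route in a single follow-up request via concatMap.
 */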
@Override
public void uploadJobLog(final RecordLogOV recordLogOV, final Gson mGson) {
BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.getHttpMissionLog("-1", recordLogOV.getGreenMissionTask().getTaskid())
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.observeOn(Schedulers.io())
.concatMap(new Function<ResponseBody, ObservableSource<ResponseBody>>() {
@Override
public Observable<ResponseBody> apply(ResponseBody responseBody) throws Exception {
//Submit the text fields of the job log
String result = responseBody.string();
Log.i("Oking", ">>>>>>>>>>>>>>>>>1" + result);
Log.i("Oking5", ">>>>>>>>>>>>>>>>>1" + recordLogOV.toString());
JSONObject jsonObject = new JSONObject(result);
int count = jsonObject.getInt("total");
HashMap<String, Object> stringStringHashMap = new HashMap<>();
if (count > 0) {
JSONArray rows = jsonObject.getJSONArray("rows");
String oldId = rows.getJSONObject(0).getString("id");
stringStringHashMap.put("mode", 1);
stringStringHashMap.put("id", oldId);
recordLogOV.getGreenMissionLog().setServer_id(oldId);
} else {
stringStringHashMap.put("mode", 0);
}
stringStringHashMap.put("task_id", recordLogOV.getGreenMissionTask().getTaskid());
stringStringHashMap.put("name", OkingContract.CURRENTUSER.getUserid());
stringStringHashMap.put("time", recordLogOV.getTime());
stringStringHashMap.put("plan", recordLogOV.getSelePlanPos());
stringStringHashMap.put("item", recordLogOV.getSeleMattersPos());
stringStringHashMap.put("type", recordLogOV.getGreenMissionTask().getTask_type());
stringStringHashMap.put("area", recordLogOV.getArea());
boolean swisopen = recordLogOV.isSwisopen();
if (swisopen) {
stringStringHashMap.put("whetherComplete", "0");
} else {
stringStringHashMap.put("whetherComplete", "1");
}
stringStringHashMap.put("patrol", recordLogOV.getSummary());
stringStringHashMap.put("dzyj", recordLogOV.getLeaderSummary());
stringStringHashMap.put("status", 0);
stringStringHashMap.put("other_part", recordLogOV.getParts()==null?"":recordLogOV.getParts());
stringStringHashMap.put("examine_status", 0);
stringStringHashMap.put("equipment", recordLogOV.getEquipment()==null?"":recordLogOV.getEquipment());
//Build the location trajectory JSON
getLocationTrajectory(recordLogOV, mGson);
stringStringHashMap.put("route", mLocJson);
stringStringHashMap.put("tbr", OkingContract.CURRENTUSER.getUserName());
stringStringHashMap.put("tbrid", OkingContract.CURRENTUSER.getUserid());
return BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL).uploadJobLogForText(stringStringHashMap);
}
})
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
String result = responseBody.string();
Log.i("Oking", "位置数据、文本数据上传成功>>>>>>>>>>>>>>>>>2" + result);
mPresenter.uploadJobLogSucc(result);
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
Log.i("Oking", "异常>>>>>>>>>>>>>>>>>3" + throwable.getLocalizedMessage() + throwable.toString());
mPresenter.uploadJobLogFail(throwable);
}
});
}
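/**
 * Reads the daily location files under /oking/location/yyyyMMdd.txt (lines of "lat,lng,timestamp"),
 * keeps the points recorded between the task's start and end time, and serializes them into mLocJson.
 * When the task spans several days, every file in the date range is scanned.
 */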
private void getLocationTrajectory(RecordLogOV recordLogOV, Gson mGson) {
Long beginTime = recordLogOV.getGreenMissionTask().getExecute_start_time();
if (beginTime == null) {
beginTime = System.currentTimeMillis() - 1000 * 60 * 20;
}
Long endTime = recordLogOV.getGreenMissionTask().getExecute_end_time();
if (endTime == null) {
endTime = System.currentTimeMillis();
recordLogOV.getGreenMissionTask().setExecute_end_time(endTime);
}
mBeforTime = beginTime - 24 * 60 * 60 * 1000;
final String file1 = sdf.format(beginTime);
final ArrayList<Point> locationPath = new ArrayList<>();
mDatePoor = Util.getDatePoor(beginTime, endTime);
if (mDatePoor < 1) { //Start and end fall on the same day
// Log.i("Oking","是同一天");
List<String> locationPos = FileUtil.readFile2List(Environment.getExternalStorageDirectory() + "/oking/location/" + file1 + ".txt", "UTF-8");
if (locationPos != null) {
for (String s : locationPos) {
String[] items = s.split(",");
if (items.length != 3) {
continue;
}
String mLatitude = items[0];
String mLongitude = items[1];
String mDatetime = items[2];
if (Long.parseLong(mDatetime) > beginTime && Long.parseLong(mDatetime) < endTime) {
Point location = new Point();
location.setLatitude(Double.valueOf(mLatitude));
location.setLongitude(Double.valueOf(mLongitude));
location.setDatetime(Long.valueOf(mDatetime));
locationPath.add(location);
}
}
}
} else {
for (int i = 0; i <= mDatePoor; i++) {
File file = new File(Environment.getExternalStorageDirectory() + "/oking/location/" + getAfterData(mBeforTime) + ".txt");
if (file.exists()) {
// Log.i("Oking","不是同一天"+file.getName());
List<String> locationPos = FileUtil.readFile2List(file, "UTF-8");
for (String s : locationPos) {
String[] items = s.split(",");
if (items.length != 3) {
continue;
}
String Latitude = items[0];
String Longitude = items[1];
String datetime = items[2];
if (Long.parseLong(datetime) > beginTime && Long.parseLong(datetime) < endTime) {
Point location = new Point();
location.setLatitude(Double.valueOf(Latitude));
location.setLongitude(Double.valueOf(Longitude));
location.setDatetime(Long.valueOf(datetime));
locationPath.add(location);
}
}
}
}
}
//Thin out the points, otherwise the set is too large
if (locationPath.size() > 100) {
ArrayList<Point> newLocationPath = new ArrayList<>();
for (int i = 0; i < locationPath.size(); i = i + 2) {
newLocationPath.add(locationPath.get(i));
}
mLocJson = mGson.toJson(newLocationPath);
} else {
mLocJson = mGson.toJson(locationPath);
}
}
private String getAfterData(long time) {
//To step the date in the other direction, change + to -
Date newDate = new Date(time + 24 * 60 * 60 * 1000);
mBeforTime = newDate.getTime();
String dateOk = sdf.format(newDate);
return dateOk;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.support.annotation.Nullable;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.chad.library.adapter.base.BaseViewHolder;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.MapTaskInfo;
import java.util.List;
/**
* Created by Administrator on 2018/5/21/021.
*/
public class MapTaskRecyAdapter extends BaseQuickAdapter<MapTaskInfo, BaseViewHolder> {
public MapTaskRecyAdapter(int layoutResId, @Nullable List<MapTaskInfo> data) {
super(layoutResId, data);
}
@Override
protected void convert(BaseViewHolder helper, MapTaskInfo item) {
helper.setText(R.id.tv_title,item.getTaskName());
}
}
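/*
 * Minimal usage sketch, following how the other BaseQuickAdapter subclasses in this project are wired up.
 * The item layout name and RecyclerView instance below are illustrative assumptions only; the layout
 * must contain a TextView with id tv_title:
 *
 * MapTaskRecyAdapter adapter = new MapTaskRecyAdapter(R.layout.map_task_item, taskInfos);
 * adapter.openLoadAnimation(BaseQuickAdapter.SLIDEIN_RIGHT);
 * recyclerView.setLayoutManager(new LinearLayoutManager(context));
 * recyclerView.setAdapter(adapter);
 */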
<file_sep>package com.zhang.baselib.http.interceptor;
import java.io.IOException;
import okhttp3.Interceptor;
import okhttp3.Response;
/**
* Created by Administrator on 2018/8/23/023.
*/
public class AddHeadInterceptor implements Interceptor {
@Override
public Response intercept(Chain chain) throws IOException {
// Pass the request through unchanged; headers can be added on chain.request().newBuilder() before proceeding
return chain.proceed(chain.request());
}
}
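/*
 * Usage sketch (assumption: the interceptor is meant to be registered on an OkHttpClient builder,
 * e.g. inside BaseHttpFactory):
 *
 * OkHttpClient client = new OkHttpClient.Builder()
 *         .addInterceptor(new AddHeadInterceptor())
 *         .build();
 */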
<file_sep>package com.zhang.okinglawenforcementphone.mvp.model;
import com.zhang.baselib.http.BaseHttpFactory;
import com.zhang.baselib.http.schedulers.RxSchedulersHelper;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.http.service.GDWaterService;
import com.zhang.okinglawenforcementphone.mvp.contract.LoadUsersByDeptIdContract;
import io.reactivex.functions.Consumer;
import okhttp3.ResponseBody;
/**
* Created by Administrator on 2018/6/8/008.
*/
public class LoadUsersByDeptIdModel implements LoadUsersByDeptIdContract.Model {
private LoadUsersByDeptIdContract.Presenter mPresenter;
public LoadUsersByDeptIdModel(LoadUsersByDeptIdContract.Presenter presenter) {
mPresenter = presenter;
}
@Override
public void getUsersByDeptId(String deptId) {
BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.getUsersByDeptId(deptId)
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
String result = responseBody.string();
mPresenter.getUsersByDeptIdSucc(result);
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
mPresenter.getUsersByDeptIdFail(throwable);
}
});
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Process;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.widget.EditText;
import android.widget.TextView;
import com.zhang.baselib.http.BaseHttpFactory;
import com.zhang.baselib.http.schedulers.RxSchedulersHelper;
import com.zhang.baselib.ui.views.RxDialogLoading;
import com.zhang.baselib.ui.views.RxToast;
import com.zhang.baselib.utils.ActivityUtil;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.http.service.GDWaterService;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import java.util.HashMap;
import java.util.Map;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import butterknife.Unbinder;
import io.reactivex.functions.Consumer;
import okhttp3.ResponseBody;
public class ChangePasswordActivity extends BaseActivity {
@BindView(R.id.toolbar)
Toolbar mToolbar;
@BindView(R.id.et_oldpwd)
EditText mEtOldpwd;
@BindView(R.id.et_newpwd)
EditText mEtNewpwd;
@BindView(R.id.et_confirm_pwd)
EditText mEtConfirmPwd;
@BindView(R.id.bt_submit)
TextView mBtSubmit;
private Unbinder mBind;
private RxDialogLoading mRxDialogLoading;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_change_password);
mBind = ButterKnife.bind(this);
initData();
setListener();
}
private void setListener() {
mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
finish();
}
});
}
private void initData() {
}
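/**
 * Validates the three password fields, posts them via uploadUserInfo and interprets the server reply:
 * "1" = success (the app logs out and restarts), "0" = update failed, anything else = wrong old password.
 */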
@OnClick(R.id.bt_submit)
public void onViewClicked() {
String oldPwd = mEtOldpwd.getText().toString().trim();
String newPwd = mEtNewpwd.getText().toString().trim();
String confirmPwd = mEtConfirmPwd.getText().toString().trim();
if (!TextUtils.isEmpty(oldPwd) && !TextUtils.isEmpty(newPwd) && !TextUtils.isEmpty(confirmPwd)) {
if (newPwd.equals(confirmPwd)) {
if (mRxDialogLoading == null) {
mRxDialogLoading = new RxDialogLoading(ChangePasswordActivity.this, false, new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialog) {
dialog.cancel();
}
});
mRxDialogLoading.setLoadingText("正在提交数据中请稍后...");
}
mRxDialogLoading.show();
Map<String, Object> params = new HashMap<>();
params.put("userid", OkingContract.CURRENTUSER.getUserid());
params.put("account", OkingContract.CURRENTUSER.getAcount());
params.put("oldPassword", <PASSWORD>);
params.put("password", <PASSWORD>);
BaseHttpFactory.getInstence().createService(GDWaterService.class, Api.BASE_URL)
.uploadUserInfo(params)
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
String result = responseBody.string();
mRxDialogLoading.cancel();
if ("1".equals(result)) {
RxToast.success("修改密码成功!");
mBtSubmit.setEnabled(false);
OkingContract.CURRENTUSER = null;
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
ActivityUtil.finishAllActivity();
Intent intent = getBaseContext().getPackageManager()
.getLaunchIntentForPackage(getBaseContext().getPackageName());
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(intent);
Process.killProcess(Process.myPid());
}
}, 500);
} else if ("0".equals(result)) {
RxToast.error("修改密码失败!");
} else {
RxToast.error("原密码错误!");
}
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
Log.i("Oking", "修改失败" + throwable.toString());
mRxDialogLoading.cancel();
RxToast.error("网络错误!");
}
});
} else {
RxToast.error("两次输入的密码不一致");
}
}
}
@Override
protected void onDestroy() {
super.onDestroy();
mBind.unbind();
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.contract;
/**
* Created by Administrator on 2018/4/20.
*/
public interface AddMemberContract {
interface Model {
void addMember(String userid, String mtaskId, String userids);
}
interface View {
void addMemberSucc(String result);
void addMemberFail(Throwable ex);
}
interface Presenter {
void addMember(String userid, String mtaskId, String userids);
void addMemberSucc(String result);
void addMemberFail(Throwable ex);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.contract;
import com.google.gson.Gson;
import com.zhang.okinglawenforcementphone.beans.GreenMedia;
import com.zhang.okinglawenforcementphone.beans.GreenMissionLog;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import okhttp3.RequestBody;
/**
* Created by Administrator on 2018/4/26/026.
*/
public interface UploadVideoContract {
interface Model {
void uploadVideo(GreenMissionLog greenMissionLog, List<GreenMedia> greenMedias, Map<String, RequestBody> photoParams, SimpleDateFormat videosdf, Gson gson);
}
interface View {
void loadVideoSucc(String result);
void uploadRetry(Throwable ex);
void loadVideoFail(Throwable ex);
void uploadIsCount(int pos);
}
interface Presenter {
void uploadVideo(GreenMissionLog greenMissionLog, List<GreenMedia> greenMedias, Map<String, RequestBody> photoParams, SimpleDateFormat videosdf, Gson gson);
void loadVideoSucc(String result);
void uploadRetry(Throwable ex);
void loadVideoFail(Throwable ex);
void uploadIsCount(int pos);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.fragments;
import android.content.Context;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.zhang.baselib.BaseApplication;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.adapter.UserRecyAdaper;
import com.zhang.okinglawenforcementphone.beans.UserItemOV;
import com.zhang.okinglawenforcementphone.mvp.ui.activitys.FromAllLawEnforcementSpecificationActivity;
import com.zhang.okinglawenforcementphone.views.DividerItemDecoration;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
/**
* Law enforcement specification menu
*/
public class LawEnforcementSpecificationFragment extends Fragment {
// TODO: Rename parameter arguments, choose names that match
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
private static final String ARG_PARAM1 = "param1";
private static final String ARG_PARAM2 = "param2";
@BindView(R.id.recy_list)
RecyclerView mRecyList;
Unbinder unbinder;
// TODO: Rename and change types of parameters
private String mParam1;
private String mParam2;
private AdministrativeEnforcementFragment mAdministrativeEnforcementFragment;
private View mInflate;
private EnforcementLanguageSpecificationFragment mEnforcementLanguageSpecificationFragment;
private EnforcementInspectionNormsFragment mEnforcementInspectionNormsFragment;
private UserItemOV mUserItemOV;
private UserRecyAdaper mLawEnforcementMenuRecyAdaper;
private FromAllLawEnforcementSpecificationActivity mFromAllLawEnforcementSpecificationActivity;
public LawEnforcementSpecificationFragment() {
// Required empty public constructor
}
public static LawEnforcementSpecificationFragment newInstance(String param1, String param2) {
LawEnforcementSpecificationFragment fragment = new LawEnforcementSpecificationFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM1, param1);
args.putString(ARG_PARAM2, param2);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mParam1 = getArguments().getString(ARG_PARAM1);
mParam2 = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
if (mInflate == null) {
mInflate = inflater.inflate(R.layout.fragment_law_enforcement_specification, container, false);
}
unbinder = ButterKnife.bind(this, mInflate);
initData();
setListener();
return mInflate;
}
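/**
 * Each menu entry swaps the matching child fragment into R.id.rl_administrative_content,
 * hiding the other two and this list fragment; fragments are created lazily and reused afterwards.
 */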
private void setListener() {
mLawEnforcementMenuRecyAdaper.setOnItemClickListener(new BaseQuickAdapter.OnItemClickListener() {
@Override
public void onItemClick(BaseQuickAdapter adapter, View view, int position) {
FragmentTransaction fragmentTransaction = getFragmentManager().beginTransaction();
fragmentTransaction.hide(LawEnforcementSpecificationFragment.this);
switch (position) {
case 0:
mFromAllLawEnforcementSpecificationActivity.setTitleText("水行政执法检查行为规范");
if (mEnforcementLanguageSpecificationFragment != null) {
fragmentTransaction.hide(mEnforcementLanguageSpecificationFragment);
}
if (mAdministrativeEnforcementFragment != null) {
fragmentTransaction.hide(mAdministrativeEnforcementFragment);
}
if (mEnforcementInspectionNormsFragment == null) {
mEnforcementInspectionNormsFragment = EnforcementInspectionNormsFragment.newInstance(null, null);
fragmentTransaction.add(R.id.rl_administrative_content, mEnforcementInspectionNormsFragment, "EnforcementInspectionNormsFragment").commit();
} else {
if (mEnforcementInspectionNormsFragment.isAdded()) {
fragmentTransaction.show(mEnforcementInspectionNormsFragment).commit();
}
}
break;
case 1:
mFromAllLawEnforcementSpecificationActivity.setTitleText("水行政执法用语规范");
if (mEnforcementInspectionNormsFragment != null) {
fragmentTransaction.hide(mEnforcementInspectionNormsFragment);
}
if (mAdministrativeEnforcementFragment != null) {
fragmentTransaction.hide(mAdministrativeEnforcementFragment);
}
if (mEnforcementLanguageSpecificationFragment == null) {
mEnforcementLanguageSpecificationFragment = EnforcementLanguageSpecificationFragment.newInstance(null, null);
fragmentTransaction.add(R.id.rl_administrative_content, mEnforcementLanguageSpecificationFragment, "EnforcementLanguageSpecificationFragment").commit();
} else {
if (mEnforcementLanguageSpecificationFragment.isAdded()) {
fragmentTransaction.show(mEnforcementLanguageSpecificationFragment).commit();
}
}
break;
case 2:
mFromAllLawEnforcementSpecificationActivity.setTitleText("水行政执法禁令");
if (mEnforcementInspectionNormsFragment != null) {
fragmentTransaction.hide(mEnforcementInspectionNormsFragment);
}
if (mEnforcementLanguageSpecificationFragment != null) {
fragmentTransaction.hide(mEnforcementLanguageSpecificationFragment);
}
if (mAdministrativeEnforcementFragment == null) {
mAdministrativeEnforcementFragment = AdministrativeEnforcementFragment.newInstance(null, null);
fragmentTransaction.add(R.id.rl_administrative_content, mAdministrativeEnforcementFragment, "AdministrativeEnforcementFragment").commit();
} else {
if (mAdministrativeEnforcementFragment.isAdded()) {
fragmentTransaction.show(mAdministrativeEnforcementFragment).commit();
}
}
break;
default:
break;
}
}
});
}
private void initData() {
mRecyList.setLayoutManager(new LinearLayoutManager(BaseApplication.getApplictaion(), LinearLayoutManager.VERTICAL, false));
mRecyList.addItemDecoration(new DividerItemDecoration(BaseApplication.getApplictaion(), 0, 20, getResources().getColor(R.color.activity_bg)));
List<UserItemOV> userItemOVS = new ArrayList<>();
mUserItemOV = new UserItemOV();
mUserItemOV.setTitle("水行政执法检查行为规范");
mUserItemOV.setIcon(R.mipmap.falvfaguiku);
userItemOVS.add(mUserItemOV);
mUserItemOV = new UserItemOV();
mUserItemOV.setTitle("水行政执法用语规范");
mUserItemOV.setIcon(R.mipmap.zfgf);
userItemOVS.add(mUserItemOV);
mUserItemOV = new UserItemOV();
mUserItemOV.setTitle("水行政执法禁令");
mUserItemOV.setIcon(R.mipmap.zfjl);
userItemOVS.add(mUserItemOV);
mLawEnforcementMenuRecyAdaper = new UserRecyAdaper(R.layout.user_item, userItemOVS);
mLawEnforcementMenuRecyAdaper.openLoadAnimation(BaseQuickAdapter.SLIDEIN_RIGHT);
mRecyList.setAdapter(mLawEnforcementMenuRecyAdaper);
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
mFromAllLawEnforcementSpecificationActivity = (FromAllLawEnforcementSpecificationActivity) context;
}
@Override
public void onDestroyView() {
super.onDestroyView();
unbinder.unbind();
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.Manifest;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.location.LocationManager;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.provider.Settings;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.Toast;
import com.dd.CircularProgressButton;
import com.jaeger.library.StatusBarUtil;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.DefaultContants;
import com.zhang.baselib.http.progress.ProgressListener;
import com.zhang.baselib.http.progress.ProgressManager;
import com.zhang.baselib.http.progress.body.ProgressInfo;
import com.zhang.baselib.ui.views.RxDialogLoading;
import com.zhang.baselib.ui.views.RxDialogSure;
import com.zhang.baselib.ui.views.RxToast;
import com.zhang.baselib.utils.CrashUtil;
import com.zhang.baselib.utils.LocationUtil;
import com.zhang.baselib.utils.NetUtil;
import com.zhang.baselib.utils.PermissionUtil;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.http.Api;
import com.zhang.okinglawenforcementphone.mvp.contract.AppVersionContract;
import com.zhang.okinglawenforcementphone.mvp.contract.LoginContract;
import com.zhang.okinglawenforcementphone.mvp.presenter.AppVersionPresenter;
import com.zhang.okinglawenforcementphone.mvp.presenter.LoginPresenter;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import butterknife.Unbinder;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.schedulers.Schedulers;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
public class LoginActivity extends BaseActivity {
@BindView(R.id.login_button)
CircularProgressButton loginBtn;
@BindView(R.id.userName_editText)
EditText userNameEditText;
@BindView(R.id.password_editText)
EditText passwordEditText;
@BindView(R.id.save_pwd_button)
CheckBox savePwdCheckBox;
private String mName;
private String mPwd;
private Unbinder mBind;
private String mNewDownloadUrl;
private RxDialogSure mRxDialogSure;
private RxDialogLoading mRxDialogLoading;
private File mApkFile;
private SharedPreferences mSp;
private Handler mHandler = new Handler();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_login);
StatusBarUtil.setTransparent(this);
mBind = ButterKnife.bind(this, this);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
requestPm();
}
initView();
initData();
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (!Settings.Secure.isLocationProviderEnabled(getContentResolver(), LocationManager.GPS_PROVIDER)) {
finish();
}
}
private void initView() {
if (!LocationUtil.isGpsEnabled(BaseApplication.getApplictaion())) {
Intent intent = new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS);
startActivityForResult(intent, 0); //Return to this screen once the user finishes the location settings
}
}
private void initData() {
//Initialize the local data store (GreenDAO)
GreenDAOManager.getInstence().initGreenDao(this);
CrashUtil.getInstance(BaseApplication.getApplictaion()).init();
// int o = 1/0;
//Check for app updates
detectionUpdate();
loginBtn.setIndeterminateProgressMode(true);
mSp = BaseApplication.getApplictaion().getSharedPreferences("user_config", Context.MODE_PRIVATE);
if (mSp.getBoolean("savePwd", false)) {
final String spname = mSp.getString("username", "");
final String spwd = mSp.getString("pwd", "");
userNameEditText.setText(spname);
passwordEditText.setText(spwd);
}
savePwdCheckBox.setChecked(mSp.getBoolean("savePwd", false));
}
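/**
 * Asks the server for the latest app version; when an update is available a confirmation dialog
 * shows the release notes and, on confirm, donlowdApk() streams the new APK and launches the installer.
 */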
private void detectionUpdate() {
if (NetUtil.isConnected(BaseApplication.getApplictaion())) {
new AppVersionPresenter(new AppVersionContract.View() {
@Override
public void reqSucc(String result) {
if (mRxDialogSure == null) {
mRxDialogSure = new RxDialogSure(LoginActivity.this, false, new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialogInterface) {
dialogInterface.cancel();
}
});
mRxDialogSure.setTitle("APP需要更新");
mRxDialogSure.setContent("本次更新内容:");
mRxDialogSure.setContent(result);
mRxDialogSure.getTvSure().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mRxDialogSure.cancel();
donlowdApk();
}
});
}
mRxDialogSure.show();
}
@Override
public void reqFail(Throwable ex) {
}
}).reqAppVersion();
} else {
RxToast.warning(BaseApplication.getApplictaion(), "网络无连接", Toast.LENGTH_SHORT).show();
}
}
@OnClick(R.id.login_button)
public void onClick(View view) {
mName = userNameEditText.getText().toString().trim();
mPwd = passwordEditText.getText().toString().trim();
if (TextUtils.isEmpty(mName) || TextUtils.isEmpty(mPwd)) {
RxToast.warning(BaseApplication.getApplictaion(), "用户名或密码不能为空", Toast.LENGTH_SHORT).show();
return;
}
userNameEditText.setEnabled(false);
passwordEditText.setEnabled(false);
loginBtn.setProgress(50);
loginBtn.setClickable(false);
new LoginPresenter(new LoginContract.View() {
@Override
public void loginSucc(final String menuGroup) {
SharedPreferences.Editor edit = mSp.edit();
edit.putBoolean("savePwd", true);
edit.putString("username", mName);
edit.putString("pwd", mPwd);
edit.commit();
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
loginBtn.setCompleteText("登录成功");
loginBtn.setProgress(100);
DefaultContants.ISHTTPLOGIN = true;
}
});
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
Intent intent = new Intent(LoginActivity.this, MainActivity.class);
startActivity(intent);
finish();
}
}, 800);
}
@Override
public void loginFail(Throwable e) {
if (e.getMessage().equals("密码错误")) {
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
userNameEditText.setEnabled(true);
passwordEditText.setEnabled(true);
loginBtn.setErrorText("密码错误,请重新登录");
loginBtn.setProgress(-1);
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
loginBtn.setClickable(true);
loginBtn.setProgress(0);
}
}, 1600);
}
});
} else {
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
userNameEditText.setEnabled(true);
passwordEditText.setEnabled(true);
loginBtn.setErrorText("登录失败,请重新登录");
loginBtn.setProgress(-1);
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
loginBtn.setClickable(true);
loginBtn.setProgress(0);
}
}, 1600);
}
});
}
}
}).login(mName, mPwd, mSp);
}
//Download the APK
private void donlowdApk() {
if (mRxDialogLoading == null) {
mRxDialogLoading = new RxDialogLoading(this, false, new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialogInterface) {
dialogInterface.cancel();
}
});
}
mRxDialogLoading.setLoadingText("下载进度:0%");
mRxDialogLoading.show();
mNewDownloadUrl = ProgressManager.getInstance().addDiffResponseListenerOnSameUrl(Api.BASE_URL + "/gdWater/app/gdWater.apk", getDownloadListener());
final OkHttpClient okHttpClient = ProgressManager.getInstance().with(new OkHttpClient.Builder())
.build();
Schedulers.io().createWorker().schedule(new Runnable() {
@Override
public void run() {
try {
Request request = new Request.Builder()
.url(mNewDownloadUrl)
.build();
Response response = okHttpClient.newCall(request).execute();
InputStream is = response.body().byteStream();
mApkFile = new File(Environment.getExternalStorageDirectory().getPath(), "gdWater.apk");
//Remove any previously downloaded copy before writing the new APK
if (mApkFile.exists()) {
mApkFile.delete();
}
FileOutputStream fos = new FileOutputStream(mApkFile);
BufferedInputStream bis = new BufferedInputStream(is);
byte[] buffer = new byte[1024];
int len;
while ((len = bis.read(buffer)) != -1) {
fos.write(buffer, 0, len);
}
fos.flush();
fos.close();
bis.close();
is.close();
} catch (IOException e) {
e.printStackTrace();
//When an error occurs outside the library, this call notifies every listener's onError method
ProgressManager.getInstance().notifyOnErorr(Api.BASE_URL + "/gdWater/app/gdWater.apk", e);
}
}
});
}
@Override
protected void onDestroy() {
super.onDestroy();
mBind.unbind();
}
private ProgressInfo mLastDownloadingInfo;
public ProgressListener getDownloadListener() {
return new ProgressListener() {
@Override
public void onProgress(ProgressInfo progressInfo) {
// If repeated taps on the upload/download button are not blocked, a new transfer for the same URL
// can start before the previous one has finished, so the id (the request start time) is used to
// tell the in-flight progress records apart. Here only the newest download progress is shown.
if (mLastDownloadingInfo == null) {
mLastDownloadingInfo = progressInfo;
}
//The id is the request start time, so a larger value means a newer request
if (progressInfo.getId() < mLastDownloadingInfo.getId()) {
return;
} else if (progressInfo.getId() > mLastDownloadingInfo.getId()) {
mLastDownloadingInfo = progressInfo;
}
int progress = mLastDownloadingInfo.getPercent();
mRxDialogLoading.setLoadingText("下载进度:" + progress + "%");
if (progressInfo.isFinish()) {
//The download has finished
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
RxToast.success(BaseApplication.getApplictaion(), "下载成功", Toast.LENGTH_SHORT).show();
mRxDialogLoading.cancel();
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
intent.setDataAndType(Uri.fromFile(mApkFile),
"application/vnd.android.package-archive");
startActivity(intent);
}
});
}
}
@Override
public void onError(long id, final Exception e) {
AndroidSchedulers.mainThread().createWorker().schedule(new Runnable() {
@Override
public void run() {
RxToast.error(BaseApplication.getApplictaion(), "下载失败,请检查网络和SD卡" + e.getMessage(), Toast.LENGTH_SHORT).show();
}
});
}
};
}
private void requestPm() {
PermissionUtil.with(this)
.addPermission(Manifest.permission.READ_PHONE_STATE)
.addPermission(Manifest.permission.READ_EXTERNAL_STORAGE)
.addPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE)
.addPermission(Manifest.permission.WAKE_LOCK)
.addPermission(Manifest.permission.RECEIVE_BOOT_COMPLETED)
.addPermission(Manifest.permission.INTERNET)
.addPermission(Manifest.permission.RECORD_AUDIO)
.addPermission(Manifest.permission.CAMERA)
.addPermission(Manifest.permission.ACCESS_NETWORK_STATE)
.addPermission(Manifest.permission.ACCESS_COARSE_LOCATION)
.addPermission(Manifest.permission.ACCESS_FINE_LOCATION)
.initPermission();
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.text.Editable;
import android.text.TextUtils;
import android.text.TextWatcher;
import android.view.KeyEvent;
import android.view.View;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import android.widget.TextView;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.zhang.baselib.BaseApplication;
import com.zhang.baselib.ui.views.RxToast;
import com.zhang.okinglawenforcementphone.GreenDAOManager;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.adapter.SerchRecyAdapter;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTaskDao;
import com.zhang.okinglawenforcementphone.beans.GreenSearchHistory;
import com.zhang.okinglawenforcementphone.beans.GreenSearchHistoryDao;
import com.zhang.okinglawenforcementphone.beans.OkingContract;
import com.zhang.okinglawenforcementphone.beans.SeachBean;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import com.zhang.okinglawenforcementphone.views.DividerItemDecoration;
import com.zhy.view.flowlayout.FlowLayout;
import com.zhy.view.flowlayout.TagAdapter;
import com.zhy.view.flowlayout.TagFlowLayout;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import butterknife.Unbinder;
public class SerchActivity extends BaseActivity {
@BindView(R.id.et_seach)
EditText mEtSeach;
@BindView(R.id.toolbar)
Toolbar mToolbar;
@BindView(R.id.rcy_serch)
RecyclerView mRcySerch;
@BindView(R.id.seach_flow_layout)
TagFlowLayout mSeachFlowLayout;
@BindView(R.id.tv_tag)
TextView mTvTag;
@BindView(R.id.tv_clear)
TextView mTvClear;
private Unbinder mBind;
private SerchRecyAdapter mSerchRecyAdapter;
private List<SeachBean> seachBeans = new ArrayList<>();
private String[] mStringArray;
private TagAdapter<GreenSearchHistory> mTagAdapter;
private List<GreenSearchHistory> mGreenSearchHistories;
private Intent mIntent;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_serch);
mBind = ButterKnife.bind(this);
initData();
setListener();
}
private void setListener() {
mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
finish();
}
});
mEtSeach.addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {
}
@Override
public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {
}
@Override
public void afterTextChanged(Editable editable) {
if (editable.length() == 0) {
seachBeans.clear();
mRcySerch.setVisibility(View.GONE);
mTvTag.setText("搜索结果");
mSeachFlowLayout.setVisibility(View.GONE);
}
}
});
mEtSeach.setOnKeyListener(new View.OnKeyListener() {
@Override
public boolean onKey(View view, int keyCode, KeyEvent keyEvent) {
if (keyCode == KeyEvent.KEYCODE_ENTER) {
// Hide the soft keyboard first
((InputMethodManager) getSystemService(INPUT_METHOD_SERVICE))
.hideSoftInputFromWindow(SerchActivity.this.getCurrentFocus()
.getWindowToken(), InputMethodManager.HIDE_NOT_ALWAYS);
//Perform the search; the non-empty check on the input is done right below
String trim = mEtSeach.getText().toString().trim();
if (!TextUtils.isEmpty(trim)) {
search(trim);
}
}
return false;
}
});
mSeachFlowLayout.setOnTagClickListener(new TagFlowLayout.OnTagClickListener() {
@Override
public boolean onTagClick(View view, int position, FlowLayout parent) {
search(mGreenSearchHistories.get(position).getSearchText());
return false;
}
});
}
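/**
 * Persists the query to the per-user search history (if new), looks for tasks whose name contains the
 * keyword in the local GreenDAO store, and otherwise falls back to matching the static menu entries;
 * the results are rendered by SerchRecyAdapter and routed to the corresponding activity on click.
 */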
private void search(String trim) {
GreenSearchHistory unique = GreenDAOManager.getInstence().getDaoSession().getGreenSearchHistoryDao()
.queryBuilder().where(GreenSearchHistoryDao.Properties.UserId.eq(OkingContract.CURRENTUSER.getUserid()), GreenSearchHistoryDao.Properties.SearchText.eq(trim)).unique();
if (unique == null) {
GreenSearchHistory greenSearchHistory = new GreenSearchHistory();
greenSearchHistory.setSearchText(trim);
greenSearchHistory.setUserId(OkingContract.CURRENTUSER.getUserid());
greenSearchHistory.setTime(System.currentTimeMillis());
GreenDAOManager.getInstence().getDaoSession().getGreenSearchHistoryDao().insert(greenSearchHistory);
}
//Query matching tasks from the local database
List<GreenMissionTask> greenMissionTasks = GreenDAOManager.getInstence().getDaoSession().getGreenMissionTaskDao()
.queryBuilder().where(GreenMissionTaskDao.Properties.Userid.eq(OkingContract.CURRENTUSER.getUserid()), GreenMissionTaskDao.Properties.Task_name.like("%" + trim + "%"))
.list();
seachBeans.clear();
if (greenMissionTasks.size() > 0) {
for (GreenMissionTask greenMissionTask : greenMissionTasks) {
SeachBean seachBean = new SeachBean();
seachBean.setItemType(0);
seachBean.setPublisherName(greenMissionTask.getPublisher_name());
seachBean.setState(greenMissionTask.getStatus());
seachBean.setId(greenMissionTask.getId());
seachBean.setTaskId(greenMissionTask.getTaskid());
seachBean.setTaskName(greenMissionTask.getTask_name());
seachBeans.add(seachBean);
}
} else {
//Fall back to searching the menu entries
for (String s : mStringArray) {
if (s.contains(trim)) {
SeachBean seachBean = new SeachBean();
seachBean.setItemType(1);
seachBean.setMenuItme(s);
seachBeans.add(seachBean);
}
}
if (seachBeans.size() == 0) {
RxToast.warning("没有搜索到结果");
}
}
mRcySerch.setVisibility(View.VISIBLE);
mTvTag.setText("搜索结果");
mSeachFlowLayout.setVisibility(View.GONE);
if (mSerchRecyAdapter == null) {
mSerchRecyAdapter = new SerchRecyAdapter(seachBeans);
mSerchRecyAdapter.openLoadAnimation(BaseQuickAdapter.SLIDEIN_RIGHT);
mRcySerch.setAdapter(mSerchRecyAdapter);
mSerchRecyAdapter.setOnItemClickListener(new BaseQuickAdapter.OnItemClickListener() {
@Override
public void onItemClick(BaseQuickAdapter adapter, View view, int position) {
List<SeachBean> seachBeans = adapter.getData();
SeachBean seachBean = seachBeans.get(position);
switch (seachBean.getItemType()) {
case 0:
switch (seachBean.getState()) {
case "0":
case "1":
case "2":
mIntent = new Intent(SerchActivity.this, ArrangeTeamMembersActivity.class);
mIntent.putExtra("id", seachBean.getId());
mIntent.putExtra("position", position);
startActivity(mIntent);
break;
case "3":
case "4":
mIntent = new Intent(SerchActivity.this, MissionActivity.class);
mIntent.putExtra("id", seachBean.getId());
mIntent.putExtra("position", position);
startActivity(mIntent);
break;
case "100":
case "5":
case "9":
mIntent = new Intent(SerchActivity.this, MissionRecorActivity.class);
mIntent.putExtra("id", seachBean.getId());
mIntent.putExtra("taskId", seachBean.getTaskId());
startActivity(mIntent);
break;
default:
break;
}
break;
case 1:
switch (seachBean.getMenuItme()) {
case "一般任务":
mIntent = new Intent(SerchActivity.this, PatrolsToReleaseActivity.class);
startActivity(mIntent);
break;
case "紧急任务":
mIntent = new Intent(SerchActivity.this, TemporaryEmergencyTaskActivity.class);
startActivity(mIntent);
break;
case "待办":
mIntent = new Intent(SerchActivity.this, ToDoActivity.class);
startActivity(mIntent);
break;
case "安排":
case "巡查任务接收安排":
mIntent = new Intent(SerchActivity.this, TaskMissionProjectActivity.class);
mIntent.putExtra("activity", "ArrangeMissionActivity");
startActivity(mIntent);
break;
case "任务执行":
case "执行":
mIntent = new Intent(SerchActivity.this, TaskMissionProjectActivity.class);
mIntent.putExtra("activity", "TaskExecutionActivity");
startActivity(mIntent);
break;
case "任务上报":
case "上报":
mIntent = new Intent(SerchActivity.this, TaskMissionProjectActivity.class);
mIntent.putExtra("activity", "ReportTaskActivity");
startActivity(mIntent);
break;
case "调查笔录":
case "笔录":
mIntent = new Intent(SerchActivity.this, WrittenRecordActivity.class);
startActivity(mIntent);
break;
case "办案":
mIntent = new Intent(SerchActivity.this, CaseManagerActivity.class);
mIntent.putExtra("position", 0);
startActivity(mIntent);
break;
case "现场勘验":
case "勘验":
mIntent = new Intent(SerchActivity.this, SceneInquestActivity.class);
startActivity(mIntent);
break;
case "日志统计":
case "统计":
mIntent = new Intent(SerchActivity.this, StatisticalActivity.class);
startActivity(mIntent);
break;
case "执法":
mIntent = new Intent(SerchActivity.this, LawEnforcementManagerActivity.class);
startActivity(mIntent);
break;
case "日志(已完成)":
case "日志":
mIntent = new Intent(SerchActivity.this, TaskMissionProjectActivity.class);
mIntent.putExtra("activity", "CompleteListActivity");
startActivity(mIntent);
break;
case "轨迹管理":
case "轨迹":
mIntent = new Intent(SerchActivity.this, TaskMissionProjectActivity.class);
mIntent.putExtra("activity", "TrajectoryListActivity");
startActivity(mIntent);
break;
case "全部":
mIntent = new Intent(SerchActivity.this, AllActivity.class);
startActivity(mIntent);
break;
case "责令停止违法行为通知":
mIntent = new Intent(SerchActivity.this, FromAllLawEnforcementActivity.class);
startActivity(mIntent);
break;
case "水行政当场处罚决定书":
mIntent = new Intent(SerchActivity.this, FromAllPenaltyTheSpotActivity.class);
startActivity(mIntent);
break;
case "法律法规库":
mIntent = new Intent(SerchActivity.this, FromAllLawsAndRegulationsActivity.class);
startActivity(mIntent);
break;
case "执法规范":
mIntent = new Intent(SerchActivity.this, FromAllLawEnforcementSpecificationActivity.class);
startActivity(mIntent);
break;
case "预立案":
mIntent = new Intent(SerchActivity.this, FromAllCaseInAdvanceActivity.class);
startActivity(mIntent);
break;
case "案例库":
mIntent = new Intent(SerchActivity.this, PuttedForwardConActivity.class);
startActivity(mIntent);
break;
case "地图查询":
mIntent = new Intent(SerchActivity.this, MapQueryActivity.class);
startActivity(mIntent);
break;
case "地图任务展示":
mIntent = new Intent(SerchActivity.this, MapTaskActivity.class);
startActivity(mIntent);
break;
case "区域执法记录查询":
mIntent = new Intent(SerchActivity.this, RegionalHistoryEnforcementActivity.class);
startActivity(mIntent);
break;
case "案件登记":
mIntent = new Intent(SerchActivity.this, FromAllCaseRegistrationActivity.class);
startActivity(mIntent);
break;
case "案件受理":
mIntent = new Intent(SerchActivity.this, FromAllOpenCasesActivity.class);
startActivity(mIntent);
break;
case "案件处理":
mIntent = new Intent(SerchActivity.this, FromAllCaseProcessingListActivity.class);
startActivity(mIntent);
break;
case "案件转办":
mIntent = new Intent(SerchActivity.this, FromAllCaseComplaintActivity.class);
startActivity(mIntent);
break;
case "修改密码":
mIntent = new Intent(SerchActivity.this, ChangePasswordActivity.class);
startActivity(mIntent);
break;
case "意见反馈":
mIntent = new Intent(SerchActivity.this, FeedbackActivity.class);
startActivity(mIntent);
break;
case "关于":
mIntent = new Intent(SerchActivity.this, AboutActivity.class);
startActivity(mIntent);
break;
default:
break;
}
break;
default:
break;
}
}
});
} else {
mSerchRecyAdapter.setNewData(seachBeans);
}
}
private void initData() {
mStringArray = getResources().getStringArray(R.array.menus);
mRcySerch.setLayoutManager(new LinearLayoutManager(BaseApplication.getApplictaion(), LinearLayoutManager.VERTICAL, false));
mRcySerch.addItemDecoration(new DividerItemDecoration(BaseApplication.getApplictaion(), 0, 20, getResources().getColor(R.color.activity_bg)));
mGreenSearchHistories = GreenDAOManager.getInstence().getDaoSession().getGreenSearchHistoryDao()
.queryBuilder().where(GreenSearchHistoryDao.Properties.UserId.eq(OkingContract.CURRENTUSER.getUserid()))
.list();
if (mGreenSearchHistories.size() > 0) {
mTagAdapter = new TagAdapter<GreenSearchHistory>(mGreenSearchHistories) {
@Override
public View getView(FlowLayout parent, int position, GreenSearchHistory greenSearchHistory) {
View inflate = View.inflate(BaseApplication.getApplictaion(), R.layout.search_history_item, null);
TextView tvTitle = inflate.findViewById(R.id.tv_title);
tvTitle.setText(mGreenSearchHistories.get(position).getSearchText());
return inflate;
}
};
mSeachFlowLayout.setAdapter(mTagAdapter);
}
}
@Override
protected void onDestroy() {
super.onDestroy();
mBind.unbind();
}
@OnClick(R.id.tv_clear)
public void onViewClicked() {
if (mGreenSearchHistories.size() > 0) {
GreenDAOManager.getInstence().getDaoSession().getGreenSearchHistoryDao().deleteAll();
mGreenSearchHistories.clear();
mTagAdapter.notifyDataChanged();
}
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.model;
import android.util.Log;
import com.google.gson.Gson;
import com.zhang.baselib.http.schedulers.RxSchedulersHelper;
import com.zhang.okinglawenforcementphone.beans.JPushMessageBean;
import com.zhang.okinglawenforcementphone.jpush.clien.JPushAPI;
import com.zhang.okinglawenforcementphone.jpush.clien.JPushClienService;
import com.zhang.okinglawenforcementphone.jpush.clien.JPushHttpFactory;
import com.zhang.okinglawenforcementphone.mvp.contract.JPushMessageContract;
import io.reactivex.functions.Consumer;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
/**
* Created by Administrator on 2018/8/27/027.
*/
public class JPushMessageModel implements JPushMessageContract.Model {
private JPushMessageContract.Presenter mPresenter;
public JPushMessageModel(JPushMessageContract.Presenter presenter) {
mPresenter = presenter;
}
@Override
public void pushMessage(JPushMessageBean jPushMessageBean) {
String s = new Gson().toJson(jPushMessageBean);
Log.i("Oking5", s);
RequestBody requestBody = RequestBody.create(MediaType.parse("application/json"), s);
JPushHttpFactory.getInstence().createService(JPushClienService.class, JPushAPI.BASE_URL)
.pushMessage(requestBody)
.compose(RxSchedulersHelper.<ResponseBody>io_main())
.subscribe(new Consumer<ResponseBody>() {
@Override
public void accept(ResponseBody responseBody) throws Exception {
mPresenter.pushMessageSucc("推送成功");
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
mPresenter.pushMessageFail(throwable);
}
});
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.presenter;
import com.zhang.okinglawenforcementphone.mvp.contract.LoadAcceptNumberContract;
import com.zhang.okinglawenforcementphone.mvp.model.LoadAcceptNumberModel;
/**
* Created by Administrator on 2018/6/9/009.
*/
public class LoadAcceptNumberPresenter implements LoadAcceptNumberContract.Presenter {
private LoadAcceptNumberContract.Model mModel;
private LoadAcceptNumberContract.View mView;
public LoadAcceptNumberPresenter(LoadAcceptNumberContract.View view) {
mView = view;
mModel = new LoadAcceptNumberModel(this);
}
@Override
public void loadAcceptNumber() {
mModel.loadAcceptNumber();
}
@Override
public void loadAcceptNumberSucc(String result) {
mView.loadAcceptNumberSucc(result);
}
@Override
public void loadAcceptNumberFail(Throwable ex) {
mView.loadAcceptNumberFail(ex);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.beans;
import java.util.List;
/**
* Created by Administrator on 2018/6/9/009.
*/
public class SmameLeveOV {
/**
* data : {"msg":"获人员成功!","records":[{"DEPTID":"001007","DEPTNAME":"西江局水政监察支队"},{"DEPTID":"001003","DEPTNAME":"汕头水政监察支队"},{"DEPTID":"001005","DEPTNAME":"广州市水务局执法监察支队"},{"DEPTID":"001001","DEPTNAME":"广东省水利厅水利水政监察局"},{"DEPTID":"001002","DEPTNAME":"中山水政监察支队"},{"DEPTID":"001006","DEPTNAME":"德庆水政监察大队"}]}
* status : 1
*/
private DataBean data;
private String status;
public DataBean getData() {
return data;
}
public void setData(DataBean data) {
this.data = data;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public static class DataBean {
/**
* msg : 获人员成功!
* records : [{"DEPTID":"001007","DEPTNAME":"西江局水政监察支队"},{"DEPTID":"001003","DEPTNAME":"汕头水政监察支队"},{"DEPTID":"001005","DEPTNAME":"广州市水务局执法监察支队"},{"DEPTID":"001001","DEPTNAME":"广东省水利厅水利水政监察局"},{"DEPTID":"001002","DEPTNAME":"中山水政监察支队"},{"DEPTID":"001006","DEPTNAME":"德庆水政监察大队"}]
*/
private String msg;
private List<RecordsBean> records;
public String getMsg() {
return msg;
}
public void setMsg(String msg) {
this.msg = msg;
}
public List<RecordsBean> getRecords() {
return records;
}
public void setRecords(List<RecordsBean> records) {
this.records = records;
}
public static class RecordsBean {
/**
* DEPTID : 001007
* DEPTNAME : 西江局水政监察支队
*/
private String DEPTID;
private String DEPTNAME;
public String getDEPTID() {
return DEPTID;
}
public void setDEPTID(String DEPTID) {
this.DEPTID = DEPTID;
}
public String getDEPTNAME() {
return DEPTNAME;
}
public void setDEPTNAME(String DEPTNAME) {
this.DEPTNAME = DEPTNAME;
}
}
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.activitys;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.View;
import com.chad.library.adapter.base.entity.MultiItemEntity;
import com.zhang.baselib.BaseApplication;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.adapter.ExpandableItemCaseRegistAdapter;
import com.zhang.okinglawenforcementphone.beans.WrittenItemBean;
import com.zhang.okinglawenforcementphone.beans.WrittenRecordLevel0;
import com.zhang.okinglawenforcementphone.mvp.ui.base.BaseActivity;
import com.zhang.okinglawenforcementphone.views.DividerItemDecoration;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
public class FromAllCaseRegistrationActivity extends BaseActivity {
@BindView(R.id.case_regist_Recy)
RecyclerView mCaseRegistRecy;
@BindView(R.id.toolbar)
Toolbar mToolbar;
private Unbinder mBind;
private WrittenRecordLevel0 mWrittenRecordLevel0;
private WrittenItemBean mWrittenItemBean;
private ExpandableItemCaseRegistAdapter mExpandableItemCaseRegistAdapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_from_all_case_registration);
mBind = ButterKnife.bind(this);
initData();
setListener();
}
private void setListener() {
mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
finish();
}
});
}
private void initData() {
mCaseRegistRecy.setNestedScrollingEnabled(false);
mCaseRegistRecy.setLayoutManager(new LinearLayoutManager(BaseApplication.getApplictaion(), LinearLayoutManager.VERTICAL, false));
mCaseRegistRecy.addItemDecoration(new DividerItemDecoration(BaseApplication.getApplictaion(), 0, 3, BaseApplication.getApplictaion().getResources().getColor(R.color.bottom_nav_normal)));
if (mExpandableItemCaseRegistAdapter == null) {
mExpandableItemCaseRegistAdapter = new ExpandableItemCaseRegistAdapter(generateData());
mCaseRegistRecy.setAdapter(mExpandableItemCaseRegistAdapter);
}
mExpandableItemCaseRegistAdapter.expand(0);
}
private List<MultiItemEntity> generateData() {
ArrayList<MultiItemEntity> res = new ArrayList<>();
mWrittenRecordLevel0 = new WrittenRecordLevel0("案件来源");
mWrittenItemBean = new WrittenItemBean();
mWrittenItemBean.setItemType(1);
mWrittenRecordLevel0.addSubItem(mWrittenItemBean);
res.add(mWrittenRecordLevel0);
mWrittenRecordLevel0 = new WrittenRecordLevel0("上传证据");
mWrittenItemBean = new WrittenItemBean();
mWrittenItemBean.setItemType(2);
mWrittenRecordLevel0.addSubItem(mWrittenItemBean);
res.add(mWrittenRecordLevel0);
mWrittenRecordLevel0 = new WrittenRecordLevel0("基本信息(可选)");
mWrittenItemBean = new WrittenItemBean();
mWrittenItemBean.setItemType(3);
mWrittenRecordLevel0.addSubItem(mWrittenItemBean);
res.add(mWrittenRecordLevel0);
return res;
}
@Override
protected void onDestroy() {
super.onDestroy();
mBind.unbind();
}
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import com.zhang.okinglawenforcementphone.beans.GreenMedia;
import java.util.ArrayList;
/**
* Created by zhao on 2016/10/9.
*/
public abstract class SimpleGridViewAdapter extends BaseAdapter {
private ArrayList<GreenMedia> mGreenMedias;
public SimpleGridViewAdapter(ArrayList<GreenMedia> greenMedias) {
this.mGreenMedias = greenMedias;
}
@Override
public int getCount() {
return mGreenMedias.size() + 1;
}
@Override
public Object getItem(int i) {
return null;
}
@Override
public long getItemId(int i) {
return 0;
}
@Override
public abstract View getView(int i, View view, ViewGroup viewGroup);
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.support.annotation.Nullable;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.chad.library.adapter.base.BaseViewHolder;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.AllMenuItemBean;
import java.util.List;
/**
* Created by Administrator on 2018/5/16.
*/
public class AllMenuSubRecyAdapter extends BaseQuickAdapter<String, BaseViewHolder> {
public AllMenuSubRecyAdapter(int layoutResId, @Nullable List<String> data) {
super(layoutResId, data);
}
@Override
protected void convert(BaseViewHolder helper, String item) {
helper.setText(R.id.tv_sub, item);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.support.annotation.Nullable;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.chad.library.adapter.base.BaseViewHolder;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.GreenMissionTask;
import java.util.List;
/**
* Created by Administrator on 2018/5/18/018.
*/
public class NoCompleteTaskListRecyAdapter extends BaseQuickAdapter<GreenMissionTask, BaseViewHolder> {
public NoCompleteTaskListRecyAdapter(int layoutResId, @Nullable List<GreenMissionTask> data) {
super(layoutResId, data);
}
@Override
protected void convert(BaseViewHolder helper, GreenMissionTask item) {
helper.setText(R.id.tv_taskname, "任务名称:" + item.getTask_name());
helper.setText(R.id.tv_taskid, "任务编号:" + item.getTaskid());
helper.setText(R.id.tv_fbr, "发布人:" + item.getPublisher_name());
if (item.getExamine_status() == -1) {
if ("0".equals(item.getStatus())) {
helper.setText(R.id.tv_state, "任务状态:未发布");
} else if ("1".equals(item.getStatus())) {
helper.setText(R.id.tv_state, "任务状态:已发布待审核");
} else if ("2".equals(item.getStatus())) {
helper.setText(R.id.tv_state, "任务状态:审核通过");
} else if ("3".equals(item.getStatus())) {
helper.setText(R.id.tv_state, "任务状态:已分配队员待执行");
} else if ("4".equals(item.getStatus())) {
helper.setText(R.id.tv_state, "任务状态:任务开始");
} else if ("5".equals(item.getStatus())) {
helper.setText(R.id.tv_state, "任务状态:任务完成");
} else if ("7".equals(item.getStatus())) {
helper.setText(R.id.tv_state, "任务状态:审核不通过");
} else if ("100".equals(item.getStatus())) {
helper.setText(R.id.tv_state, "任务状态:巡查结束,待上传");
}
} else {
helper.setText(R.id.tv_state, "任务状态:等待领导批示");
}
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.contract;
import java.util.Map;
import retrofit2.http.Query;
/**
* Created by Administrator on 2018/6/7/007.
*/
public interface TaskBackContract {
interface Model {
void taskBack(Map<String, Object> params);
}
interface View {
void taskBackSucc(String result);
void taskBackFail(Throwable ex);
}
interface Presenter {
void taskBack(Map<String, Object> params);
void taskBackSucc(String result);
void taskBackFail(Throwable ex);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.presenter;
import com.zhang.okinglawenforcementphone.beans.GreenEvidence;
import com.zhang.okinglawenforcementphone.beans.GreenEvidenceMedia;
import com.zhang.okinglawenforcementphone.beans.GreenMedia;
import com.zhang.okinglawenforcementphone.mvp.contract.UploadEvidenceContract;
import com.zhang.okinglawenforcementphone.mvp.model.UploadEvidenceModel;
import java.util.List;
import java.util.Map;
/**
* Created by Administrator on 2018/5/8/008.
*/
public class UploadEvidencePresenter implements UploadEvidenceContract.Presenter {
private UploadEvidenceContract.Model mModel;
private UploadEvidenceContract.View mView;
public UploadEvidencePresenter(UploadEvidenceContract.View view) {
mView = view;
mModel = new UploadEvidenceModel(this);
}
@Override
public void uploadEvidence(Map<String, Object> fields, GreenEvidence evidence, List<GreenEvidenceMedia> picGreenMedias) {
mModel.uploadEvidence(fields,evidence,picGreenMedias);
}
@Override
public void uploadEvidenceSucc(String result) {
mView.uploadEvidenceSucc(result);
}
@Override
public void uploadEvidenceFail(Throwable ex) {
mView.uploadEvidenceFail(ex);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.adapter;
import android.support.annotation.Nullable;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.chad.library.adapter.base.BaseViewHolder;
import com.zhang.okinglawenforcementphone.R;
import com.zhang.okinglawenforcementphone.beans.GreenEvidence;
import java.util.List;
/**
* Created by Administrator on 2018/2/24.
*/
public class CaseAudioVideoEvidenceListRecyAdapter extends BaseQuickAdapter<GreenEvidence, BaseViewHolder> {
public CaseAudioVideoEvidenceListRecyAdapter(int layoutResId, @Nullable List<GreenEvidence> data) {
super(layoutResId, data);
}
@Override
protected void convert(BaseViewHolder helper, GreenEvidence item) {
helper.setText(R.id.zjmc_tv, item.getZJMC());
helper.setText(R.id.cjdd_tv, item.getCJDD());
helper.setText(R.id.zjnr_tv, item.getZJNR());
if (item.getOtype().equals("YYSP")) {
helper.setText(R.id.lx_tv, "语音、视频");
} else if (item.getOtype().equals("SP")) {
helper.setText(R.id.lx_tv, "视频");
} else if (item.getOtype().equals("YY")) {
helper.setText(R.id.lx_tv, "语音");
}
helper.addOnClickListener(R.id.upload_button)
.addOnClickListener(R.id.delete_button)
.addOnClickListener(R.id.edit_button);
if (item.getIsUpload()) {
helper.setVisible(R.id.upload_button, false);
helper.setVisible(R.id.delete_button, false);
helper.setText(R.id.edit_button, "查看");
}
}
}
<file_sep>package com.zhang.baselib.utils;
import android.content.Context;
import android.content.Intent;
/**
* Created by Administrator on 2018/4/29/029.
*/
public class IntentUtil {
/**
* 获取打开App的意图
*
* @param context 上下文
* @param packageName 包名
* @return 意图
*/
public static Intent getLaunchAppIntent(Context context, String packageName) {
return getIntentByPackageName(context, packageName);
}
/**
* 根据包名获取意图
*
* @param context 上下文
* @param packageName 包名
* @return 意图
*/
private static Intent getIntentByPackageName(Context context, String packageName) {
return context.getPackageManager().getLaunchIntentForPackage(packageName);
}
}
<file_sep>package com.zhang.okinglawenforcementphone.beans;
/**
* Created by Administrator on 2018/5/22/022.
*/
public class NewsTaskOV {
public int mType;
public String taskid;
public GreenMissionTask mGreenMissionTask;
public NewsTaskOV(int type, String taskid, GreenMissionTask greenMissionTask) {
mType = type;
this.taskid = taskid;
mGreenMissionTask = greenMissionTask;
}
}
<file_sep>package com.zhang.okinglawenforcementphone.mvp.ui.fragments;
import android.graphics.Color;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ListView;
import android.widget.TextView;
import com.zhang.okinglawenforcementphone.R;
/**
* 水行政执法禁令
*/
public class AdministrativeEnforcementFragment extends Fragment {
// TODO: Rename parameter arguments, choose names that match
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
private static final String ARG_PARAM1 = "param1";
private static final String ARG_PARAM2 = "param2";
// TODO: Rename and change types of parameters
private String mParam1;
private String mParam2;
private ListView mLv;
private View mInflate;
public AdministrativeEnforcementFragment() {
// Required empty public constructor
}
public static AdministrativeEnforcementFragment newInstance(String param1, String param2) {
AdministrativeEnforcementFragment fragment = new AdministrativeEnforcementFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM1, param1);
args.putString(ARG_PARAM2, param2);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mParam1 = getArguments().getString(ARG_PARAM1);
mParam2 = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
if (mInflate==null){
mInflate = inflater.inflate(R.layout.fragment_administrative_enforcement, container, false);
}
initView(mInflate);
return mInflate;
}
public void initView(View rootView) {
mLv = rootView.findViewById(R.id.lv);
initData();
setListener();
}
private void setListener() {
}
private void initData() {
final String[] administrationBanArray = getResources().getStringArray(R.array.lv_administration_ban);
mLv.setAdapter(new BaseAdapter() {
@Override
public int getCount() {
return administrationBanArray.length;
}
@Override
public Object getItem(int i) {
return null;
}
@Override
public long getItemId(int i) {
return 0;
}
@Override
public View getView(int position, View contentView, ViewGroup viewGroup) {
TextView textView = new TextView(getActivity());
textView.setPadding(10,10,10,10);
textView.setTextSize(12);
textView.setTextColor(Color.RED);
textView.setText(administrationBanArray[position]);
return textView;
}
});
}
@Override
public void onDestroyView() {
super.onDestroyView();
if (mInflate != null) {
ViewGroup parent = (ViewGroup) mInflate.getParent();
if (parent != null) {
parent.removeView(mInflate);
}
}
}
}
| c222fffdb90b3d4d042aec7b9303ed1dc90255a6 | [
"Markdown",
"Java",
"INI",
"Gradle"
] | 112 | Java | zhangyong5566/OkingLawEnforcementPhone | b7a9f32656472a6c29c1467460d947b0d14a82f4 | 2caed80730b5fe8b139f531cb389326bb2cab421 | |
refs/heads/master | <repo_name>hiucimon/DCOS_Ansible<file_sep>/vagrant/Vagrantfile
Vagrant.configure("2") do |config|
config.vm.box = "bento/ubuntu-17.10"
config.vm.box_version = "201710.25.0"
end
<file_sep>/README.md
# DCOS_Ansible
Create a DCOS cluster with Ansible
| 90cfcdfee29a2e449ab7db2b2e0e4e1fa54248cf | [
"Markdown",
"Ruby"
] | 2 | Ruby | hiucimon/DCOS_Ansible | 2b1379cd88d2205f8ec3271d171344f7367c3c68 | 155c6b990ec69edfe4122f3ecb7dc1207a2b79cf | |
refs/heads/master | <repo_name>sillasHead/SENAC_TADS_Projeto_Integrador_1<file_sep>/ProjetoIntegrador1/src/ADO05/Questao01.java
package ADO05;
import java.util.*;
public class Questao01 {
public static void main(String[] args) {
Scanner in = new Scanner(System.in);
String nome, ls = System.getProperty("line.separator");
int inst, contador = 0, jogada;
Random r = new Random();
boolean jogadorVenceu = false;
do {
System.out.print("Escolha uma das seguintes opções:"
+ ls
+ ls + "1 - Instruções"
+ ls + "2 - Jogar"
+ ls + "3 - Créditos"
+ ls + "4 - Sair"
+ ls
+ ls + "Opção: ");
inst = in.nextInt();
switch (inst) {
case 1:
System.out.println(ls + "________________________INSTRUÇÕES________________________"
+ ls
+ ls + "Chegue até o 20!"
+ ls
+ ls + "O objetivo do jogo é chegar até o número 20. Primeiramente "
+ ls + "um jogador é escolhido randomicamente, após escolhe entre "
+ ls + "1 e 2 para começar. O seu adversário deve somar 1 ou 2 ao "
+ ls + "número que foi escolhido anteriormente, e assim sucessiva-"
+ ls + "mente até um dos jogadores chegar ao número 20."
+ ls + "__________________________________________________________" + ls);
break;
case 2:
System.out.print(ls + "___________________________PLAY___________________________"
+ ls
+ ls + "Digite o seu nome: ");
nome = in.next();
if(r.nextInt(200)%2 == 0){
System.out.println(ls + "\"" + nome + "\" foi escolhido para iniciar!");
do {
System.out.print("Comece com 1 ou 2"
+ ls
+ ls + nome + ": ");
jogada = in.nextInt();
if(jogada > 2 || jogada < 1)
System.out.println(ls + "jogada inválida!");
} while(jogada > 2 || jogada < 1);
contador = jogada;
System.out.println("Contador: " + contador+ ls);
} else {
System.out.println(ls + "\"cpu\" foi escolhido para iniciar!" + ls);
}
while(contador < 20) {
if(contador%3 == 0) {
contador += 2;
System.out.println("cpu: 2");
} else {
contador += 1;
System.out.println("cpu: 1");
}
System.out.println("Contador: " + contador + ls);
if(contador != 20) {
System.out.print(nome + ": ");
jogada = in.nextInt();
while(jogada > 2 || jogada < 1) {
System.out.print("Jogada inválida"
+ ls + nome + ": ");
jogada = in.nextInt();
}
contador += jogada;
System.out.println("Contador: " + contador + ls);
if(contador >= 20)
jogadorVenceu = true;
}
}
if(jogadorVenceu) {
System.out.println("Você venceu!!! :D"
+ ls + "__________________________________________________________" + ls);
} else {
System.out.println("Você perdeu!!! D:"
+ ls + "__________________________________________________________" + ls);
}
break;
case 3:
System.out.println(ls + "_________________________CRÉDITOS_________________________"
+ ls
+ ls + "Jogo produzido por Sillas Cavalcante"
+ ls
+ ls + "Parceria com Professor Márcio"
+ ls + "__________________________________________________________" + ls);
break;
}
} while (inst != 4);
System.out.print(ls + "Fim! :D");
in.close();
}
}
<file_sep>/ProjetoIntegrador1/src/ADO03/Questao02.java
package ADO03;
import java.util.Scanner;
public class Questao02 {
public static void main(String[] args) {
Scanner input = new Scanner(System.in);
double l1, l2, l3;
String ls = System.getProperty("line.separator");
System.out.print("Insira a seguir, um por vez, os valores de cada lado do triânggulo:"
+ ls
+ ls + "a = ");
l1 = input.nextDouble();
System.out.print("b = ");
l2 = input.nextDouble();
System.out.print("c = ");
l3 = input.nextDouble();
if(l1 >= l2+l3 || l2 >= l1+l3 || l3 >= l1+l2)
System.out.print(ls + "Os valores inseridos não formam um triângulo, pois não atendem à condição de existência do mesmo!");
else if(l1 == l2 && l1 == l3)
System.out.print(ls + "Esse triânggulo é equilátero!");
else if((l1 == l2 && l1 != l3) || (l1 == l3 && l1 != l2) || (l2 == l3 && l2 != l1))
System.out.print(ls + "Esse triânggulo é isósceles!");
else
System.out.print(ls + "Esse triânggulo é escaleno!");
input.close();
}
}
<file_sep>/ProjetoIntegrador1/src/ADO03/Questao03.java
package ADO03;
import java.util.Scanner;
public class Questao03 {
public static void main(String[] args) {
Scanner input = new Scanner(System.in);
String ls = System.clearProperty("line.separator");
System.out.print("Insira um ano, e eu te direi se ele é bissexto ;)" + ls + "Ano: ");
int year = input.nextInt();
if(year%400 == 0 || (year%4 == 0 && year%100 != 0))
System.out.print(ls + "é bissexto! :D");
else
System.out.print(ls + "Não é bissexto! :(");
input.close();
}
}
<file_sep>/ProjetoIntegrador1/src/ADO06/Questao01.java
package ADO06;
import java.util.*;
public class Questao01 {
public static void main(String[] args) {
Scanner in = new Scanner(System.in);
String ls = System.getProperty("line.separator");
int tentativa = 1;
List<String> questoes = new ArrayList<String>();
questoes.add("3 x 2^10");
questoes.add("3 x 2^13");
questoes.add("1 + 2^13");
questoes.add("2^23" );
questoes.add("2^43" );
do {
System.out.print(" _________________PROVA__________________"
+ ls + "|MATEMÁTICA PARA TECNOLOGIA DA INFORMAÇÃO|"
+ ls + " ----------------------------------------"
+ ls
+ ls + "Encontre a solução para a expressão abaixo"
+ ls + "e escolha a alternativa que apresenta a "
+ ls + "resposta correta:"
+ ls + " ___________"
+ ls + "√2^20 + 2^23 = ?"
+ ls
+ ls + "(a) 3 x 2^10"
+ ls + "(b) 3 x 2^13"
+ ls + "(c) 1 + 2^13"
+ ls + "(d) 2^23 "
+ ls + "(e) 2^43 "
+ ls
+ ls + "Resposta: ");
switch(in.next()) {
case "a":
System.out.print(ls + "Resposta correta na " + tentativa + "ª tentativa!");
tentativa = 4;
break;
case "b":
case "c":
case "d":
case "e":
System.out.print(ls + tentativa + "ª tentativa - Resposta incorreta!" + ls);
break;
default:
System.out.print(ls + tentativa + "ª tentativa - Resposta inválida!" + ls);
}
if(tentativa < 3)
System.out.print("Preste mais atenção na próxima tentativa!" + ls + ls);
else if(tentativa == 3)
System.out.println("Resposta incorreta nas 3 tentativas!");
tentativa++;
} while(tentativa <= 3);
in.close();
}
}
<file_sep>/ProjetoIntegrador1/src/ADO02/Questao01.java
package ADO02;
import java.util.Scanner;
public class Questao01 {
public static void main(String[] args) {
Scanner entrada = new Scanner(System.in);
System.out.println("Insira três números");
double n1 = entrada.nextDouble();
double n2 = entrada.nextDouble();
double n3 = entrada.nextDouble();
double mult = n1 * n2 * n3;
System.out.println("O produto desses números resulta em: " + mult);
entrada.close();
}
}
<file_sep>/ProjetoIntegrador1/src/ADO02/Questao04.java
package ADO02;
import java.util.Scanner;
public class Questao04 {
public static void main(String[] args) {
Scanner entrada = new Scanner(System.in);
System.out.println("Me diga um nome?");
String nome = entrada.next();
System.out.println("Esse nome possui " + nome.length() + " caracteres.");
entrada.close();
}
}
<file_sep>/ProjetoIntegrador1/src/ADO07/Questao.java
package ADO07;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Scanner;
public class Questao {
public static void main(String[] args) {
Scanner in = new Scanner(System.in);
String ls = System.getProperty("line.separator");
boolean resposta = false;
List<String> questoes = new ArrayList<String>();
questoes.add("3 x 2^10");
questoes.add("3 x 2^13");
questoes.add("1 + 2^13");
questoes.add("2^23");
questoes.add("2^43");
System.out.print(" _________________PROVA__________________"
+ ls + "|MATEMÁTICA PARA TECNOLOGIA DA INFORMAÇÃO|"
+ ls + " ----------------------------------------" + ls);
do {
Collections.shuffle(questoes);
System.out.print(ls + "Encontre a solução para a expressão abaixo"
+ ls + "e escolha a alternativa que apresenta"
+ ls + "a resposta correta:"
+ ls
+ ls + "(2^20 + 2^23)^(1/2) = ?"
+ ls
+ ls + "a) " + questoes.get(0)
+ ls + "b) " + questoes.get(1)
+ ls + "c) " + questoes.get(2)
+ ls + "d) " + questoes.get(3)
+ ls + "e) " + questoes.get(4)
+ ls
+ ls + "Resposta: ");
switch(in.next().toLowerCase()) {
case "a":
resposta = questoes.get(0).contains("10");
break;
case "b":
resposta = questoes.get(1).contains("10");
break;
case "c":
resposta = questoes.get(2).contains("10");
break;
case "d":
resposta = questoes.get(3).contains("10");
break;
case "e":
resposta = questoes.get(4).contains("10");
}
if(!resposta)
System.out.println("Resposta incorreta!");
} while(!resposta);
System.out.println("Resposta correta!");
in.close();
}
}
<file_sep>/ProjetoIntegrador1/src/ADO04/Questao02.java
package ADO04;
import java.util.Scanner;
public class Questao02 {
public static void main(String[] args) {
Scanner in = new Scanner(System.in);
String ls = System.getProperty("line.separator");
System.out.println("Encontre a solução da expressão abaixo, ");
System.out.println("e em seguida, insira a alternativa ");
System.out.println("correspondente à solução encontrada!" + ls);
System.out.println("[3(6+3)^2]/[(3!)+(3!)-(3!!)]" + ls);
System.out.println("<a> 9");
System.out.println("<b> 81");
System.out.println("<c> 45/2");
System.out.println("<d> 45/236");
System.out.print("<e> 27" + ls + ls + "Resposta: ");
switch(in.next().toLowerCase()) {
case "a":
case "e":
case "c":
case "d":
System.out.print(ls + "Resposta incorreta");
break;
case "b":
System.out.print(ls + "Resposta correta");
break;
default:
System.out.print(ls + "Você inseriu um valor inválido!");
}
in.close();
}
}
<file_sep>/ProjetoIntegrador1/src/ADO03/Questao01.java
package ADO03;
import java.util.Scanner;
public class Questao01 {
public static void main(String[] args) {
System.out.print("Insira o valor da compra: R$");
Scanner input = new Scanner(System.in);
double value = input.nextDouble();
double discount, discountedValue;
if (value >= 300) {
discount = value * 0.2;
discountedValue = value - discount;
System.out.println("Valor do seu desconto: R$" + String.format("%.2f", discount));
System.out.println("Valor da compra após o desconto: R$" + String.format("%.2f", discountedValue));
}else {
discount = value * 0.15;
discountedValue = value - discount;
System.out.println("Valor do seu desconto: R$" + String.format("%.2f", discount));
System.out.println("Valor da compra após o desconto: R$" + String.format("%.2f", discountedValue));
}
input.close();
}
} | 0882f3d795cc57f5621603cd88943f5161c825c6 | [
"Java"
] | 9 | Java | sillasHead/SENAC_TADS_Projeto_Integrador_1 | eb0e612a08ba309ff4aba544a4b183989f95bab0 | a71af9b35c75347c6892073d12c21fbbf900feee | |
refs/heads/master | <repo_name>wanghedi1991/cs6675-mobAlarm<file_sep>/MobAlarm/appserver/admin.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import User, Location
# Register your models here.
class LocationAdmin(admin.ModelAdmin):
list_display = ['category', 'name', 'latitude', 'longitude', 'grid_id', 'address']
class UserAdmin(admin.ModelAdmin):
list_display = ['username','last_grid_id']
admin.site.register(Location, LocationAdmin)
admin.site.register(User, UserAdmin)<file_sep>/MobAlarm/appserver/datamanager.py
from __future__ import unicode_literals
import requests
from django.db import models
from appserver.models import User, Location
from appserver.gridmanager import computeGridId, computeNearbyGridId, getLocationsInGrids, computeInnterBox, range_dict, computeNearbyGridIdwithDirection, handle_location
google_place_api = '<KEY>'
place_search_api_prefix = 'https://maps.googleapis.com/maps/api/place/radarsearch/json?location=33.775622,-84.398473&radius=5000&type='
place_detail_api_prefix = 'https://maps.googleapis.com/maps/api/place/details/json?placeid='
category_list = ['supermarket', 'gasstation', 'postoffice', 'atm', 'shoppingmall']
def downloadDataFromGoogle(password, category):
if password == '<PASSWORD>':
url = place_search_api_prefix + category + '&key=' + google_place_api
r = requests.get(url)
results = r.json()['results']
else:
res_status = dict(status='fail')
return res_status
for item in results:
place_id = item['place_id']
lat = item['geometry']['location']['lat']
lng = item['geometry']['location']['lng']
if lng < range_dict['left'] or lng > range_dict['right'] or lat < range_dict['bottom'] or lat > range_dict[
'top']:
continue
detail_url = place_detail_api_prefix + place_id + '&key=' + google_place_api
detail_r = requests.get(detail_url)
detail_result = detail_r.json()['result']
business_address = detail_result['formatted_address']
business_name = detail_result['name']
business_grid_id = computeGridId(lat, lng)
# business_description = ???
new_business = Location(category=category, name=business_name, grid_id=business_grid_id, latitude=lat, longitude=lng,
address=business_address)
new_business.save()
res_status = dict(status='success', numOfResult=len(results))
return res_status
<file_sep>/mobile/Location Notifier/Location Notifier/FirstViewController.swift
//
// FirstViewController.swift
// Location Notifier
//
// Created by <NAME> on 3/14/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
class FirstViewController: UIViewController, UITableViewDelegate, UITableViewDataSource, UIPickerViewDataSource, UIPickerViewDelegate {
@IBOutlet weak var typePicker: UIPickerView!
@IBOutlet weak var notificationTypeTable: UITableView!
var types:[NotificationType] = []
var addList:[NotificationType] = [Constant.testType0, Constant.testType1, Constant.testType2]
var tempType:Int = 0
override func viewDidLoad() {
super.viewDidLoad()
let parentController = self.tabBarController as! TableHolderController
types = parentController.notificationTypes
self.notificationTypeTable.delegate = self
self.notificationTypeTable.dataSource = self
self.notificationTypeTable.tableFooterView = UIView()
self.typePicker.dataSource = self
self.typePicker.delegate = self
setNavigationBar()
}
func setNavigationBar(){
(self.tabBarController as! TableHolderController).navigationItem.rightBarButtonItem = UIBarButtonItem(title: "Add", style: UIBarButtonItemStyle.plain, target: self, action: #selector(addNotificationType))
}
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return self.types.count
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "notificationTypeCell", for: indexPath) as! NotificationTypeCell
cell.nameLabel.text = types[indexPath.row].name
return cell
}
func tableView(_ tableView: UITableView, editActionsForRowAt indexPath: IndexPath) -> [UITableViewRowAction]? {
let deleteAction = UITableViewRowAction(style: .destructive, title: "Delete", handler: { (action, indexPath) in
self.types.remove(at: indexPath.row)
(self.tabBarController as! TableHolderController).notificationTypes.remove(at: indexPath.row)
self.notificationTypeTable.reloadData()
})
deleteAction.backgroundColor = UIColor.red
return [deleteAction]
}
func numberOfComponents(in pickerView: UIPickerView) -> Int {
return 1
}
func pickerView(_ pickerView: UIPickerView, numberOfRowsInComponent component: Int) -> Int {
return addList.count
}
func pickerView(_ pickerView: UIPickerView, titleForRow row: Int, forComponent component: Int) -> String? {
return addList[row].name
}
func pickerView(_ pickerView: UIPickerView, didSelectRow row: Int, inComponent component: Int) {
self.tempType = row
}
@IBAction func addNotificationType(_ sender: UIBarButtonItem) {
// self.doneButton.isHidden = false
if self.typePicker.isHidden {
self.typePicker.isHidden = false
self.notificationTypeTable.isHidden = true
sender.title = "Done"
} else {
self.typePicker.isHidden = true
self.notificationTypeTable.isHidden = false
if self.types.contains(addList[tempType]) {
showToast(message: "It is already added")
} else {
self.types.append(addList[tempType])
(self.tabBarController as! TableHolderController).notificationTypes.append(addList[tempType])
self.notificationTypeTable.reloadData()
}
sender.title = "Add"
}
}
}
extension UIViewController {
func showToast(message : String) {
let toastLabel = UILabel(frame: CGRect(x: self.view.frame.size.width/2 - 75, y: self.view.frame.size.height-100, width: 150, height: 35))
toastLabel.backgroundColor = UIColor.black.withAlphaComponent(0.6)
toastLabel.textColor = UIColor.white
toastLabel.textAlignment = .center;
toastLabel.font = UIFont(name: "Montserrat-Light", size: 12.0)
toastLabel.text = message
toastLabel.alpha = 1.0
toastLabel.layer.cornerRadius = 10;
toastLabel.clipsToBounds = true
self.view.addSubview(toastLabel)
UIView.animate(withDuration: 4.0, delay: 0.1, options: .curveEaseOut, animations: {
toastLabel.alpha = 0.0
}, completion: {(isCompleted) in
toastLabel.removeFromSuperview()
})
}
}
<file_sep>/MobAlarm/MobAlarm/urls.py
"""MobAlarm URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from appserver import views
urlpatterns = [
url(r'^register/username=(?P<username>.*)&password=(?P<password>.*)',views.user_register, name = 'user_register'),
url(r'^login/username=(?P<username>.*)&password=(?P<password>.*)',views.user_login, name = 'user_login'),
url(r'^add/username=(?P<username>.*)&category=(?P<category>.*)', views.add_event, name = 'add_event'),
url(r'^delete/username=(?P<username>.*)&category=(?P<category>.*)', views.delete_event, name = 'delete_event'),
url(r'^location/username=(?P<username>.*)&latitude=(?P<latitude>[+-]?(\d*\.)?\d+)&longitude=(?P<longitude>[+-]?(\d*\.)?\d+)', views.handle_location_without_angle, name = 'handle_location_without_angle'),
url(r'^location/username=(?P<username>.*)&latitude=(?P<latitude>[+-]?(\d*\.)?\d+)&longitude=(?P<longitude>[+-]?(\d*\.)?\d+)&angle=(?P<angle>[+-]?(\d*\.)?\d+)', views.handle_location_with_angle, name = 'handle_location_with_angle'),
url(r'^processdata/password=(?P<password>.*)/category=(?P<category>.*)', views.process_data, name = 'process_data'),
url(r'^admin/', admin.site.urls),
]
<file_sep>/MobAlarm/appserver/views.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from django.shortcuts import render
from django.http import HttpResponse
from django.http import Http404
from django.http import JsonResponse
from appserver.models import User, Location
from appserver.datamanager import downloadDataFromGoogle, category_list
from appserver.gridmanager import computeGridId, computeNearbyGridId, getLocationsInGrids, computeInnterBox, range_dict, computeNearbyGridIdwithDirection, handle_location
import bz2
from django.contrib.auth import authenticate, login
from django.contrib.auth.models import User as U
from django.contrib.auth.decorators import login_required
def user_register(request, username, password):
# response with fail message if username exist
try:
user = U.objects.get(username=username)
encrypted_password = None
except U.DoesNotExist:
user = None
encrypted_password = <PASSWORD>(password.encode('utf-8'))
if user is not None:
status = dict(type='post_response', status='fail', reason='username has already been used')
return JsonResponse(status, safe=False)
# register auth user
new_u = U(username=username, password=<PASSWORD>)
new_u.save()
# add new user to User table
new_user = User(username=new_u)
new_user.save()
# print(new_user.username)
status = dict(type='post_response', status='succeed', reason='register succeeded')
return JsonResponse(status, safe=False)
def user_login(request, username, password):
user, u = verifyUser(username)
print(user)
input_password = str(bz2.compress(password.encode('utf-8')))
# print(input_password)
# test = authenticate(username='ll')
# print(test)
# reponse with fail message if username does not exist
if u is None:
status = dict(type='post_response', status='fail', reason='user does not exist')
return JsonResponse(status, safe=False)
# check password
if u is not None and input_password != <PASSWORD>:
# print("wrong password")
status = dict(type='post_response', status='fail', reason='wrong password')
return JsonResponse(status, safe=False)
# login
login(request, u)
status = dict(type='post_response', status='succeed', reason='user login succeeded')
return JsonResponse(status, safe=False)
# @login_required(login_url='/login/')
def add_event(request, username, category):
user, u = verifyUser(username)
# if not request.user.is_authenticated:
# print("here")
# reponse with fail message if username does not exist
if user is None:
status = dict(type='post_response', status='fail', reason='user does not exist')
return JsonResponse(status, safe=False)
# reponse with fail message if category is not supported
if category not in category_list:
status = dict(type='post_response', status='fail', reason='wrong category')
return JsonResponse(status, safe=False)
# register user with the category
user.__dict__[category] = True
user.save()
status = dict(type='post_response', status='succeed', reason='event is added successfully')
return JsonResponse(status, safe=False)
def delete_event(request, username, category):
user, u = verifyUser(username)
# reponse with fail message if username does not exist
if user is None:
status = dict(type='post_response', status='fail', reason='user does not exist')
return JsonResponse(status, safe=False)
# reponse with fail message if categories does not exist
if category not in category_list:
status = dict(type='post_response', status='fail', reason='wrong category')
return JsonResponse(status, safe=False)
# unregister user with the category
user.__dict__[category] = False
user.save()
status = dict(type='post_response', status='succeed', reason='event is deleted successfully')
# status = dict(type = 'post_response', status = 'succeed', reason = 'event is deleted successfully')
return JsonResponse(status, safe=False)
def handle_location_with_angle(request, username, latitude, longitude, angle):
user, u = verifyUser(username)
status = handle_location(user, latitude, longitude, angle)
return JsonResponse(status, safe=False)
def handle_location_without_angle(request, username, latitude, longitude):
user, u = verifyUser(username)
status = handle_location(user, latitude, longitude, angle = -1)
return JsonResponse(status, safe=False)
# admin method for grabbing location data from GOOGLE API, and put locationw with grid_id into online database
# !!!!!!!!!!!!DO NOT run this function, or database may get in trouble!!!!!!!!!!!
def process_data(request, password, category):
status = downloadDataFromGoogle(password, category)
return JsonResponse(status, safe=False)
# the function is for user authentication
def verifyUser(username):
try:
u = U.objects.get(username=username)
user = User.objects.get(username=u)
except U.DoesNotExist:
u = None
user = None
return user, u
<file_sep>/README.md
# cs6675-mobAlarm
## Prerequisit:
1. python 3.x
2. django 1.11.11 or laters
3. mysqlclient 1.3.7 or laters
## To Run on your mac
1. pip install django
2. pip install mysqlclient
3. mkdir cs6675-mobAlarm
4. cd cs6675-mobAlarm
5. git clone https://github.com/Rwang721/cs6675-mobAlarm.git
6. cd MobAlarm
7. python manage.py runserver
## Client-Server Interaction:
please see google drive 6675document.docx
## Now support category:
[atm, suppermarket, postoffice, shoppingmall, gasstation]
## To-Do Lists:
1. Support more categories
2. User authentitcation
3. Client IOS app that support:
(1) User safe register/login
(2) Add/Delete Event
(3) Location Notification
## For windows:
who knows...
<file_sep>/MobAlarm/appserver/models.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User as U
from django.conf import settings
# Create your models here.
class User(models.Model):
# username = models.CharField(max_length = 100)
username = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
# password = models.TextField(null = True)
last_grid_id = models.IntegerField(default = -1)
supermarket = models.BooleanField(default = False)
gasstation = models.BooleanField(default = False)
atm = models.BooleanField(default = False)
postoffice = models.BooleanField(default = False)
shoppingmall = models.BooleanField(default = False)
class Location(models.Model):
category = models.CharField(max_length = 100)
name = models.CharField(max_length = 200)
grid_id = models.IntegerField(db_index = True)
latitude = models.DecimalField(decimal_places = 6, max_digits = 8)
longitude = models.DecimalField(decimal_places = 6, max_digits = 8)
address = models.TextField(null = True)
description = models.TextField(null = True, blank = True)
<file_sep>/mobile/Location Notifier/Location Notifier/LoginViewController.swift
//
// LoginViewController.swift
// Location Notifier
//
// Created by <NAME> on 3/24/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
class LoginViewController: UIViewController, UITextFieldDelegate {
@IBOutlet weak var loginLabel: UILabel!
@IBOutlet weak var usernameLabel: UILabel!
@IBOutlet weak var usernameText: UITextField!
@IBOutlet weak var passwordTextField: UITextField!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
usernameText.delegate = self
//self.navigationItem.titleView = UIImageView(image: UIImage(named: "title_small.png"))
self.navigationItem.title = "Login Page"
let loginItem = UIBarButtonItem(title: "Login", style: UIBarButtonItemStyle.plain, target: self, action: #selector(loginButtonPressed))
self.navigationItem.rightBarButtonItem = loginItem
self.navigationController?.navigationBar.tintColor = UIColor.white
usernameText.leftViewMode = UITextFieldViewMode.always
let imageView = UIImageView(frame: CGRect(x: 0, y: 0, width: 30, height: 30))
let image = UIImage(named: "user.png")
imageView.image = image
usernameText.leftView = imageView
passwordTextField.leftViewMode = UITextFieldViewMode.always
let imageViewPassword = UIImageView(frame: CGRect(x: 0, y: 0, width: 30, height: 30))
let imagePassword = UIImage(named: "password.png")
imageViewPassword.image = imagePassword
passwordTextField.leftView = imageViewPassword
passwordTextField.delegate = self
}
func textFieldShouldReturn(_ textField: UITextField) -> Bool {
usernameText.endEditing(true)
return true
}
@IBAction func loginButtonPressed(_ sender: UIButton) {
let storyBoard: UIStoryboard = UIStoryboard(name: "Main", bundle: nil)
let newViewController = storyBoard.instantiateViewController(withIdentifier: "tableHolderController") as! TableHolderController
self.navigationController?.pushViewController(newViewController, animated: true)
}
}
<file_sep>/MobAlarm/appserver/gridmanager.py
from __future__ import unicode_literals
from django.db import models
from appserver.models import User, Location
category_list = ['supermarket', 'gasstation', 'postoffice', 'atm', 'shoppingmall']
# neighbor grids in grid system
dx = [1, 0, -1]
dy = [1, 0, -1]
num_of_col = 66
num_of_row = 66
# atlanta boundary
range_dict = dict(top=33.875448, bottom=33.575448, left=-84.563182, right=-84.263182)
# given coordinates, compute the grid_id
def computeGridId(lat, lng):
# the gird has 44 rows and 66 cols
row = int((range_dict['top'] - float(lat)) * 110000) // 500
col = int((float(lng) - range_dict['left']) * 110000) // 500
print(row, col)
return row * num_of_col + col
#input a clockwise angle from the north, in the range[0, 360)
def computeNearbyGridIdwithDirection(lat, lng, angle):
grid_id = computeGridId(lat, lng)
row_id = grid_id // num_of_col
col_id = grid_id % num_of_col
result = []
if (angle >= 0 and angle < 45) or (angle >= 315 and angle < 360) :
for i in range(-1, 2):
for j in (-1, 1):
x = row_id + i
y = col_id + j
if x >= 0 and y >= 0 and x < num_of_row and y < num_of_col:
result.append(x * num_of_col + y)
elif angle >= 45 and angle < 135:
for i in range(0, 2):
for j in (-1, 2):
x = row_id + i
y = col_id + j
if x >= 0 and y >= 0 and x < num_of_row and y < num_of_col:
result.append(x * num_of_col + y)
elif angle >= 135 and angle < 225:
for i in range(-1, 2):
for j in (0, 2):
x = row_id + i
y = col_id + j
if x >= 0 and y >= 0 and x < num_of_row and y < num_of_col:
result.append(x * num_of_col + y)
elif angle >=225 and angle < 315:
for i in range(-1, 1):
for j in (-1, 2):
x = row_id + i
y = col_id + j
if x >= 0 and y >= 0 and x < num_of_row and y < num_of_col:
result.append(x * num_of_col + y)
return result
# given a grid_id, given all the neighbors' gird_id
def computeNearbyGridId(lat, lng):
grid_id = computeGridId(lat, lng)
row_id = grid_id // num_of_col
col_id = grid_id % num_of_col
result = []
for i in range(-1, 2):
for j in range(-1, 2):
x = row_id + i
y = col_id + j
if x >= 0 and y >= 0 and x < num_of_row and y < num_of_col:
result.append(x * num_of_col + y)
return result
# return satisfying locations
def getLocationsInGrids(nearbyGridIds, userEvents):
print(nearbyGridIds)
print(userEvents)
locations = Location.objects.filter(grid_id__in=nearbyGridIds).filter(category__in=userEvents)
return locations
# compute the innerbox
def computeInnterBox(nearbyLocations, grid_id, user_lat, user_lng):
row_id = grid_id // num_of_col
col_id = grid_id % num_of_col
# the 3*3 gird neighborhood servers as a default box
top = min(range_dict['top'] - ((row_id - 1) * 0.0045), range_dict['top'])
bottom = max(range_dict['top'] - ((row_id + 2) * 0.0045), range_dict['bottom'])
left = max(range_dict['left'] + ((col_id - 1) * 0.0045), range_dict['left'])
right = min(range_dict['left'] + ((col_id + 2) * 0.0045), range_dict['right'])
results = []
# the innerbox shrinks as testing each candidate locations in the 3*3 neighborhood
for location in nearbyLocations:
l = float(location.longitude) - 0.0005
r = float(location.longitude) + 0.0005
t = float(location.latitude) + 0.0005
b = float(location.latitude) - 0.0005
print(l, r, t, b, location.longitude, location.latitude)
# Locations in user's range will not be considered for computing the innerbox, they will be added to the result lists.
if user_lat < t and user_lat > b and user_lng < r and user_lng > l:
item = dict(name=location.name, category=location.category, latitude=location.latitude,
longitude=location.longitude, description="Blank")
results.append(item)
continue
if l > user_lng and l < right:
right = l
if r < user_lng and r > left:
left = r
if b > user_lat and b < top:
top = b
if t < user_lat and t > bottom:
bottom = t
return dict(top=top, bottom=bottom, left=left, right=right), results
def handle_location(user, latitude, longitude, angle = -1):
# reponse with fail message if username does not exist
if user is None:
status = dict(type='post_response', status='fail', reason='user does not exist')
return status
lng = float(longitude)
lat = float(latitude)
# reponse with fail message if user is not in supporte area
if lng < range_dict['left'] or lng > range_dict['right'] or lat < range_dict['bottom'] or lat > range_dict['top']:
status = dict(type='post_response', status='fail', reason='user out of range')
return status
# update user's current grid_id
user.last_grid_id = computeGridId(latitude, longitude)
user.save()
userEvents = []
# get all register event of the user
for category in category_list:
if user.__dict__[category] is True:
userEvents.append(category)
# compute nearby grids
if angle == -1:
nearbyGridIds = computeNearbyGridId(latitude, longitude)
else:
nearbyGridIds = computeNearbyGridIdwithDirection(latitude, longitude, angle)
# compute locations in nearby grids
nearbyLocations = getLocationsInGrids(nearbyGridIds, userEvents)
# compute the request-free inner box for the user, and a set of notifiable results
innerbox, results = computeInnterBox(nearbyLocations, user.last_grid_id, lat, lng)
# reponse with results and the innerbox
status = dict(type='result_set', box=innerbox, numberOfResult=len(results), results=results)
return status
<file_sep>/mobile/Location Notifier/Location Notifier/TableHolderController.swift
//
// tableHolderController.swift
// Location Notifier
//
// Created by <NAME> on 3/25/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
class TableHolderController: UITabBarController, UITabBarControllerDelegate {
var notificationTypes:[NotificationType] = [Constant.testType0, Constant.testType1, Constant.testType2]
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
self.navigationItem.title = "Notification Categories"
self.delegate = self
}
func tabBarController(_ tabBarController: UITabBarController, didSelect viewController: UIViewController) {
print(String(self.selectedIndex))
if self.selectedIndex == 0 {
(viewController as! FirstViewController).setNavigationBar()
} else if selectedIndex == 1 {
(viewController as! SecondViewController).setNavigationBar()
}
}
/*
// MARK: - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
// Get the new view controller using segue.destinationViewController.
// Pass the selected object to the new view controller.
}
*/
}
| b3231b7ce48221ed3e92aecf04c37dbf47d24e86 | [
"Swift",
"Python",
"Markdown"
] | 10 | Python | wanghedi1991/cs6675-mobAlarm | 90885687bafd42cf7737c0c4d98ae8c3db28814a | 800d95ba250a1dd0baebfb533d004c348c08689b | |
refs/heads/main | <repo_name>casatom/Macowins<file_sep>/src/main/java/Venta/Negocio.java
package Venta;
import java.time.LocalDate;
import java.util.Collections;
import java.util.List;
public class Negocio {
private List<Venta> ventas;
public Double gananciasDe(LocalDate fecha){
return ventas.stream().filter(v->v.esDeFecha(fecha)).mapToDouble(v->v.importe()).sum();
}
public Negocio(List<Venta> ventas) {
this.ventas = ventas;
}
public Negocio(Venta ... ventas) {
Collections.addAll(this.ventas,ventas);
}
}
<file_sep>/src/main/java/Venta/Tarjeta.java
package Venta;
public class Tarjeta extends Venta{
private Integer cantudadCuotas;
private Double coeficienteTarjeta;
@Override
public Double importe(){
return coeficienteTarjeta*cantudadCuotas + 0.01*super.importe() + super.importe();
}
}
<file_sep>/src/test/java/Estado/PromocionTest.java
package Estado;
import org.junit.Assert;
import org.junit.Test;
public class PromocionTest {
public Double delta = new Double(1e-15);
@Test
public void evaluarPromocion(){
Promocion promo = new Promocion(70);
Assert.assertEquals(500.0, promo.precioFinal(570.0),delta);
}
@Test
public void evaluarSobrePromocion(){
Promocion promo = new Promocion(70);
Assert.assertEquals(0.0, promo.precioFinal(50.0),delta);
}
}
<file_sep>/src/test/java/Item/PrendaTest.java
package Item;
import Estado.Liquidacion;
import Estado.Nueva;
import Estado.Promocion;
import org.junit.Assert;
import org.junit.Test;
public class PrendaTest {
public Double delta = new Double(1e-15);
@Test
public void evaluarPrenda(){
//Evaluamos que prendas que solo cambien su estado cambien de precio
Prenda prenda1 = new Prenda(new Promocion(50),300.0,TipoItem.SACO);
Prenda prenda2 = new Prenda(new Liquidacion(),300.0,TipoItem.SACO);
Prenda prenda3 = new Prenda(new Liquidacion(25.0),300.0,TipoItem.SACO);
Prenda prenda4 = new Prenda(new Nueva(),300.0,TipoItem.SACO);
Assert.assertEquals(250.0,prenda1.precio(),delta);
Assert.assertEquals(150.0,prenda2.precio(),delta);
Assert.assertEquals(225.0,prenda3.precio(),delta);
Assert.assertEquals(300.0,prenda4.precio(),delta);
}
//TODO ver que test faltan
}
| 4d79d4a701d4500ec0d5dba992be7b1159d2cb03 | [
"Java"
] | 4 | Java | casatom/Macowins | cb69ee6c96d5ba446226597ee85f4e9575d98cdc | ac1505fb207e6220e6f906e6f2056416f6d62a96 | |
refs/heads/master | <repo_name>anacat/Electronics-Super-Shop<file_sep>/Assets/Scripts/Devices/DeviceClick.cs
using UnityEngine;
using System.Collections;
public class DeviceClick : MonoBehaviour {
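// Registers this device with the GameManager when it is clicked, shows the
// "selected" child marker and fades the sprite to half alpha as visual feedback.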
void OnMouseDown()
{
SpriteRenderer sr = GetComponent<SpriteRenderer>();
GameManager.AddDeviceSelection(GetComponent<DeviceComponent>());
transform.GetChild(0).gameObject.SetActive(true);
sr.color = new Color(sr.color.r, sr.color.g, sr.color.b, 0.5f);
}
}
<file_sep>/Assets/Scripts/DeviceSpawn.cs
using UnityEngine;
using System.Collections;
public class DeviceSpawn : MonoBehaviour {
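// Instantiates every device the player selected in the previous scene as soon as this scene loads.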
void Awake()
{
if (GameManager.GetSelectedDevices() != null)
{
foreach (GameObject g in GameManager.GetSelectedDevices())
{
Instantiate(g);
}
}
}
}
<file_sep>/Assets/Scripts/Briefing/BriefingManager.cs
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
public static class BriefingManager
{
private static List<Briefing> _briefingList = new List<Briefing>();
private static List<GameObject> _prefabList = new List<GameObject>();
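// Loads every briefing prefab from Resources/Briefings and caches the Briefing data attached to them.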
public static List<Briefing> GetAllBriefings()
{
// Rebuild both caches so repeated calls do not duplicate briefings.
_briefingList = new List<Briefing>();
_prefabList = new List<GameObject>();
_prefabList.AddRange(Resources.LoadAll<GameObject>("Briefings"));
foreach (GameObject g in _prefabList)
{
_briefingList.Add(g.GetComponentInChildren<BriefingInfo>(true).GetBriefing());
}
return _briefingList;
}
public static Briefing GetRandomBriefing()
{
// The int overload of Random.Range excludes the upper bound, so use Count so the last briefing can also be selected.
int random = Random.Range(0, _briefingList.Count);
return _briefingList[random];
}
}
<file_sep>/Assets/Scripts/Store/ShopManager.cs
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
public class ShopManager : MonoBehaviour
{
public GameObject storeItemPrefab;
public Transform storeContainer;
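// Builds the shop UI: one ShopItem entry is spawned inside storeContainer for every cable and device prefab known to the GameManager.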
void Awake()
{
List<GameObject> cables = GameManager.GetAllCablesGameObjects();
List<GameObject> devices = GameManager.GetAllDeviceGameObjects();
foreach (GameObject g in cables)
{
GameObject obj = (GameObject)Instantiate(storeItemPrefab);
ShopItem shopItem = obj.GetComponent<ShopItem>();
obj.transform.SetParent(storeContainer);
shopItem.SetCable(g.GetComponentInChildren<CableComponent>(true));
}
foreach (GameObject g in devices)
{
GameObject obj = (GameObject)Instantiate(storeItemPrefab);
ShopItem shopItem = obj.GetComponent<ShopItem>();
obj.transform.SetParent(storeContainer);
shopItem.SetDevice(g.GetComponentInChildren<DeviceComponent>(true));
}
}
}
<file_sep>/Assets/Scripts/DeviceManager.cs
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
public class DeviceManager : MonoBehaviour {
public DeviceComponent[] allDevices;
public Dictionary<string, DeviceComponent> availableComponents;
void Start()
{
availableComponents = GameManager.GetAllDevices();
SetAvailableDevices();
}
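// Disables every device in the scene whose entry in availableComponents is not flagged as available.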
private void SetAvailableDevices()
{
for (int i = 0; i < allDevices.Length; i++)
{
if (!availableComponents[DeviceComponentHelper.DeviceName(allDevices[i].deviceType)].IsAvailabe)
{
allDevices[i].gameObject.SetActive(false);
}
}
}
}
<file_sep>/Assets/Scripts/Game/DeviceComponentHelper.cs
using UnityEngine;
using System.Collections;
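// Central lookup for the connector and device enums used across the game,
// plus the display/key string that matches each enum value.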
public class DeviceComponentHelper : MonoBehaviour
{
public enum ComponentType
{
dvi,
hdmi,
midi,
mini_dvi,
rca,
s_video,
scart,
thunderbolt,
trs,
trs35mm,
vga,
xlr
}
public enum DeviceType
{
camCorder,
cassete,
computer_crt,
crt,
crt_plus,
dibox,
dreamweaver,
dvd1,
dvd2,
dvd3,
funstation,
handycam,
hi8,
minidisc,
minidisc_player,
minidv,
tv_4k,
tv_lcd,
tv_plasma,
tv_tft,
vhs1,
vhs2,
vhs3,
vhs_camera,
vhs_tape,
walkman,
wiird,
zbox
}
public static string ComponentName(ComponentType component)
{
switch (component)
{
case ComponentType.dvi:
return "dvi";
case ComponentType.hdmi:
return "hdmi";
case ComponentType.midi:
return "midi";
case ComponentType.mini_dvi:
return "mini-dvi";
case ComponentType.rca:
return "rca";
case ComponentType.s_video:
return "s-video";
case ComponentType.scart:
return "scart";
case ComponentType.thunderbolt:
return "thunderbolt";
case ComponentType.trs:
return "trs";
case ComponentType.trs35mm:
return "trs-35mm";
case ComponentType.vga:
return "vga";
case ComponentType.xlr:
return "xlr";
default:
return "";
}
}
public static string DeviceName(DeviceType device)
{
switch(device)
{
case DeviceType.camCorder:
return "cam recorder";
case DeviceType.cassete:
return "cassete";
case DeviceType.computer_crt:
return "computer crt";
case DeviceType.crt:
return "crt";
case DeviceType.crt_plus:
return "crt plus";
case DeviceType.dibox:
return "di box";
case DeviceType.dreamweaver:
return "dreamweaver";
case DeviceType.dvd1:
return "dvd 1";
case DeviceType.dvd2:
return "dvd 2";
case DeviceType.dvd3:
return "dvd 3";
case DeviceType.funstation:
return "funstation";
case DeviceType.handycam:
return "handycam";
case DeviceType.hi8:
return "hi8";
case DeviceType.minidisc:
return "minidisc";
case DeviceType.minidisc_player:
return "minisc player";
case DeviceType.minidv:
return "minidv";
case DeviceType.tv_4k:
return "tv 4k";
case DeviceType.tv_lcd:
return "tv lcd";
case DeviceType.tv_plasma:
return "tv plasma";
case DeviceType.tv_tft:
return "tv tft";
case DeviceType.vhs1:
return "vhs 1";
case DeviceType.vhs2:
return "vhs 2";
case DeviceType.vhs3:
return "vhs 3";
case DeviceType.vhs_camera:
return "vhs camera";
case DeviceType.vhs_tape:
return "vhs tape";
case DeviceType.walkman:
return "walkman";
case DeviceType.wiird:
return "wiird";
case DeviceType.zbox:
return "zbox";
default:
return "";
}
}
}
<file_sep>/Assets/Scripts/Game/MontageManager.cs
using UnityEngine;
using System.Collections;
using UnityEngine.UI;
using System;
using UnityEngine.SceneManagement;
//TODO: take care of game logic and money being made (get thumbs up from CableEntrance.cs)
public class MontageManager : MonoBehaviour
{
[Header("Timer")]
public Text timer;
[Tooltip("in seconds")]
private float _timeInterval;
[Header("Pop-Ups")]
public GameObject popUpBackground;
public CanvasGroup endGamePopUp;
private float _moneyMade = 0;
void Start()
{
_timeInterval = GameManager.TimerState;
StartCoroutine(Timer());
}
private IEnumerator Timer()
{
while (_timeInterval >= 0)
{
_timeInterval -= Time.deltaTime;
timer.text = Mathf.Floor(_timeInterval / 60).ToString("00") + (_timeInterval % 60).ToString(":00");
yield return null;
}
ShowEndGame();
}
private void ShowEndGame()
{
if (popUpBackground != null)
{
StopAllCoroutines();
popUpBackground.SetActive(true);
endGamePopUp.alpha = 1;
endGamePopUp.interactable = true;
endGamePopUp.blocksRaycasts = true;
_moneyMade = GameManager.numberOfConections * 10;
endGamePopUp.transform.GetChild(1).GetComponent<Text>().text = "You made like " + _moneyMade + "$";
timer.enabled = false;
GameManager.SetMoney(_moneyMade);
GameManager.ResetSelectedItens();
GameManager.numberOfConections = 0;
}
}
public void GoToMarket()
{
SceneManager.LoadScene("market");
}
public void GoToStoreFront()
{
StopAllCoroutines();
GameManager.TimerState = _timeInterval;
SceneManager.LoadScene("store-front");
}
public void GoToDevicePickUp()
{
StopAllCoroutines();
GameManager.TimerState = _timeInterval;
SceneManager.LoadScene("store-back-devices");
}
public void GoToAssembly()
{
StopAllCoroutines();
GameManager.TimerState = _timeInterval;
SceneManager.LoadScene("store-back-building");
}
}
<file_sep>/Assets/Scripts/Cables/UI/ShopUIManager.cs
using UnityEngine;
using System.Collections;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
public class ShopUIManager : MonoBehaviour {
public Text moneyText;
public float money;
void Start () {
moneyText.text = GameManager.GetTotalMoney().ToString();
money = GameManager.GetTotalMoney();
}
public void UpdateMoney(float price)
{
money -= price;
moneyText.text = money.ToString();
GameManager.SetMoney(money);
}
public void GoBack()
{
SceneManager.LoadScene("store-front");
GameManager.GetAllCables();
}
}
<file_sep>/Assets/Scripts/Cables/UI/FrontStoreUIManager.cs
using UnityEngine;
using System.Collections;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
public class FrontStoreUIManager : MonoBehaviour {
public GameObject briefingUI;
public Image firstDevice;
public Image secondDevice;
public Image entrance;
public Text price;
public Text totalTime;
private Image _popUpBackground;
private Briefing _briefing;
void Start () {
_briefing = GameManager.GetRandomBriefing();
_popUpBackground = GameObject.Find("PopUpBackground").GetComponent<Image>();
GameManager.GetSetBriefing = _briefing;
SetBriefingUI();
}
private void SetBriefingUI()
{
firstDevice.sprite = _briefing.firstDevice;
secondDevice.sprite = _briefing.secondDevice;
entrance.sprite = _briefing.entrance;
price.text = _briefing.price.ToString();
totalTime.text = Mathf.Floor(_briefing.totalTime / 60).ToString("00") + (_briefing.totalTime % 60).ToString(":00");
}
public void GotItBtn()
{
GameManager.TimerState = _briefing.totalTime;
StartCoroutine(WaitForAnimation(briefingUI.GetComponent<CanvasGroup>()));
StartCoroutine(Fade(_popUpBackground));
}
private IEnumerator WaitForAnimation(CanvasGroup cg)
{
Animation anim = cg.gameObject.GetComponent<Animation>();
anim.Play();
cg.interactable = false;
cg.blocksRaycasts = false;
while (anim.isPlaying)
{
yield return null;
}
SceneManager.LoadScene("store-back-cables");
        Destroy(cg.gameObject); // Destroy replaces the deprecated DestroyObject call
}
private IEnumerator Fade(Image background)
{
while(background.color.a > 0)
{
background.color = new Color(background.color.r, background.color.g, background.color.b, background.color.a - Time.deltaTime*0.5f);
yield return null;
}
}
}
<file_sep>/Assets/Scripts/Cables/CableEntrance.cs
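// Trigger zone on a device socket. When a draggable cable end with a matching
// connector type enters, the connection is counted and that cable end is
// locked in place.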
using UnityEngine;
using System.Collections;
public class CableEntrance : MonoBehaviour
{
public DeviceComponentHelper.ComponentType componentType;
public Entrance entranceType;
public enum Entrance
{
inEntrance,
outEntrance,
}
private string _componentName;
void Awake()
{
_componentName = DeviceComponentHelper.ComponentName(componentType);
}
void Update()
{
}
void OnTriggerEnter2D(Collider2D col)
{
GameObject colliderObj = col.gameObject;
if (colliderObj.tag == EntranceType())
{
if (colliderObj.GetComponent<DragCable>().canDrag && colliderObj.GetComponent<CableComponent>() != null && DeviceComponentHelper.ComponentName(colliderObj.GetComponent<CableComponent>().componentType) == _componentName)
{
//TODO: something saying good job
//TODO: something about the game logic that is missing... oh ya send info to montage manager
GameManager.numberOfConections += 1;
colliderObj.GetComponent<DragCable>().HasConnection();
}
else
{
//TODO: something happens
}
}
}
private string EntranceType()
{
switch (entranceType)
{
case Entrance.inEntrance:
return "cable-in";
case Entrance.outEntrance:
return "cable-out";
default:
return "";
}
}
}
<file_sep>/Assets/Scripts/Game/GameManager.cs
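// Static game state shared across scenes: the cable/device catalogues loaded
// from Resources, the player's money, the current briefing and remaining timer,
// plus the items selected for the current job.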
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System;
public static class GameManager
{
private static Dictionary<string, CableComponent> _cableList = new Dictionary<string, CableComponent>();
private static Dictionary<string, DeviceComponent> _deviceList = new Dictionary<string, DeviceComponent>();
private static List<GameObject> _cablePrefabs;
private static List<GameObject> _devicePrefabs;
private static Briefing _briefing;
private static GameObject _firsDevicePrefab;
private static GameObject _secondDevicePrefab;
private static List<GameObject> _selectedCables;
private static List<GameObject> _selectedDevices;
private static float _totalMoney = 10;
private static float _timerState = 30;
public static int numberOfConections = 0;
static GameManager()
{
GetAllCables();
BriefingManager.GetAllBriefings();
}
public static void SetMoney(float value)
{
_totalMoney = value;
}
public static float GetTotalMoney()
{
return _totalMoney;
}
public static List<GameObject> GetSelectedCables()
{
return _selectedCables;
}
public static List<GameObject> GetSelectedDevices()
{
return _selectedDevices;
}
public static void AddCableSelection(CableComponent component)
{
if (_selectedCables == null || _selectedCables.Count == 0)
{
_selectedCables = new List<GameObject>();
}
foreach (GameObject g in _cablePrefabs)
{
if ((DeviceComponentHelper.ComponentName(g.GetComponentInChildren<CableComponent>(true).componentType) ==
DeviceComponentHelper.ComponentName(component.componentType)) && !_selectedCables.Contains(g))
{
_selectedCables.Add(g);
break;
}
}
}
public static void AddDeviceSelection(DeviceComponent component)
{
if (_selectedDevices == null || _selectedDevices.Count == 0)
{
_selectedDevices = new List<GameObject>();
}
foreach (GameObject g in _devicePrefabs)
{
if ((DeviceComponentHelper.DeviceName(g.GetComponentInChildren<DeviceComponent>(true).deviceType) ==
DeviceComponentHelper.DeviceName(component.deviceType)) && !_selectedDevices.Contains(g))
{
_selectedDevices.Add(g);
break;
}
}
}
private static void SetCablePrefabList()
{
_cablePrefabs = new List<GameObject>();
_cablePrefabs.AddRange(Resources.LoadAll<GameObject>("Components/Cables"));
}
private static void SetDevicePrefabList()
{
_devicePrefabs = new List<GameObject>();
_devicePrefabs.AddRange(Resources.LoadAll<GameObject>("Components/Devices"));
}
public static Dictionary<string, CableComponent> GetAllCables()
{
SetCablePrefabList();
foreach (GameObject g in _cablePrefabs)
{
CableComponent componentComponent = g.GetComponentInChildren<CableComponent>(true);
if (!_cableList.ContainsKey(DeviceComponentHelper.ComponentName(componentComponent.componentType)))
{
_cableList.Add(DeviceComponentHelper.ComponentName(componentComponent.componentType), componentComponent);
}
}
return _cableList;
}
public static Dictionary<string, DeviceComponent> GetAllDevices()
{
SetDevicePrefabList();
foreach (GameObject g in _devicePrefabs)
{
DeviceComponent componentComponent = g.GetComponentInChildren<DeviceComponent>(true);
if (!_deviceList.ContainsKey(DeviceComponentHelper.DeviceName(componentComponent.deviceType)))
{
_deviceList.Add(DeviceComponentHelper.DeviceName(componentComponent.deviceType), componentComponent);
}
}
return _deviceList;
}
public static void ResetSelectedItens()
{
_selectedCables = new List<GameObject>();
_selectedDevices = new List<GameObject>();
}
public static List<GameObject> GetAllCablesGameObjects()
{
SetCablePrefabList();
return _cablePrefabs;
}
public static List<GameObject> GetAllDeviceGameObjects()
{
SetDevicePrefabList();
return _devicePrefabs;
}
public static List<GameObject> GetAllAvailableCables()
{
List<GameObject> list = new List<GameObject>();
foreach (CableComponent c in _cableList.Values)
{
if (c.IsAvailabe)
{
list.Add(c.gameObject);
}
}
return list;
}
public static List<GameObject> GetAllAvailableDevices()
{
List<GameObject> list = new List<GameObject>();
foreach (DeviceComponent c in _deviceList.Values)
{
if (c.IsAvailabe)
{
list.Add(c.gameObject);
}
}
return list;
}
public static CableComponent GetCableComponent(string key)
{
return _cableList[key];
}
public static DeviceComponent GetDeviceComponent(string key)
{
return _deviceList[key];
}
public static void UpdateCableAvailability(string key, bool available)
{
_cableList[key].IsAvailabe = available;
}
public static void UpdateDeviceAvailability(string key, bool available)
{
_deviceList[key].IsAvailabe = available;
}
public static Briefing GetSetBriefing
{
get { return _briefing; }
set { _briefing = value; }
}
public static Briefing GetRandomBriefing()
{
return BriefingManager.GetRandomBriefing();
}
public static float TimerState
{
get { return _timerState; }
set { _timerState = value; }
}
}
<file_sep>/Assets/Scripts/Briefing/BriefingInfo.cs
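// Inspector-friendly container for a single customer briefing; GetBriefing()
// copies the serialized fields into the plain Briefing object used at runtime.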
using UnityEngine;
using System.Collections;
public class BriefingInfo : MonoBehaviour {
public Sprite firstDevice;
public Sprite secondDevice;
public Sprite entrance;
public float price;
public float totalTime;
[Header("Prefabs")]
public GameObject firstDevicePrefab;
public GameObject secondDevicePrefab;
private Briefing _briefing;
public Briefing GetBriefing()
{
_briefing = new Briefing(firstDevice, secondDevice, entrance, price, totalTime, firstDevicePrefab, secondDevicePrefab);
return _briefing;
}
}
public class Briefing
{
public Sprite firstDevice;
public Sprite secondDevice;
public Sprite entrance;
public float price;
public float totalTime;
public GameObject firstDevicePrefab;
public GameObject secondDevicePrefab;
public Briefing(Sprite firstDevice, Sprite secondDevice, Sprite entrance, float price, float totalTime, GameObject firstDevicePrefab, GameObject secondDevicePrefab)
{
this.firstDevice = firstDevice;
this.secondDevice = secondDevice;
this.entrance = entrance;
this.price = price;
this.totalTime = totalTime;
this.firstDevicePrefab = firstDevicePrefab;
this.secondDevicePrefab = secondDevicePrefab;
}
}
<file_sep>/Assets/Scripts/Cables/DragCable.cs
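// Mouse-drag behaviour for one end of a cable. Once the other end has been
// connected the drag range is limited, and a connected end stops being
// draggable altogether.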
using UnityEngine;
using System.Collections;
using System;
[RequireComponent(typeof(CableComponent))]
public class DragCable : MonoBehaviour
{
public GameObject childJoint;
public DragCable otherEnd;
public float endDistances = 3;
[HideInInspector]
public bool canDrag = true;
private bool _drag;
private Rigidbody2D _myRigidbody;
private bool _wasKinematic;
void Start()
{
_myRigidbody = GetComponent<Rigidbody2D>();
_wasKinematic = _myRigidbody.isKinematic;
}
void OnMouseDrag()
{
if (canDrag && otherEnd.canDrag)
{
DragFunction();
}
else if (!otherEnd.canDrag && canDrag && Vector2.Distance(transform.position, otherEnd.transform.position) < endDistances)
{
DragFunction();
}
}
void FixedUpdate()
{
if (_drag)
{
transform.rotation = childJoint.transform.rotation;
}
}
public void HasConnection()
{
canDrag = false;
}
void OnMouseDown()
{
_drag = true;
_myRigidbody.isKinematic = true;
if (otherEnd != null && otherEnd.canDrag)
otherEnd.DisableKinemactic(_drag);
}
void OnMouseUp()
{
if (_drag == true)
{
_myRigidbody.isKinematic = _wasKinematic;
}
if (otherEnd != null && !otherEnd.canDrag && Vector2.Distance(transform.position, otherEnd.transform.position) >= endDistances)
{
_myRigidbody.isKinematic = false;
}
_drag = false;
}
void DragFunction()
{
// We are converting a 2D mouse coordinate to 3D
float distance_to_screen = Camera.main.WorldToScreenPoint(gameObject.transform.position).z;
Vector3 pos_move = Camera.main.ScreenToWorldPoint(new Vector3(Input.mousePosition.x, Input.mousePosition.y, distance_to_screen));
_myRigidbody.position = new Vector3(pos_move.x, pos_move.y, pos_move.z);
transform.rotation = childJoint.transform.rotation;
}
public void DisableKinemactic(bool isDragging)
{
_myRigidbody.isKinematic = !isDragging;
}
}<file_sep>/Assets/Splash.cs
using UnityEngine;
using System.Collections;
using UnityEngine.SceneManagement;
public class Splash : MonoBehaviour {
void Start()
{
StartCoroutine(FadeAndLoad());
}
private IEnumerator FadeAndLoad()
{
float timer = 2f;
while(timer > 0)
{
timer -= Time.deltaTime;
yield return null;
}
SceneManager.LoadScene(1);
}
}
<file_sep>/Assets/Scripts/Devices/DeviceComponent.cs
using UnityEngine;
using System.Collections;
public class DeviceComponent : MonoBehaviour {
public DeviceComponentHelper.DeviceType deviceType;
public string deviceName;
public Sprite image;
public float price;
public Sprite firstEntrance;
public Sprite secondEntrance;
public bool _isAvailable;
public bool IsAvailabe
{
get { return _isAvailable; }
set { _isAvailable = value; }
}
public float GetPrice()
{
return price;
}
}
<file_sep>/Assets/Scripts/Store/ShopItem.cs
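// One entry in the shop list. It is configured either as a cable or as a
// device and, on purchase, charges the player and marks the item as available.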
using UnityEngine;
using System.Collections;
using UnityEngine.UI;
//THIS WHOLE THING IS A MESS
public class ShopItem : MonoBehaviour
{
[Header("Children")]
public Text nameObject;
public Image imageObject;
public Image firstEntrance;
public Image secondEntrance;
public Text priceObject;
public Button buyButton;
public GameObject itemPrefab;
private CableComponent cableComponent;
private DeviceComponent deviceComponent;
private ShopUIManager _uiManager;
private bool _isCable;
void Awake()
{
_uiManager = GameObject.Find("Canvas").GetComponent<ShopUIManager>();
}
public void SetCable(CableComponent itemComponent)
{
_isCable = true;
cableComponent = itemComponent;
nameObject.text = cableComponent.cableName;
priceObject.text = cableComponent.price.ToString() + " $";
imageObject.sprite = cableComponent.image;
firstEntrance.sprite = cableComponent.firstEntrance;
if (cableComponent.secondEntrance != null)
{
secondEntrance.sprite = cableComponent.secondEntrance;
}
else
{
secondEntrance.gameObject.SetActive(false);
}
if (cableComponent.IsAvailabe)
{
buyButton.interactable = false;
}
}
public void SetDevice(DeviceComponent itemComponent)
{
_isCable = false;
deviceComponent = itemComponent;
nameObject.text = deviceComponent.deviceName;
priceObject.text = deviceComponent.price.ToString() + " $";
imageObject.sprite = deviceComponent.image;
firstEntrance.sprite = deviceComponent.firstEntrance;
if (deviceComponent.secondEntrance != null)
{
secondEntrance.sprite = deviceComponent.secondEntrance;
}
else
{
secondEntrance.gameObject.SetActive(false);
}
if (deviceComponent.IsAvailabe)
{
buyButton.interactable = false;
}
}
public void OnBuy()
{
if (_isCable)
{
if (_uiManager.money >= cableComponent.price)
{
if (cableComponent != null)
{
cableComponent.IsAvailabe = true;
}
buyButton.interactable = false;
_uiManager.UpdateMoney(cableComponent.price);
GameManager.UpdateCableAvailability(DeviceComponentHelper.ComponentName(cableComponent.componentType), true);
}
}
else
{
if (_uiManager.money >= deviceComponent.price)
{
if (deviceComponent != null)
{
deviceComponent.IsAvailabe = true;
}
buyButton.interactable = false;
_uiManager.UpdateMoney(deviceComponent.price);
                // This branch handles a device, so update the device catalogue
                // (the cable overload would look up a missing key here).
                GameManager.UpdateDeviceAvailability(DeviceComponentHelper.DeviceName(deviceComponent.deviceType), true);
}
}
}
}
<file_sep>/Assets/Scripts/Cables/CableManager.cs
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System;
public class CableManager : MonoBehaviour
{
public CableComponent[] allCables;
public Dictionary<string, CableComponent> availableComponents;
void Start()
{
availableComponents = GameManager.GetAllCables();
SetAvailableCables();
}
private void SetAvailableCables()
{
for (int i = 0; i < allCables.Length; i++)
{
if (!availableComponents[DeviceComponentHelper.ComponentName(allCables[i].componentType)].IsAvailabe)
{
allCables[i].gameObject.SetActive(false);
}
}
}
void Update()
{
}
}
| b6b6420caa3d72232c663bdc60afa03afab408d2 | [
"C#"
] | 17 | C# | anacat/Electronics-Super-Shop | 0dcabb032c390f5a0d261dd07f74f992be39eb03 | ceefefb4aabff7336913906e166013d5f71b1d00 | |
refs/heads/master | <repo_name>esellors/foodoo-nodb-project<file_sep>/server/foodoo.js
require('dotenv').config();
const express = require('express');
const app = express();
const mmc = require('./controllers/MenuMealController');
const { SERVER_PORT } = process.env;
app.use(express.static(`${__dirname}/../build`));
app.use((req, res, next) => {
console.log('...API request received');
next();
});
app.use(express.json());
app.get("/api/menumeal", mmc.getItems);
app.post("/api/menumeal", mmc.addItem);
app.put("/api/menumeal/:section/:category/:item/:destinationSection", mmc.moveItems);
app.delete("/api/menumeal/:category/:item", mmc.deleteItem);
app.put("/api/meals/:mealOfDay", mmc.addMealToMeals)
app.delete("/api/meals/:mealOfDay", mmc.deleteMealFromMeals)
app.listen(SERVER_PORT, () => {
console.log(`Chilling to Lo-fi ${8080} fm`);
})<file_sep>/src/components/AddItem.js
import React from 'react';
function AddItem(props) {
function addItemHandler(e) {
e.preventDefault();
const tgtCategory = e.target.parentNode.id;
const newItemInputField = e.target.previousSibling;
if(newItemInputField.value === '') {
alert("Please enter an item");
} else {
props.addItemRelay(tgtCategory, newItemInputField.value);
newItemInputField.value = '';
newItemInputField.focus();
}
}
function cancelAddItem(e) {
e = e || window.event;
var target = e.target || e.srcElement;
setTimeout(() => {
target.value = '';
target.parentNode.classList.add("hide_element");
}, 350);
}
return (
<form className="add_to_menu_form hide_element" id={props.id} onBlur={e => cancelAddItem(e)}>
<input className="add_item" placeholder="enter item..."/>
<input type="submit" className="add_btn" onClick={e => addItemHandler(e)} hidden />
<span id="cancel_add_item">x</span>
</form>
);
}
export default AddItem;<file_sep>/src/components/Header.js
import React, {Component} from 'react';
class Header extends Component {
constructor(props) {
super(props);
this.state = {
headerTxt: 'foodoo'
}
}
render() {
return (
<header>
<h2>---------------<span id="site_title">{this.state.headerTxt}</span></h2>
<h4>your daily meal planner</h4>
</header>
)
}
}
export default Header;<file_sep>/src/components/RenderMenuItems.js
import React from 'react';
function RenderMenuItems(props) {
const {categoryName, categoryData} = props;
return (
<>
{ categoryData
? categoryData.map((item, index) => {
return (
<p className={categoryName} key={index} onClick={e => props.moveItem(e)}>{item}<span onClick={e => props.deleteMenuItem(e)}>x</span></p>
)})
: null
}
</>
);
}
export default RenderMenuItems;<file_sep>/src/components/Menu.js
import React from 'react';
import AddItem from './AddItem';
import RenderMenuItems from './RenderMenuItems';
function Menu(props) {
function toggleShowForm(e) {
const category = e.target.parentNode.firstChild.nodeValue;
const clickedForm = document.getElementById(category);
const targetInputForm = clickedForm.firstChild;
const menuInputForms = document.getElementsByClassName('add_item_form');
Array.from(menuInputForms).forEach(el => {
el.firstChild.value = '';
el.classList.add("hide_element")
})
clickedForm.classList.remove("hide_element");
targetInputForm.focus();
}
function addItemRelay(tgtCategory, newItem) {
const newItemPackage =
{
category: tgtCategory,
item: newItem
};
props.addItem(newItemPackage);
}
const { drinks, mains, sides, desserts } = props.menu;
return (
<section id="menu">
<h1>MENU</h1>
<h4 className="menu_categories">drinks<span onClick={e => toggleShowForm(e)}>+</span></h4>
<AddItem id="drinks" addItemRelay={addItemRelay} />
<RenderMenuItems categoryName="drinks" moveItem={props.moveItem} deleteMenuItem={props.deleteMenuItem} categoryData={drinks} />
<h4 className="menu_categories">mains<span onClick={e => toggleShowForm(e)}>+</span></h4>
<AddItem id="mains" addItemRelay={addItemRelay} />
<RenderMenuItems categoryName="mains" moveItem={props.moveItem} deleteMenuItem={props.deleteMenuItem} categoryData={mains} />
<h4 className="menu_categories">sides<span onClick={e => toggleShowForm(e)}>+</span></h4>
<AddItem id="sides" addItemRelay={addItemRelay} />
<RenderMenuItems categoryName="sides" moveItem={props.moveItem} deleteMenuItem={props.deleteMenuItem} categoryData={sides} />
<h4 className="menu_categories">desserts<span onClick={e => toggleShowForm(e)}>+</span></h4>
<AddItem id="desserts" addItemRelay={addItemRelay} />
<RenderMenuItems categoryName="desserts" moveItem={props.moveItem} deleteMenuItem={props.deleteMenuItem} categoryData={desserts} />
</section>
)
}
export default Menu;<file_sep>/src/components/Meal.js
import React from 'react';
import MealsSelect from './MealsSelect';
function Meal(props) {
const { drinks, mains, sides, desserts } = props.meal;
return (
<section id="meal">
<div id="meal_header">
<h1>MEAL</h1>
<MealsSelect updateMealsHandler={props.updateMealsHandler} />
</div>
{drinks
? drinks.map((item, index) => {
return (
<p className="drinks" key={index} onClick={e => props.moveItem(e)}>{item}<span>(-)</span></p>
)
})
: null
}
{mains
? mains.map((item, index) => {
return (
<p className="mains" key={index} onClick={e => props.moveItem(e)}>{item}<span>(-)</span></p>
)
})
: null
}
{sides
? sides.map((item, index) => {
return (
<p className="sides" key={index} onClick={e => props.moveItem(e)}>{item}<span>(-)</span></p>
)
})
: null
}
{desserts
? desserts.map((item, index) => {
return (
<p className="desserts" key={index} onClick={e => props.moveItem(e)}>{item}<span>(-)</span></p>
)
})
: null
}
</section>
)
}
export default Meal;<file_sep>/src/App.js
import React, { Component } from 'react';
import './App.css';
import axios from 'axios';
import Referrer from './components/Referrer/Referrer';
import Header from './components/Header';
import Menu from './components/Menu';
import Meal from './components/Meal';
import Meals from './components/Meals';
import Footer from './components/Footer';
class App extends Component {
constructor(props) {
super(props);
this.state = {
menu: {},
meal: {},
meals: {}
}
this.addItem = this.addItem.bind(this);
this.moveItem = this.moveItem.bind(this);
this.deleteMenuItem = this.deleteMenuItem.bind(this);
this.updateMealsHandler = this.updateMealsHandler.bind(this);
this.deleteMealOfDayHandler = this.deleteMealOfDayHandler.bind(this);
}
componentDidMount() {
axios
.get("/api/menumeal")
.then(res => {
this.setState({
menu: res.data.menu,
meal: res.data.meal,
});
})
.catch(error => console.log(error))
}
addItem(newItemPackage) {
axios
.post("/api/menumeal/", newItemPackage)
.then(res => {
this.setState({
menu: res.data.menu,
meal: res.data.meal,
});
})
.catch(error => console.log(error))
}
moveItem(e) {
const section = e.target.parentNode.id;
const category = e.target.className;
const item = e.target.firstChild.data;
const destinationSection = section === "menu" ? "meal" : "menu";
axios
.put(`/api/menumeal/${section}/${category}/${item}/${destinationSection}`)
.then(res => {
this.setState({
menu: res.data.menu,
meal: res.data.meal
});
})
.catch(error => console.log(error))
}
deleteMenuItem(e) {
e.stopPropagation();
const category = e.target.parentNode.className;
const item = e.target.parentNode.firstChild.data;
axios
.delete(`/api/menumeal/${category}/${item}`)
.then(res => {
this.setState({
menu: res.data.menu,
});
})
.catch(error => console.log(error))
}
updateMealsHandler(resFromMealsSelectJs) {
const meal = resFromMealsSelectJs.data.meal;
const meals = resFromMealsSelectJs.data.meals;
this.setState({
meal: meal,
meals: meals
})
}
deleteMealOfDayHandler(responseFromMealsJs) {
this.setState({
meals: responseFromMealsJs.data
})
}
render() {
return (
<>
{ document.referrer === 'https://www.esellors.com/' ? <Referrer /> : null }
<div className="wrapper">
<Header />
<main>
<Meals meals={this.state.meals} deleteMealOfDayHandler={this.deleteMealOfDayHandler} />
<div id="menu_meal_selection">
<Menu menu={this.state.menu} addItem={this.addItem} moveItem={this.moveItem} deleteMenuItem={this.deleteMenuItem} />
<Meal meal={this.state.meal} moveItem={this.moveItem} updateMealsHandler={this.updateMealsHandler} />
</div>
</main>
<Footer />
</div>
</>
);
}
}
export default App;
<file_sep>/src/components/RenderMeals.js
import React from 'react';
function RenderMeals(props) {
const {mealName} = props;
return (
<>
{ mealName
? mealName.map((item, index) => {
return <li key={`${mealName}-${index}`}>{item}</li>
})
: null
}
</>
);
}
export default RenderMeals;<file_sep>/server/controllers/MenuMealController.js
let menuMeal = {
menu: { drinks: ['orange juice'], mains: ['eggs'], sides: ['hash browns'], desserts: ['cake'] },
meal: { drinks: [], mains: [], sides: [], desserts: [] },
meals: { breakfast: [], lunch: [], dinner: [] }
};
const addMealToMeals = (req, res) => {
const { mealOfDay } = req.params;
let mealOfDayDrinks = menuMeal.meal.drinks.splice(0);
let mealOfDayMains = menuMeal.meal.mains.splice(0);
let mealOfDaySides = menuMeal.meal.sides.splice(0);
let mealOfDayDesserts = menuMeal.meal.desserts.splice(0);
menuMeal.meals[mealOfDay] = [...mealOfDayDrinks, ...mealOfDayMains, ...mealOfDaySides, ...mealOfDayDesserts];
res.json(menuMeal);
}
const deleteMealFromMeals = (req, res) => {
const { mealOfDay } = req.params;
menuMeal.meals[mealOfDay] = [];
res.json(menuMeal.meals);
}
const getItems = (req, res) => {
res.json(menuMeal);
};
const addItem = (req, res) => {
const { category, item } = req.body;
menuMeal.menu[category].unshift(item);
res.json(menuMeal);
}
const moveItems = (req, res) => {
const { section, category, item, destinationSection } = req.params;
let index = menuMeal[section][category].findIndex(arrayItem => arrayItem === item);
let tgtItem = menuMeal[section][category].splice(index, 1).toString();
menuMeal[destinationSection][category].unshift(tgtItem);
res.json(menuMeal);
}
const deleteItem = (req, res) => {
const { category, item } = req.params;
console.log(req.params);
let index = menuMeal.menu[category].findIndex(arrayItem => arrayItem === item);
menuMeal.menu[category].splice([index], 1);
res.json(menuMeal);
};
module.exports = {
getItems,
addItem,
moveItems,
deleteItem,
addMealToMeals,
deleteMealFromMeals
}; | dbdc5802e328fa66d0fd9a1e586840ab0f1e2fa1 | [
"JavaScript"
] | 9 | JavaScript | esellors/foodoo-nodb-project | 237f29bcc35238e81a65897aebf5823a8fc6aa5b | 41e04c5ffccd2b5dc63bc8fb5cf2e7550bbd82ec | |
refs/heads/master | <repo_name>gpdrosa/Noticia<file_sep>/Noticia-master/src/br/edu/cesmac/si/noticia/testes/TestaNoticia.java
package br.edu.cesmac.si.noticia.testes;
import br.edu.cesmac.si.noticia.model.Jornalista;
import br.edu.cesmac.si.noticia.model.JornalistaTrainee;
import br.edu.cesmac.si.noticia.model.Noticia;
public class TestaNoticia {
public static void main(String[] args) {
Jornalista j1 = new Jornalista();
j1.setNome("<NAME>");
Noticia n1 = new Noticia();
n1.setJornalista(j1);
JornalistaTrainee j2 = new JornalistaTrainee();
j2.setNome("<NAME>");
Noticia n2 = new Noticia();
n2.setJornalista(j2);
}
}
<file_sep>/Noticia-master/src/br/edu/cesmac/si/noticia/testes/TestaJornalistaComNoticia.java
package br.edu.cesmac.si.noticia.testes;
import java.util.ArrayList;
import br.edu.cesmac.si.noticia.model.Jornalista;
import br.edu.cesmac.si.noticia.model.Noticia;
public class TestaJornalistaComNoticia {
public static void main(String[] args) {
Jornalista jornalista1 = new Jornalista();
jornalista1.setNome("<NAME>");
jornalista1.setEmail("<EMAIL>");
ArrayList<Noticia> noticiasJ1 = new ArrayList<>();
Noticia n1 = new Noticia();
n1.setTitulo("Provocando o caos e a destruição");
Noticia n2 = new Noticia();
n2.setTitulo("Guerra Mundial Z");
noticiasJ1.add(n1);
noticiasJ1.add(n2);
jornalista1.setNoticias(noticiasJ1);
jornalista1.mostrarDados();
ArrayList<Noticia> noticiasJ2 = new ArrayList<>();
Noticia n3 = new Noticia();
n3.setTitulo("Glória a <NAME>");
noticiasJ2.add(n3);
Jornalista jornalista2 = new Jornalista();
jornalista2.setNome("<NAME>");
jornalista2.setEmail("<EMAIL>");
jornalista2.setNoticias(noticiasJ2);
jornalista2.mostrarDados();
}
}
<file_sep>/Noticia-master/src/br/edu/cesmac/si/noticia/model/Editor.java
package br.edu.cesmac.si.noticia.model;
public class Editor extends Jornalista implements Avaliador {
@Override
public void revisar(Noticia noticia) {
System.out.println("Um Editor revisou a notícia");
}
}
<file_sep>/Noticia-master/src/br/edu/cesmac/si/noticia/testes/TestaConstrutor.java
package br.edu.cesmac.si.noticia.testes;
import br.edu.cesmac.si.noticia.model.Editoria;
import br.edu.cesmac.si.noticia.model.Jornalista;
import br.edu.cesmac.si.noticia.model.Noticia;
public class TestaConstrutor {
public static void main(String[] args) {
Jornalista j1 = null;
Editoria e1 = null;
Noticia n1 = null;
n1 = new Noticia(j1, e1);
n1.mostrarDados();
}
}
<file_sep>/Noticia-master/src/br/edu/cesmac/si/noticia/model/Avaliador.java
package br.edu.cesmac.si.noticia.model;
public interface Avaliador {
void revisar(Noticia noticia);
}
<file_sep>/Noticia-master/src/br/edu/cesmac/si/noticia/testes/TestaEditoriaDAO.java
package br.edu.cesmac.si.noticia.testes;
import java.util.List;
import br.edu.cesmac.si.noticia.dao.EditoriaDAO;
import br.edu.cesmac.si.noticia.model.Editoria;
public class TestaEditoriaDAO {
public static void main(String[] args) {
EditoriaDAO editoriaDAO = new EditoriaDAO();
List<Editoria> editorias = editoriaDAO.listarTodas();
for (Editoria e : editorias) {
System.out.println(e.getId() + " - " + e.getNome());
}
editoriaDAO.excluirPorId(2l);
editorias = editoriaDAO.listarTodas();
for (Editoria e : editorias) {
System.out.println(e.getId() + " - " + e.getNome());
}
}
}
<file_sep>/Noticia-master/src/br/edu/cesmac/si/noticia/model/PlanoDeSaude.java
package br.edu.cesmac.si.noticia.model;
public class PlanoDeSaude {
private String nome;
public PlanoDeSaude() {
}
public PlanoDeSaude(String nome) {
this.nome = nome;
}
public String getNome() {
return nome;
}
public void setNome(String nome) {
this.nome = nome;
}
}
| d0921ea430ebf9245e4952b8a23fa4d54904f49c | [
"Java"
] | 7 | Java | gpdrosa/Noticia | 6cec36a9b04a402fded8ec2149b73a804352ff9a | 65a32be61be346e504e6cc01d7bef24588fb0255 | |
refs/heads/master | <file_sep>package com.pyg.springsecurity.service.impl;
import com.pyg.springsecurity.mapper.SysMenuMapper;
import com.pyg.springsecurity.service.ISysMenuService;
import com.pyg.springsecurity.entity.SysMenu;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.springframework.stereotype.Service;
/**
* <p>
* 服务实现类
* </p>
*
* @author zouYunHao
* @since 2020-01-02
*/
@Service
public class SysMenuServiceImpl extends ServiceImpl<SysMenuMapper, SysMenu> implements ISysMenuService {
}
<file_sep>package com.pyg.springsecurity.mapper;
import com.pyg.springsecurity.entity.SysUserRole;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
* <p>
* Mapper 接口
* </p>
*
* @author zouYunHao
* @since 2020-01-02
*/
public interface SysUserRoleMapper extends BaseMapper<SysUserRole> {
SysUserRole selectByUserId(Integer userId);
}
<file_sep>package com.pyg.springsecurity.service;
import com.pyg.springsecurity.entity.SysUser;
import com.baomidou.mybatisplus.extension.service.IService;
/**
* <p>
* 系统用户 服务类
* </p>
*
* @author zouYunHao
* @since 2020-01-02
*/
public interface ISysUserService extends IService<SysUser> {
}
<file_sep>package com.pyg.springsecurty;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
/**
* @ClassName test
* @Description TODO
* @Date 2020/1/3 下午 06:25
* @Version 1.0
**/
public class test {
public static void main(String[] args) {
String encode = new BCryptPasswordEncoder().encode("123");
System.out.println(encode);
}
}
<file_sep>package com.pyg.springsecurity.mapper;
import com.pyg.springsecurity.entity.SysMenu;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
* <p>
* Mapper 接口
* </p>
*
* @author zouYunHao
* @since 2020-01-02
*/
public interface SysMenuMapper extends BaseMapper<SysMenu> {
}
<file_sep>package com.pyg.springsecurity.service.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import javax.annotation.Resource;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.pyg.springsecurity.entity.SysRoleMenu;
import com.pyg.springsecurity.entity.SysUser;
import com.pyg.springsecurity.entity.SysUserRole;
import com.pyg.springsecurity.mapper.SysRoleMenuMapper;
import com.pyg.springsecurity.mapper.SysUserMapper;
import com.pyg.springsecurity.mapper.SysUserRoleMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Service;
@Service
public class UserDetailsServiceImpl implements UserDetailsService{
@Resource
SysUserMapper userMapper;
@Resource
SysUserRoleMapper userRoleMapper;
@Resource
SysRoleMenuMapper roleMenuMapper;
Logger logger = LoggerFactory.getLogger(this.getClass());
@Override
public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
logger.debug("===============用户名:"+username);
SysUser user = userMapper.selectOne(new QueryWrapper<SysUser>().lambda().eq(SysUser::getUsername,username));
if(user == null){
throw new UsernameNotFoundException("用户[" + username + "]不存在");
}
//获取角色
SysUserRole userRole = userRoleMapper.selectByUserId(user.getId());
//获取菜单权限
List<String> menuCodeList = new ArrayList<String>();
if(userRole != null){
List<SysRoleMenu> roleMenuList = roleMenuMapper.selectByRoleId(userRole.getRoleId());
menuCodeList = roleMenuList.stream().map(roleMenu -> roleMenu.getMenuCode()).collect(Collectors.toList());
}else{
menuCodeList.add("none_menu");
}
return new User(username, user.getPassword(), user.getEnabled(), true, true, user.getAccountNonLocked(), AuthorityUtils.createAuthorityList(menuCodeList.toArray(new String[menuCodeList.size()])));
}
}
<file_sep>package com.pyg.springsecurity.mapper;
import com.pyg.springsecurity.entity.SysRole;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
* <p>
* Mapper 接口
* </p>
*
* @author zouYunHao
* @since 2020-01-02
*/
public interface SysRoleMapper extends BaseMapper<SysRole> {
}
<file_sep>package com.pyg.springsecurity.service.impl;
import com.pyg.springsecurity.entity.SysUser;
import com.pyg.springsecurity.service.ISysUserService;
import com.pyg.springsecurity.mapper.SysUserMapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
/**
* <p>
* 系统用户 服务实现类
* </p>
*
* @author zouYunHao
* @since 2020-01-02
*/
@Service
@Slf4j
public class SysUserServiceImpl extends ServiceImpl<SysUserMapper, SysUser> implements ISysUserService {
}
<file_sep>package com.pyg.springsecurity.mapper;
import com.pyg.springsecurity.entity.SysRoleMenu;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import java.util.List;
/**
* <p>
* Mapper 接口
* </p>
*
* @author zouYunHao
* @since 2020-01-02
*/
public interface SysRoleMenuMapper extends BaseMapper<SysRoleMenu> {
List<SysRoleMenu> selectByRoleId(Integer roleId);
}
<file_sep>package com.pyg.springsecurity.mapper;
import com.pyg.springsecurity.entity.SysUser;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
* <p>
* 系统用户 Mapper 接口
* </p>
*
* @author zouYunHao
* @since 2020-01-02
*/
public interface SysUserMapper extends BaseMapper<SysUser> {
SysUser selectByUsername(String username);
}
| 15b06d1b8ee6c446db7064be01c367566fc0b20e | [
"Java"
] | 10 | Java | AthylAlcohol/spring-securty | cfc49ac37f8b353b791a716d48c59fbccdb04113 | 4b67f6fa0c343d8bd295acb1cbfcd53664066d3c | |
refs/heads/main | <file_sep>var popup = d3.select(".g-popup-box");
d3.select(".g-popup-box-x").on("click", function(){
popup.classed("g-active", false);
})
if (!localStorage.iswimloggedin) {
localStorage.iswimloggedin = false
}
if (localStorage.iswimloggedin == "true") {
d3.select(".g-login").attr("data-state", "loggedin")
d3.select(".g-login-inner").text("Hi, <NAME>")
} else {
}
d3.select(".g-login").on("click", function(){
console.log("hi")
if (localStorage.iswimloggedin == "false") {
location.href = 'login.html';
} else {
location.href = 'client.html';
}
})
d3.selectAll(".g-button").on("click", function(){
var el = d3.select(this);
var action = el.attr("data-id");
console.log(localStorage.iswimloggedin)
if (action == "login") {
console.log("login")
var u = d3.select("input#name").property("value")
var p = d3.select("input#pw").property("value")
if (u == "sunlai" && p == "12345678") {
localStorage.iswimloggedin = true;
location.href = 'document.html';
} else {
d3.select(".g-bad-login").style("opacity", 0).transition().duration(100).style("opacity", 1)
}
} else if (action == "logout") {
localStorage.iswimloggedin = false;
location.href = "index.html"
} else if (localStorage.iswimloggedin == "false") {
console.log("hi")
location.href = "login.html"
} else {
// if (action == "request") {
popup.classed("g-active", true);
var popupinner = d3.select(".g-popup-inner-cont").html("");
console.log(action)
if (action == "data") {
popupinner.append("div.g-bold").html("Data request processed <div class='g-check'>✅</div>")
popupinner.append("div.g-text").html("An email has been sent to you with the data attached.")
} else if (action == "copy") {
popupinner.append("div.g-bold").html("Copy request processed <div class='g-check'>✅</div>")
popupinner.append("div.g-text").html("An email has been sent to you with a copy of the document attached.")
} else {
popupinner.append("div.g-bold").html("Original request processed <div class='g-check'>✅</div>")
popupinner.append("div.g-text").text("A copy of the original document will be delivered to you within three working days. An email has been sent to you with the relevant information.")
}
// }
}
})
function sendMail() {
Email.send({
SecureToken : "<PASSWORD>",
To : '<EMAIL>',
From : "<EMAIL>",
Subject : "Data request for 154170-RAI XXXX XXXX-20201210-LETTER-FROM LD",
Body : "This is the data for 154170-RAI XXXX XXXX-20201210-LETTER-FROM LD"
}).then(
message => alert(message)
);
} | ddaaf702a984962324911d67251b51ae2bc0d591 | [
"JavaScript"
] | 1 | JavaScript | iswim-hk/iswim-hk.github.io | 6b937a7756874e3b7ccfd26edee3c91e0eba83ea | 90fe0d87ecb2158669a9810351615d40bcc28599 | |
refs/heads/main | <file_sep>const staticDevRecipes = "pwa-recipes";
const assets = [
"/",
"/index.html",
"/recipe.html?id=1",
"/recipe.html?id=2",
"/recipe.html?id=3",
"/recipe.html?id=4",
"/css/main.css",
"/css/recipe.css",
"/css/style.css",
"/scripts/recipe.js",
"/scripts/main.js",
"/images/background.png",
"/images/burn.svg",
"/images/chef.svg",
"/images/clock.svg",
"/images/cook.png",
"/images/dish.svg",
"/images/heart.svg",
"/images/menu.svg",
"/images/recipe_02.webp",
"/images/recipe_03.webp",
"/images/recipe_04.webp",
"/images/recipe_05.webp",
];
self.addEventListener("install", (installEvent) => {
installEvent.waitUntil(
caches.open(staticDevRecipes).then((cache) => {
cache.addAll(assets);
})
);
});
self.addEventListener("fetch", (fetchEvent) => {
fetchEvent.respondWith(
caches.match(fetchEvent.request).then((res) => {
return res || fetch(fetchEvent.request);
})
);
});
<file_sep>if ("serviceWorker" in navigator) {
window.addEventListener("load", function () {
navigator.serviceWorker
.register("/serviceWorker.js")
.then((res) => console.log("Service worker registered"))
.catch((err) => console.log("Service worker not registered", err));
});
}
window.onload = init;
function init() {
//извлечь id рецепта из URL
let recipeId = window.location.search.substring(1).split("=")[1];
//найти нужный рецепт
let recipe = getRecipeById(recipeId);
//вставить сведения о рецепте на страницу
bindRecipe(recipe);
}
function getRecipeById(id) {
//В качестве примера вручную добавлены несколько рецептов.
//На практике сведения о рецептах должны извлекаться из базы данных
const recipe1 = {
id: 1,
name: "Курица с печеными овощами",
tags: ["Популярные", "Для духовки", "Из курицы"],
imageUrl: "./images/recipe_04.webp",
stats: { values: [50, 150, 4], labels: ["минут", "ккал", "порции"] },
ingredients: [
"Куриные ножки, 400 г",
"Картофель, 300 г",
"Морковь, 1 шт",
"Лук репчатый, 1 шт",
"Брокколи, 1 шт",
"Масло растительное, 2 ст.л.",
"Приправы, по вкусу",
],
cooking: [
`Очистите овощи, нарежьте картофель средними кусочками, морковь - кружочками, лук - полукольцами. Не нужно
слишком измельчать овощи, они должны сохранить форму и сочность при запекании.`,
`Курицу разрежьте на порционные куски, выложите на противень. Овощи разложите равномерно вокруг курицы.
Добавьте специи по вкусу, полейте растительным маслом (1-2 ст.л.). Поставьте курицу с овощами в
духовку на 40-45 минут при температуре 180 градусов. В процессе приготовления пару раз полейте курицу и
овощи выделившимся соком.`,
],
};
const recipe2 = {
id: 2,
name: "Тыквенный суп-пюре",
tags: ["Популярные", "Супы", "Вегетерианское"],
imageUrl: "./images/recipe_02.webp",
stats: { values: [20, 70, 4], labels: ["минут", "ккал", "порции"] },
ingredients: [
"Тыква, 400 г",
"Лук репчатый, 1 шт",
"Чеснок, 1 зубчик",
"Сливки 10%, 100 мл",
"Масло растительное, 2 ст.л.",
"Соль, по вкусу",
],
cooking: [
`Разогрейте в кастрюле растительное масло. Обжарьте измельченные лук и чеснок до мягкости.`,
`Тыкву мелко порежьте, добавьте к луку, обжаривайте 2–3 минуты. Добавьте воду, посолите.
Варить на среднем огне 10 минут до мягкости тыквы. Измельчите суп в блендере до однородного состояния,
влейте сливки. Снова поставьте на огонь и доведите до кипения.`,
`Разлейте по тарелкам, украсьте тыквенными семечками.`,
],
};
const recipe3 = {
id: 3,
name: "Рисовые макароны с креветками",
tags: ["Популярные", "Морепродукты"],
imageUrl: "./images/recipe_03.webp",
stats: { values: [30, 100, 5], labels: ["минут", "ккал", "порций"] },
ingredients: [
"Рисовая лапша, 250 г",
"Креветки, 750 г",
"Морковь, 1 шт",
"Лук репчатый, 1 шт",
"Баклажаны, 300 г",
"Болгарский перец, 200 г",
"Масло растительное, 8 ст.л.",
"Приправы, по вкусу",
],
cooking: [
`Довести воду до кипения и посолить. Рисовую лапшу варить в течение 3-4 минут – согласно инструкции на упаковке.`,
`Подготовить овощной набор – очистить морковь, лук, болгарский перец. Все овощи нарезать небольшими по размеру кусочками.`,
`Нарезать баклажан небольшими полосками, почистить креветки.`,
`Прогреть сковороду, влить растительное масло и выложить все овощи. Жарить 3-4 минуты. После этого добавить креветки. Жарить на слабом огне еще 4-5 минут.`,
`Переложить в сковороду к овощам и креветкам рисовую лапшу. Добавить соевый соус, специи по вкусу.`,
`Перемешать все компоненты и выложить лапшу в тарелку. Все, подавать к столу.`,
],
};
const recipe4 = {
id: 4,
name: "<NAME>",
tags: ["Популярные", "Супы"],
imageUrl: "./images/recipe_05.webp",
stats: { values: [90, 120, 6], labels: ["минут", "ккал", "порций"] },
ingredients: [
"Говядина, 1 кг",
"Рис, 120 г",
"Томатная паста, 2 ст.л.",
"Лук репчатый, 3 шт",
"Мука, 1 ст.л.",
"Грецкий орех, 50 г",
"Чеснок, 5 зубчиков",
"Гранатовый сок без сахара, 125 мл",
"Масло растительное, 2 ст.л.",
"Лавровый лист, 1 шт",
"Корень петрушки, 1 шт",
"Базилик, 4 шт",
"Приправы, по вкусу",
],
cooking: [
`Говядину нарежьте маленькими кусочками, положите в кастрюлю, залейте 8-10 стаканами холодной воды и поставьте
мясо вариться до полуготовности (около 40 минут). При закипании снимите пену.`,
`Лук очистите и нарежьте кубиками. Выложите лук в бульон.
Рис промойте и добавьте в кастрюлю с супом. Варите 20 минут.`,
`Через 20 минут добавьте половину зелени, соль и перец.`,
`Грецкие орехи измельчите в блендере и всыпьте в суп. Готовьте 5 минут. Добавьте в харчо хмели-сунели и гранатовый сок. Варите на слабом огне 5 минут.`,
`Кастрюлю с харчо снимите с огня. Добавьте смесь чеснока, зелени и острого перца. Кастрюлю закройте и укутайте большим полотенцем. Оставьте на 10 минут, затем разлейте по тарелкам.`,
],
};
const recipes = [];
recipes.push(recipe1);
recipes.push(recipe2);
recipes.push(recipe3);
recipes.push(recipe4);
//найти и вернуть рецепт с требуемым id
return recipes.find((element) => {
return element.id == id;
});
}
function bindRecipe(recipe) {
bindName(recipe.name);
bindTags(recipe.tags);
bindImage(recipe.imageUrl, recipe.name);
bindStats(recipe.stats);
bindIngredients(recipe.ingredients);
bindCookingInfo(recipe.cooking);
}
function bindName(name) {
document.title = name;
let header1 = document.getElementsByClassName("recipe-title")[0];
header1.innerHTML = name;
}
function bindTags(tags) {
//Найти на странице div с классом "tags"
let tagsDiv = document.getElementsByClassName("tags")[0];
for (let i = 0; i < tags.length; i++) {
//Программно создать HTML-элемент <p>
let taskP = document.createElement("p");
//Вставить содержимое элемента <p>
taskP.innerHTML = tags[i];
//Поместить элемент <p> внутрь div-контейнера
tagsDiv.appendChild(taskP);
}
}
function bindImage(imageUrl, name) {
//Найти на странице img с классом "recipe-img-big"
let recipeImg = document.getElementsByClassName("recipe-img-big")[0];
//Программно задать элементу img атрибуты src и alt
recipeImg.src = imageUrl;
recipeImg.alt = name;
}
function bindStats(stats) {
let statsDiv = document.getElementsByClassName("recipe-info")[0];
const statIcons = [
"./images/clock.svg",
"./images/burn.svg",
"./images/dish.svg",
];
const statAlts = ["Время приготовления", "Калории", "Количество порций"];
for (let i = 0; i < stats.values.length; i++) {
let statDiv = document.createElement("div");
statDiv.setAttribute("class", "recipe-stat");
let statIcon = document.createElement("img");
statIcon.src = statIcons[i];
statIcon.alt = statAlts[i];
let statP = document.createElement("p");
statP.innerHTML = `<span>${stats.values[i]}</span> ${stats.labels[i]}`;
statDiv.appendChild(statIcon);
statDiv.appendChild(statP);
statsDiv.appendChild(statDiv);
}
}
function bindIngredients(ingredients) {
let ingrList = document.getElementsByClassName("ingredient-list")[0];
for (let i = 0; i < ingredients.length; i++) {
let ingrLi = document.createElement("li");
ingrLi.innerHTML = ingredients[i];
ingrList.appendChild(ingrLi);
}
}
function bindCookingInfo(cooking) {
let cookingSection = document.getElementsByClassName("cooking")[0];
for (let i = 0; i < cooking.length; i++) {
let cookP = document.createElement("p");
cookP.innerHTML = cooking[i];
cookingSection.appendChild(cookP);
}
}
| fd65fd79d10ff052ab42e9327266772d1aadf25a | [
"JavaScript"
] | 2 | JavaScript | AyurM/PWA-Example | 04fa9a284a639ac0d1774aa97bf3ca8cf36990d0 | c96603b0703361e1c6d1774955ea0d4e22831521 | |
refs/heads/master | <file_sep>import useSound from 'react-guitar-sound'
import E2 from 'react-guitar-sound/resources/E2.ogg'
import D3 from 'react-guitar-sound/resources/D3.ogg'
import G3 from 'react-guitar-sound/resources/G3.ogg'
import E4 from 'react-guitar-sound/resources/E4.ogg'
import useKey from './key'
const samples = { E2, D3, G3, E4 }
export default function useClassicSound(
strings: number[],
tuning: number[],
muted?: boolean
) {
const { play, strum, ...rest } = useSound(samples, strings, tuning, muted)
useKey(
() => true,
e => {
const string = parseInt(e.key) - 1
string >= 0 && string < tuning.length && play(string)
},
[tuning, play]
)
useKey('w', () => strum(true), [strum])
useKey('s', () => strum(), [strum])
return { play, strum, ...rest }
}
<file_sep>import { Styles, Theme } from 'react-select'
export const className = 'font-semibold text-gray-700 hover:shadow rounded'
export const theme = (theme: Theme) => ({
...theme,
borderRadius: 4,
colors: {
...theme.colors,
neutral20: '#e2e8f0',
neutral40: 'rgba(203, 213, 224)'
}
})
export const styles: Partial<Styles> = {
singleValue: provided => ({ ...provided, color: 'inherit' }),
menu: provided => ({ ...provided, width: '18em', zIndex: 3 }),
control: (provided, state) => ({
...provided,
borderColor: state.theme.colors.neutral20,
borderWidth: '2px',
'&:hover': {
borderColor: state.theme.colors.neutral40
},
height: '2.5rem',
boxShadow: state.isFocused && `0 0 0 3px rgba(66, 153, 225, 0.5)`
})
}
<file_sep>import { SamplerOptions } from 'tone'
import { useEffect, useState, useCallback } from 'react'
import makePlayer, { Player } from './util/player'
export default function useSound(
samples: SamplerOptions['urls'],
fretting: number[],
tuning: number[],
muted?: boolean
) {
const [player, setPlayer] = useState<Player>()
const [playing, setPlaying] = useState(tuning.map(() => false))
useEffect(() => {
const promise = makePlayer(samples, tuning, setPlaying)
promise.then(setPlayer)
return () => {
setPlayer(undefined)
promise.then(player => {
player.dispose()
})
}
}, [samples, tuning])
const play = useCallback(
(string: number, when: number = 0) => {
if (!muted) player?.play(string, fretting[string] ?? 0, when)
},
[muted, player, fretting]
)
const strum = useCallback(
(up?: boolean) =>
tuning.forEach((_, i) => play(!up ? tuning.length - i - 1 : i, 0.05 * i)),
[tuning, play]
)
return { play, strum, playing }
}
<file_sep>import { Frequency, Sampler, SamplerOptions, immediate } from 'tone'
export type Player = {
play: (string: number, fret: number, when: number) => Promise<void>
dispose: () => void
}
const toMidi = (note: string | number) => {
if (typeof note === 'string') return Frequency(note).toMidi()
return note
}
const closest = (midi: number, samples: SamplerOptions['urls']) => {
let min = Object.keys(samples)[0]
Object.keys(samples).forEach(key => {
if (Math.abs(midi - toMidi(key)) < Math.abs(midi - toMidi(min))) {
min = key
}
})
return { [min]: samples[min] }
}
export default async (
samples: SamplerOptions['urls'],
tuning: number[],
onChange: (playing: boolean[]) => void
): Promise<Player> => {
const synths = await Promise.all(
tuning.map(
midi =>
new Promise<Sampler>(resolve => {
const synth: Sampler = new Sampler(closest(midi, samples), () =>
resolve(synth)
).toDestination()
})
)
)
const resolvers: Partial<{ [K: number]: (change?: boolean) => void }> = {}
const playing = tuning.map(() => false)
const setPlaying = (string: number, value: boolean) => {
if (playing[string] !== value) {
playing[string] = value
setTimeout(() => onChange([...playing]), 0)
}
}
return {
play: (string, fret, when = 0) =>
new Promise(resolve => {
resolvers[string]?.(when === 0)
if (fret < 0) return resolve()
const startTimeout = setTimeout(
() => setPlaying(string, true),
when * 1000
)
const endTimeout = setTimeout(
(resolvers[string] = change => {
delete resolvers[string]
clearTimeout(startTimeout)
clearTimeout(endTimeout)
resolve()
if (!change) setPlaying(string, false)
}),
3000 + when * 1000
)
synths[string].triggerAttackRelease(
Frequency(tuning[string] + fret, 'midi').toFrequency(),
4,
immediate() + when
)
}),
dispose: () => synths.map(synth => synth.dispose())
}
}
<file_sep># React-Guitar · [](https://www.npmjs.com/package/react-guitar)
A beautiful and accessible guitar component for React.
See https://react-guitar.com for a live demo.

[](https://codesandbox.io/s/interesting-breeze-ll7zh)
## Usage
```
npm i react-guitar
```
```jsx
import React from 'react'
import { render } from 'react-dom'
import Guitar from 'react-guitar'
render(
<Guitar strings={[0, 1, 2, 2, 0, -1]} />,
document.getElementById('root')
)
```
Check out [the storybook](https://react-guitar.com/storybook) for more advanced examples.
### Props
| Name | Description |
| -------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `id` | An optional id in case several guitars are on the screen at the same time. This is used to generate the radio button names used internally which must be unique. If not specified an autoincremented id will be generated. |
| `className` | A CSS class string to apply to the container element. |
| `strings` | An array where each number represents the fret the string is pressed on (`0` means open string and `-1` muted). `[0, 1, 2, 2, 0, -1]` is an `A minor` in a standard guitar and `[3, 0, 0, 0]` is a `C major` in a ukelele. |
| `frets` | An object with the shape `{ from: number amount: number }` to configure the frets of the guitar (`{ from: 0, amount: 22 }` by default). It can start on any fret which is useful for displaying just a chord instead of the whole guitar. |
| `lefty` | A boolean to configure the guitar for left handed people like me. |
| `center` | A boolean to indicate if the current fretting should be centered. If set to `true` the guitar horizontal scroll will be set so that the middle fret is centered. |
| `renderFinger` | A function `(string: number, fret: number) => JSX.Element` that will be used to render the content of the white bubble used for the fingers. This can be used to render the note name. |
| `theme` | A theme object to customise the guitar look and feel. See [Theming](#theming). |
| `playOnHover` | A boolean to indicate if hovering with the mouse should trigger play events. |
| `onChange` | A function `(strings: number[]) => void` that will be called when a string is press/unpressed. If not present the guitar will be read only. |
| `onPlay` | A function `(string: number) => void` that will be called each time the user plays a string (hovering with the mouse). This can be used to play the sound of the string. |
### Hooks
#### useSound
In order to enable sound playing `react-guitar` offers the [useSound](packages/react-guitar-sound) hook:
```
npm i react-guitar react-guitar-sound react-guitar-tunings
```
```jsx
import React, { useMemo } from 'react'
import { render } from 'react-dom'
import Guitar from 'react-guitar'
import { standard } from 'react-guitar-tunings'
import useSound from 'react-guitar-sound'
import E2 from 'react-guitar-sound/resources/E2.ogg'
import D3 from 'react-guitar-sound/resources/D3.ogg'
import G3 from 'react-guitar-sound/resources/G3.ogg'
import E4 from 'react-guitar-sound/resources/E4.ogg'
const samples = { E2, D3, G3, E4 }
function SampleGuitarWithSound() {
const strings = useMemo(() => [0, 1, 2, 2, 0, -1], [])
const { play, strum } = useSound(samples, strings, standard)
return <Guitar strings={strings} onPlay={play} />
}
render(<SampleGuitarWithSound />, document.getElementById('root'))
```
It receives:
| Name | Description |
| -------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `samples` | A map from note names to audio files representing the samples. `react-guitar` offers 4 samples out of the box recorded from a Spanish guitar. |
| `strings` | The same value passed as the `strings` prop to the `<Guitar />` component with the current fretting. |
| `tuning` | An array of midi values for each string. `react-guitar` offers 4 tunings out of the box (`standard`, `ukelele`, `rondeña` and `dadgad`). |
And will return an object containing:
| Name | Description |
| ------- | --------------------------------------------------------------------- |
| `play` | A function that receives a string number and plays its current sound. |
| `strum` | A function that will strum all the strings of the guitar. |
### Theming
`react-guitar` look and feel can be customised by passing the `theme` prop. A theme is an object describing the customisable properties of the guitar:
```tsx
{
description: string // for screen readers
color: string
nut: { color: string }
fret: {
color: string
separator: {
color: string
radius?: boolean
shadow?: boolean
width?: 'sm' | 'md'
}
marker?: (fret: number) => JSX.Element | null
counter: { color: string }
}
string: { color: (string: number) => string }
finger: { text: { color: string }; color: string }
}
```
See https://react-guitar.com/storybook/?path=/story/guitar--theming for an interactive example.
By default, the guitar is styled as a Spanish guitar, but some other themes are available:
- [react-guitar-theme-dark](packages/react-guitar-theme-dark): A dark theme.
- [react-guitar-theme-coco](packages/react-guitar-theme-coco): A theme for the guitar from the [Coco](<https://en.wikipedia.org/wiki/Coco_(2017_film)>) film.
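Any of these can be passed straight to the `theme` prop; a minimal sketch, assuming the theme package exposes its theme object as the default export:

```jsx
import React from 'react'
import Guitar from 'react-guitar'
import darkTheme from 'react-guitar-theme-dark' // assumed default export

// Sketch: a read-only guitar rendered with the dark theme.
const DarkGuitar = () => <Guitar strings={[0, 1, 2, 2, 0, -1]} theme={darkTheme} />
```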
## Developing
- `yarn build` will build the component and the site; this is mandatory the first time you clone the repo.
- `yarn start` will spin up the storybook and the site and all the packages in watch mode.
| 1db4dbc25cd143b8b174ac84719739b19a24d876 | [
"Markdown",
"TypeScript"
] | 5 | TypeScript | marcoslimacom/react-guitar | 99d92534e2332ce7b21689d9636482f16547d5ef | adad647ac0e6612f859fa3d9c1f5f2174ecfd95f | |
refs/heads/master | <repo_name>vishalmodem/ExampleProject<file_sep>/ExampleProject/Controllers/ViewController.swift
//
// ViewController.swift
// ExampleProject
//
// Created by <NAME> on 6/19/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
import CoreData
import RequestManagerFramework
import CustomButtonFramework
class ViewController: UIViewController {
var actors : [Actor] = [Actor]()
var hollyActors = [HollyActors]()
let context = (UIApplication.shared.delegate as! AppDelegate).persistentContainer.viewContext
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
loadData()
}
@IBAction func Make(_ sender: UIButton) {
RequestManager.shared.getActorsData({ (actors) in
self.actors = actors
if self.hollyActors.count < 6 {
self.hollyActors.forEach({ (hA) in
self.context.delete(hA)
self.saveData()
})
self.loadData()
let operationQueue = OperationQueue()
operationQueue.maxConcurrentOperationCount = 1
for actor in actors{
let operation = {
let hA = HollyActors(context: self.context)
hA.image = actor.image
hA.name = actor.name
self.hollyActors.append(hA)
self.saveData()
}
operationQueue.addOperation(operation)
}
}
            // Perform the segue only after the actors have been fetched; otherwise
            // prepare(for:) would hand an empty list to the next screen.
            DispatchQueue.main.async {
                self.performSegue(withIdentifier: "segue", sender: self)
            }
        })
    }
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if segue.identifier == "segue" {
if let destinationVC = segue.destination as? ActorsTVC {
guard let s = sender as? ViewController else{ return }
destinationVC.actors = s.actors
}
}
}
@IBAction func unwindSegue(_ sender: UIStoryboardSegue){
}
private func saveData(){
do{
try context.save()
} catch{
print(error)
}
}
private func loadData(){
        let request: NSFetchRequest<HollyActors> = HollyActors.fetchRequest()
do{
hollyActors = try context.fetch(request)
print(hollyActors.count)
}
catch{
print(error)
}
}
}
<file_sep>/ExampleProject/Controllers/ImageShowVC.swift
//
// ImageShowVC.swift
// ExampleProject
//
// Created by <NAME> on 6/20/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
import CustomButtonFramework
class ImageShowVC: UIViewController {
@IBOutlet weak var NameLbl: UILabel!
@IBOutlet weak var imageView: UIImageView!
var image :UIImage?
var name : String?{
didSet{
navigationItem.title = name
}
}
override func viewDidLoad() {
super.viewDidLoad()
guard let img = image else { return }
imageView.image = img
NameLbl.text = name
}
@IBAction func dismissClicked(_ sender: UIButton) {
}
}
<file_sep>/ExampleProject/Controllers/ActorTableViewCell.swift
//
// ActorTableViewCell.swift
// ExampleProject
//
// Created by <NAME> on 6/21/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
import RequestManagerFramework
class ActorTableViewCell: UITableViewCell {
@IBOutlet weak var imageViewer: UIImageView!
@IBOutlet weak var nameLbl: UILabel!
override func awakeFromNib() {
super.awakeFromNib()
// Initialization code
}
func updateCell(actor:Actor){
nameLbl.text = actor.name
let url = URL(string: actor.image!)
if let imageUrl = url {
do {
let data = try Data(contentsOf: imageUrl)
self.imageViewer.image = UIImage(data: data)
} catch{
print(error.localizedDescription)
}
}
}
}
<file_sep>/ExampleProject/Controllers/ActorsTVC.swift
//
// ActorsTVC.swift
// ExampleProject
//
// Created by <NAME> on 6/20/18.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
import CoreData
import RequestManagerFramework
class ActorsTVC: UITableViewController {
var actors : [Actor]?
var hollyActors = [HollyActors]()
let context = (UIApplication.shared.delegate as! AppDelegate).persistentContainer.viewContext
override func viewDidLoad() {
super.viewDidLoad()
tableView.estimatedRowHeight = tableView.rowHeight
tableView.rowHeight = UITableViewAutomaticDimension
loadData()
}
override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return actors?.count ?? 0
}
override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "cell", for: indexPath) as! ActorTableViewCell
cell.updateCell(actor: actors![indexPath.row])
return cell
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if segue.identifier == "segue1" {
if let destinationVC = segue.destination as? ImageShowVC {
//guard let actors = self.actors else{ return}
guard let cell = sender as? UITableViewCell else { return }
guard let indexpath = tableView.indexPath(for: cell), let imgURL = hollyActors[indexpath.row].image, let name = hollyActors[indexpath.row].name else { return }
do{
guard let url = URL(string: imgURL) else{ return}
let data = try Data(contentsOf: url)
let image = UIImage(data: data)
destinationVC.image = image
destinationVC.name = name
} catch {
print(error.localizedDescription)
}
}
}
}
override func tableView(_ tableView: UITableView, canEditRowAt indexPath: IndexPath) -> Bool {
return true
}
override func tableView(_ tableView: UITableView, commit editingStyle: UITableViewCellEditingStyle, forRowAt indexPath: IndexPath) {
if editingStyle == .delete {
// Delete the row from the data source
actors?.remove(at: indexPath.row)
context.delete(hollyActors[indexPath.row])
do{
try context.save()
} catch{
print(error)
}
tableView.deleteRows(at: [indexPath], with: .fade)
loadData()
}
}
private func loadData(){
        let request: NSFetchRequest<HollyActors> = HollyActors.fetchRequest()
do{
hollyActors = try context.fetch(request)
}
catch{
print(error)
}
}
}
| e0869f8abec0dd669a1496315fcf032ff0064f7b | [
"Swift"
] | 4 | Swift | vishalmodem/ExampleProject | ff9303df10a851f14862c9a71f4c9b9820c6c2c8 | 8838a2a8d6d61b0c01d1989fea2731653cd3284c | |
refs/heads/master | <file_sep>var pkgmerge = require('../pkgmerge');
var expect = require('chai').expect;
describe('pkgmerge', function(){
it('is a function', function(){
expect(pkgmerge).to.be.a('function');
});
it('returns an object', function(){
expect(pkgmerge()).to.be.an('object');
});
it('accepts a string or array of strings as parameter', function(){
expect(pkgmerge('keywords')).to.be.an('array');
expect(pkgmerge(['keywords'])).to.be.an('array');
});
it('returns an object representation of package.json when given no arguments', function(){
var pkg = pkgmerge();
expect(pkg).to.have.a.property('name');
expect(pkg.name).to.equal('pkgmerge');
expect(pkg).to.have.a.property('version');
expect(pkg).to.have.a.property('description');
});
it('returns a subtree from package.json when given an argument', function(){
var keywords = pkgmerge('keywords');
expect(keywords).to.be.an('array');
});
});<file_sep># pkgmerge
Move those pesky configuration files to your package.json with pkgmerge
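A minimal usage sketch, based on the behaviour exercised by the test suite (the require name assumes a local install of the package):

```js
var pkgmerge = require('pkgmerge');

// With no arguments the whole package.json comes back as an object...
var pkg = pkgmerge();
console.log(pkg.name, pkg.version);

// ...and passing a key (or an array of keys) returns just that subtree.
var keywords = pkgmerge('keywords');
console.log(keywords);
```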
| cf07d6dab177ad4fc07af1d3a0698b8350dca2e7 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | Download/pkgmerge | 5a80b5ac7413e30b9af7717eeeecdbd4e34c2069 | 52fd9ae75b7c86f1e9b1799e57bf21e1fefab7f4 | |
refs/heads/master | <repo_name>juliecoding/node-basics<file_sep>/notes-node/playground/debugging.js
var person = {
name: 'Julie'
};
person.age = 29;
debugger;
person.name = 'Sam';
debugger;
console.log(person);<file_sep>/notes-node/playground/arrow-function.js
var square = x => x * x;
console.log(square(9));
var user = {
name: 'Julie',
// sayHi: () => {
    //     console.log(arguments) // arrow functions have no `arguments` object of their own
    //     console.log(`Hello. I'm ${this.name}`); // doesn't work: arrow functions don't bind their own `this`
// },
sayHiAlt () {
console.log(arguments);
console.log(`Hi. I'm ${this.name}`);
}
}
user.sayHiAlt(1, 2, 3); | 298c3a2af421a7f51beffe148418c3d205a9e901 | [
"JavaScript"
] | 2 | JavaScript | juliecoding/node-basics | c9692fb107fc6bd6667f63dfe8030c5df0c94432 | c4b3cc32e934bfd0aacf9df4778227f810e2e285 | |
refs/heads/master | <repo_name>introtopcpgrmblox/dreampredictor<file_sep>/js/index.js
var restart = 0;
var dream = ["nightmare", "dream", "nightmare"];
var action = ["flying over", "sleeping in", "running through", "being chased in"];
// Renamed from `location` so the list doesn't clobber the browser's window.location.
var place = ["Texas", "The White House", "Your home", "A Giant Clock", "A field"];
var caviat = ["with fog", "while your phone is dead", "with nobody else around", "in total silence"];
var wake = ["sweating", "pleasantly"];

$("button").click(sayFortune)

function sayFortune() {
  // Valid indices run 0..length-1, so no +1 offset and no placeholder "1" entries.
  var random = Math.floor(Math.random() * dream.length);
  var random2 = Math.floor(Math.random() * action.length);
  var random3 = Math.floor(Math.random() * place.length);
  var random4 = Math.floor(Math.random() * caviat.length);
  var random5 = Math.floor(Math.random() * wake.length);

  $("#output").text(dream[random]);
  $("#output2").text(action[random2]);
  $("#output3").text(place[random3]);
  $("#output4").text(caviat[random4]);
  $("#output5").text(wake[random5]);
}<file_sep>/README.txt
A Pen created at CodePen.io. You can find this one at https://codepen.io/BloxytheFox/pen/MzpZPx.
| 6eceee61f0550cca6f065be2f7ad6a2b905b7aa2 | [
"JavaScript",
"Text"
] | 2 | JavaScript | introtopcpgrmblox/dreampredictor | 774ff1ad72e6563f826cd7ba7fa9bbc445ae3dd6 | fd06fb3ad42034bfd0c11da3fc0171d2cf3587c4 | |
refs/heads/master | <file_sep>package pkg12;
public class Class_12 {
}
| 96d506a177685ac773b1bf8c8cefa736c3c5077f | [
"Java"
] | 1 | Java | smy1583/jan_12 | f6c1dc19773ed9fcb0456ef4673a76f0cb670c5b | a517784ea1fd9fc8bfea63a9bb316ff53d7084f5 | |
refs/heads/master | <repo_name>Sebastan12/Necroc<file_sep>/README.md
# Necroc
Welcome :D This is a palce to find Scripts and resources iuse in my Tutorials!<file_sep>/LinodeAutomaticDeployScripts/updatenodeapp.sh
#! /bin/bash
docker stop nodeapp
docker rm nodeapp
docker rmi sebastan12/firstnodeapp:master
docker pull sebastan12/firstnodeapp:master
docker run -t -d -p 80:8080 --name nodeapp sebastan12/firstnodeapp:master
| 11ce59febe029214ff9dc5d373d79a7527dd8f61 | [
"Markdown",
"Shell"
] | 2 | Markdown | Sebastan12/Necroc | becc5fccb844487e7fe2c9abf136cc6d28348f11 | 68d660f15ca87ea579c9e3150fa575de56e414d0 | |
refs/heads/main | <file_sep>import torch
from collections import defaultdict
from itertools import chain
import os
class TransferTransfoConstants:
SPECIAL_TOKENS = ["<bos>", "<eos>", "<speaker1>", "<speaker2>", "<pad>"]
MODEL_INPUTS = ["input_ids", "mc_token_ids", "lm_labels", "mc_labels", "token_type_ids"]
PADDED_INPUTS = ["input_ids", "lm_labels", "token_type_ids"]
ATTR_TO_SPECIAL_TOKEN = {
'bos_token': '<bos>',
'eos_token': '<eos>',
'pad_token': '<pad>',
'additional_special_tokens': ["<speaker1>", "<speaker2>", "<end>", "<eot>"]
}
ADDITIONAL_TOKENS = ["_nofact"]
class TransferTransfoWithKnowledgeConstants(object):
SPECIAL_TOKENS = TransferTransfoConstants.SPECIAL_TOKENS
MODEL_INPUTS = TransferTransfoConstants.MODEL_INPUTS
PADDED_INPUTS = TransferTransfoConstants.PADDED_INPUTS
ADDITIONAL_TOKENS = TransferTransfoConstants.ADDITIONAL_TOKENS + [
"<person>", "</person>",
"<genre>", "</genre>",
"<movie_title>", "</movie_title>",
]
ATTR_TO_SPECIAL_TOKEN = TransferTransfoConstants.ATTR_TO_SPECIAL_TOKEN
def collate_batch_elements(batch, tokenizer, device, pad_left=False):
batch_inputs = defaultdict(list)
for instance in batch:
for field, data in instance.items():
batch_inputs[field].append(data)
pad_token_map = {
"labels": -100,
"input_ids": tokenizer.eos_token_id,
"default": 0
}
padded_inputs = ["input_ids", "labels", "token_type_ids"]
model_inputs = ["input_ids", "labels", "token_type_ids"]
if pad_left:
model_inputs.append("attention_mask")
padded_batch = pad_batch_items(batch_inputs, pad_token_map, padded_inputs, pad_left)
tensorized_input = []
for input_name in model_inputs:
tensor = torch.tensor(padded_batch[input_name], device=device)
tensorized_input.append(tensor)
return tensorized_input
def collate_transfertransfo_batch_elements(batch, tokenizer, args):
batch_inputs = defaultdict(list)
chained_batch = chain(*batch)
for instance in chained_batch:
for field, data in instance.items():
batch_inputs[field].append(data)
pad_token = tokenizer.convert_tokens_to_ids(TransferTransfoConstants.SPECIAL_TOKENS[-2])
padded_dataset = pad_dataset(batch_inputs, padding=pad_token)
tensorized_input = []
batch_size = tuple([len(batch_inputs[TransferTransfoConstants.MODEL_INPUTS[0]]) // args.num_candidates])
for input_name in TransferTransfoConstants.MODEL_INPUTS:
tensor = torch.tensor(padded_dataset[input_name])
if input_name != "mc_labels":
tensor = tensor.view((-1, args.num_candidates) + tensor.shape[1:])
else:
tensor = torch.ones(size=batch_size, dtype=torch.long) * (args.num_candidates - 1)
tensorized_input.append(tensor)
return tensorized_input
def pad_dataset(dataset, padding=0):
""" Pad the dataset. This could be optimized by defining a Dataset class and padding at the batch level, but this is simpler. """
max_l = max(len(x) for x in dataset["input_ids"])
for name in TransferTransfoConstants.PADDED_INPUTS:
dataset[name] = [x + [padding if name != "lm_labels" else -100] * (max_l - len(x)) for x in dataset[name]]
return dataset
def pad_batch_items(batch_items, pad_token_map, padded_inputs, pad_left):
max_seq_len = max(len(x) for x in batch_items["input_ids"])
default_pad_token = pad_token_map["default"]
if pad_left:
# Attention mask is necessary to avoid attending on left padding tokens
# this isn't a problem for the right-padding case since
# the logits from the right padding tokens can be ignored.
# See: https://github.com/huggingface/transformers/issues/808
batch_items["attention_mask"] = [[0 if i < max_seq_len - len(x) else 1 for i in range(max_seq_len)] for x in batch_items["input_ids"]]
for name in padded_inputs:
pad_token = pad_token_map.get(name, default_pad_token)
if pad_left:
# Experimenting with left padding for batch inference
batch_items[name] = [ ([pad_token] * (max_seq_len - len(x)) + x) for x in batch_items[name]]
else:
batch_items[name] = [ (x + [pad_token] * (max_seq_len - len(x))) for x in batch_items[name]]
return batch_items
def save_model_config_and_tokenizer(config, tokenizer, args):
config_path = os.path.join(args.experiment_path, args.experiment_name)
os.makedirs(config_path, exist_ok=True)
config.save_pretrained(config_path)
tokenizer.save_pretrained(config_path)
def save_model_checkpoint(model, args, checkpoint_name="checkpoint.pt"):
checkpoint_path = os.path.join(args.experiment_path, args.experiment_name, "checkpoints")
os.makedirs(checkpoint_path, exist_ok=True)
checkpoint_file_path = os.path.join(checkpoint_path, checkpoint_name)
torch.save(model.state_dict(), checkpoint_file_path)
def save_full_model(model, tokenizer, args, model_name):
checkpoint_path = os.path.join(args.experiment_path, args.experiment_name, model_name)
model.save_pretrained(checkpoint_path)
tokenizer.save_pretrained(checkpoint_path)
<file_sep>import json
import csv
def load_conversations(conversations_file):
with open(conversations_file, 'r') as conversations_file:
conversations = []
for line in conversations_file:
conversations.append(json.loads(line.strip()))
return conversations
def dump_conversations_to_file(conversations, output_file):
with open(output_file, 'w') as conversations_file:
for conversation in conversations:
conversations_file.write(json.dumps(conversation))
conversations_file.write('\n')
def load_imdb_list(file_path):
with open(file_path, 'r') as imdb_file:
entities = [row["name"] for row in csv.DictReader(imdb_file)]
return entities
def popular_actors_list():
actors_list = load_imdb_list('top_1000_actors.csv')
actors_list.extend([
"<NAME>",
"<NAME>", # Tesla in The Prestige
"<NAME>", #
"<NAME>", # Joan from Mad Men
"<NAME>", # Dr. Who
"<NAME>", # Finn from Force Awakens
"<NAME>", # Rey from Force Awakens
"<NAME>", # SNL
"<NAME>", # Dude from Scrubs
"<NAME>", # Surprised this bad mofo isn't on the list
"<NAME>", # Masters of Sex
"<NAME>", # The iconic dracula
"<NAME>", # Mr. Bean, Johnny English
"<NAME>", # Baby Driver
"<NAME>", # High School Musical
"<NAME>", # Dinesh from Silicon Valley
"<NAME>", # 8 Mile
"<NAME>", # The Simpsons
"<NAME>", #The Best Little Whorehouse in Texas
"<NAME>", # Troy from Community
"<NAME>",
"<NAME>", # Whole bunch of martial arts movies
"<NAME>", # Grey's Anatomy
"<NAME>", # Eraserhead and Twin Peaks
"<NAME>", # Watson from Sherlock ; Shaun of the Dead
"<NAME>", # Honey I shrunk .. ; Ghostbusters
"<NAME>", # Girls Trip
"<NAME>", # The Breakfast Club
"<NAME>", # Hamilton
"<NAME>", # <NAME>, Deadpool
"<NAME>", # Maleficent
"<NAME>", # <NAME> from a nightmare on Elm Street
"<NAME>", # Ong Bak
"<NAME>", # Criminal Minds
])
return actors_list
def popular_directors_list():
directors_list = load_imdb_list('top_250_directors.csv')
directors_list.extend([
"<NAME>", # Grand Budapest Hotel, Isle of Dogs etc.
"<NAME>", # A nightmare on Elm Street
"<NAME>", # Star wars
"<NAME>", # Baby Driver
"<NAME>", # Any movie with Johnny Depp / <NAME>
"<NAME>", # Paprika
"<NAME>", "<NAME>", # South Park, Orgazmo etc.
"<NAME>", # Iron Man, Avengers
"<NAME>", # Pacific Rim, The Shape of Water
"<NAME>", # BlacKKKlansman, Da Five Bloods
"<NAME>", # Escape from New York/LA
"<NAME>", # Nosferatu
"<NAME>", # Star Trek/Star Wars the Force Awakens
"<NAME>", # <NAME>
"<NAME>", # Harry Potter
"<NAME>", # Crouching Tiger, Hidden Dragon; Brokeback Mountain
"<NAME>", # Get Out
"<NAME>", # Doctor Sleep, The Haunting of Hill House, Hush
"<NAME>", # Star Wars the Last Jedi
"<NAME>", # Black Swan, Requiem for a Dream
"<NAME>", # Man of Steel, Watchmen, Justice League
"<NAME>", # Platoon, Snowden
])
return directors_list<file_sep>import random
from torch.utils.data import Dataset
from itertools import chain
class RedialDialoGPTDataset(Dataset):
"""
This dataset class prepares input in the vanilla DialoGPT format:
<|endoftext|> TURN_1 <|endoftext|> TURN_2 <|endoftext|> ... <|endoftext|> TURN_N <|endoftext|>
This baseline is expected to perform worse since there's
nothing for the model to condition on
"""
def __init__(self, dataset, tokenizer, args):
self.dataset = dataset
self.tokenizer = tokenizer
self.max_history_turns = args.max_history_turns
self.inference = args.inference
def __getitem__(self, index):
example = self.dataset[index]
example = self._truncate_example(example)
instance = self.prepare_input_for_dialogpt(example, self.tokenizer, self.inference)
return instance
def _truncate_example(self, example):
if len(example.context) > self.max_history_turns:
truncated_context = example.context[-self.max_history_turns:]
example = example._replace(context=truncated_context)
return example
@staticmethod
def prepare_input_for_dialogpt(example, tokenizer, inference=False):
bos, eos = tokenizer.convert_tokens_to_ids([tokenizer.bos_token, tokenizer.eos_token])
context_turns = example.context
system_turn = example.response
input_ids = [bos]
labels = [-100]
for turn in context_turns:
input_ids += turn + [eos]
            labels += [-100] * (len(turn) + 1)  # mask context tokens (and their eos) so they don't contribute to the loss
if not inference: # Don't add system response for inference
input_ids += system_turn + [eos]
labels += system_turn + [eos]
token_type_ids = [0 for _ in labels]
instance = {
"input_ids": input_ids,
"labels": labels,
"token_type_ids": token_type_ids
}
return instance
def __len__(self):
return len(self.dataset)
class RedialTransferTransfoDataset(Dataset):
"""
Mimics the setup from the TransferTransfo paper. This setup has been
fairly reliable for knowledge grounded models such as Topical Chats.
"""
def __init__(self, dataset, tokenizer, special_tokens, args):
self.dataset = dataset
self.tokenizer = tokenizer
self.special_tokens = special_tokens
self.max_history = args.max_history_turns
self.num_candidates = args.num_candidates
@staticmethod
def sample_candidates(dataset, index, num_samples):
# Sample candidates IID
candidates = [response for (_, response, _) in random.sample(dataset, num_samples)]
return candidates
@staticmethod
def build_input_from_segments(context, response, knowledge, tokenizer, special_tokens, lm_labels):
#print("context:", context)
#print("response:", response)
#print("knowledge:", knowledge)
bos, eos, speaker1, speaker2 = tokenizer.convert_tokens_to_ids(special_tokens[:4])
sequence = [[bos] + knowledge] + context + [response + [eos]]
sequence = [sequence[0]] + [
[(speaker2 if (len(sequence) - i) % 2 else speaker1)] + s for i, s in enumerate(sequence[1:])]
instance = {}
instance["input_ids"] = list(chain.from_iterable(sequence))
instance["token_type_ids"] = [speaker2 if (len(sequence) - i - 1) % 2 else speaker1 for i, s in enumerate(sequence) for _ in s]
instance["mc_token_ids"] = len(instance["input_ids"]) - 1
if lm_labels:
instance["lm_labels"] = ([-100] * sum(len(s) for s in sequence[:-1])) + [-100] + sequence[-1][1:]
else:
instance["lm_labels"] = [-100] * len(instance["input_ids"])
#print(tokenizer.decode(instance["input_ids"]))
#print(instance["mc_token_ids"])
#input(">>>")
return instance
@staticmethod
def truncate_inputs(context, max_context_exchanges):
# Limit the number of context exchanges to retain
if len(context) > (2 * max_context_exchanges + 1):
context = context[-(2 * max_context_exchanges + 1):]
return context
def __getitem__(self, index):
context, response, knowledge = self.dataset[index]
context = self.truncate_inputs(context, self.max_history)
tokenized_context = [self.tokenizer.encode(turn) for turn in context]
tokenized_response = self.tokenizer.encode(response)
candidates = self.sample_candidates(self.dataset, index, self.num_candidates - 1)
tokenized_candidates = [self.tokenizer.encode(candidate) for candidate in candidates]
tokenized_candidates.append(tokenized_response)
tokenized_knowledge = []
for (type, item) in knowledge:
#print(f" * type: {type}\titem: {item}")
if type == "dact":
tokenized_knowledge.extend(self.tokenizer.encode(item))
else:
start_tag = f"<{type}>"
end_tag = f"</{type}>"
tokenized_knowledge.append(self.tokenizer.convert_tokens_to_ids(start_tag))
tokenized_knowledge.extend(self.tokenizer.encode(item))
tokenized_knowledge.append(self.tokenizer.convert_tokens_to_ids(end_tag))
instances = []
for j, candidate in enumerate(tokenized_candidates):
lm_labels = bool(j == self.num_candidates - 1)
instance = self.build_input_from_segments(tokenized_context, candidate, tokenized_knowledge, self.tokenizer, self.special_tokens, lm_labels)
instances.append(instance)
return instances
def __len__(self):
return len(self.dataset)
def get_num_batches(self, batch_size):
return len(self) // batch_size # has the effect of performing "drop last"<file_sep># ReDial Data
More info at https://github.com/RaymondLi0/conversational-recommendations .
The repository contains the code for NeurIPS 2018 paper "Towards Deep Conversational Recommendations" https://arxiv.org/abs/1812.07617
# imdb info
More files can be found in the shared drive at CSE243 > DATA.
- `retrieved-imdb-info.zip`: imdb info on movies talked about
- `traintest-with-movie-info.zip`: imdb movie info aligned with redial dialogue turns.
<file_sep>
class Metric(object):
def get(self):
raise NotImplementedError("Subclasses must implement this")
class RunningMetric(Metric):
def __init__(self):
self.current_value = 0.0
self.num_steps = 0
def _add(self, value):
self.current_value += (value - self.current_value) / (self.num_steps + 1)
self.num_steps += 1
def add(self, value):
self._add(value)
def get(self):
return self.current_value
class RunningLambdaMetric(RunningMetric):
def __init__(self, fn):
self.func = fn
super().__init__()
def add(self, *args, **kwargs):
self._add(float(self.func(*args, **kwargs)))
class MetricLambda(Metric):
def __init__(self, fn, metric):
self.func = fn
self.metric = metric
def get(self):
return self.func(self.metric.get())
class Accuracy(RunningLambdaMetric):
def __init__(self):
super().__init__(self._compute_accuracy)
def _compute_accuracy(self, logits, labels):
predictions = logits.argmax(dim=-1)
        return (predictions == labels).float().mean()  # cast to float: bool tensors have no mean()
<file_sep># conversational-movie-recommender<file_sep>import re
class DBPedia(object):
# These correspond to labels that have been misrecognized
BLACKLIST_URIS = {"http://dbpedia.org/resource/Glossary_of_tennis_terms",
"http://dbpedia.org/resource/Good_Movie",
"http://dbpedia.org/resource/Sierra_Entertainment",
"http://dbpedia.org/resource/Nice",
"http://dbpedia.org/resource/Take_Care_(album)",
"http://dbpedia.org/resource/Cloning",
"http://dbpedia.org/resource/Blood",
"http://dbpedia.org/resource/Downhill_creep",
"http://dbpedia.org/resource/Movies",
"http://dbpedia.org/resource/Hey_There",
"http://dbpedia.org/resource/Swimming_(sport)",
"http://dbpedia.org/resource/Princess_Falls",
"http://dbpedia.org/resource/Haha_(entertainer)",
"http://dbpedia.org/resource/LOL",
"http://dbpedia.org/resource/Drag_queen",
"http://dbpedia.org/resource/Yea_Football_Club",
"http://dbpedia.org/resource/Oh_Yeah_(Yello_song)",
"http://dbpedia.org/resource/Scalable_Coherent_Interface",
"http://dbpedia.org/resource/CAN_bus",
"http://dbpedia.org/resource/The_New_One_(horse)",
"http://dbpedia.org/resource/Information_technology",
"http://dbpedia.org/resource/The_Glad_Products_Company",
"http://dbpedia.org/resource/AM_broadcasting",
"http://dbpedia.org/resource/To_Heart",
"http://dbpedia.org/resource/National_Organization_for_Women",
"http://dbpedia.org/resource/Hit_or_Miss_(New_Found_Glory_song)",
"http://dbpedia.org/resource/Canada",
"http://dbpedia.org/resource/Different_Things",
"http://dbpedia.org/resource/Norwegian_Trekking_Association",
"http://dbpedia.org/resource/Take_One_(Canadian_magazine)",
"http://dbpedia.org/resource/For_Inspiration_and_Recognition_of_Science_and_Technology",
"http://dbpedia.org/resource/Two_Guys",
"http://dbpedia.org/resource/The_Sydney_Morning_Herald",
"http://dbpedia.org/resource/Booting",
"http://dbpedia.org/resource/Precious_Time_(album)",
"http://dbpedia.org/resource/I\\u0027m_Glad",
"http://dbpedia.org/resource/Social_Democratic_Party_of_Switzerland",
"http://dbpedia.org/resource/International_Maritime_Organization",
"http://dbpedia.org/resource/LOL",
"http://dbpedia.org/resource/Names_of_God_in_Judaism",
"http://dbpedia.org/resource/Ike_Turner",
"http://dbpedia.org/resource/Tricky_Stewart",
"http://dbpedia.org/resource/Movies!",
}
multi_spaces_pattern = re.compile(r"\s+")
def process_text(text):
return multi_spaces_pattern.sub(" ", text.capitalize())<file_sep>import argparse
import logging
import math
from pprint import pformat
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from tqdm.auto import tqdm
from transformers import GPT2Tokenizer, AdamW
from dataset_utils import (
prepare_redial_baseline_dataset,
prepare_redial_knowledge_grounded_dataset,
get_movie_db_map
)
from datasets import (RedialDialoGPTDataset, RedialTransferTransfoDataset)
from train_utils import (
TransferTransfoConstants,
TransferTransfoWithKnowledgeConstants,
collate_batch_elements,
collate_transfertransfo_batch_elements,
save_model_checkpoint,
save_full_model,
save_model_config_and_tokenizer)
from trainer.counter import GlobalStepCounter
from trainer.metrics import RunningMetric, MetricLambda, RunningLambdaMetric
from trainer.scheduler import PiecewiseLinearLR
logger = logging.getLogger(__file__)
def prepare_dataloaders(args, tokenizer):
movie_db_map = get_movie_db_map(args.movies_data_path)
special_terms = None
if args.configuration == "baseline":
dataset = prepare_redial_baseline_dataset(
args.data_path,
tokenizer,
movie_db_map,
args.data_cache_path
)
train_dataset, test_dataset = RedialDialoGPTDataset(dataset["train"], tokenizer, args), \
RedialDialoGPTDataset(dataset["test"], tokenizer, args)
def collate_fn(batch):
return collate_batch_elements(batch, tokenizer, args.device)
train_loader, test_loader = \
DataLoader(
train_dataset,
batch_size=args.train_batch_size,
collate_fn=collate_fn,
shuffle=True), \
DataLoader(
test_dataset,
batch_size=args.test_batch_size,
collate_fn=collate_fn,
shuffle=False)
else:
dataset, special_terms = prepare_redial_knowledge_grounded_dataset(
args.data_path,
tokenizer,
movie_db_map,
args.data_cache_path,
split_files={"train": args.train_file, "test": args.eval_file},
recommender_only=args.recommender_only,
include_dacts=args.include_dialog_acts
)
special_terms.extend([
"<cast>", "</cast>",
"<movie_genre>", "</movie_genre>",
"<director>", "</director>",
])
train_dataset = RedialTransferTransfoDataset(dataset["train"], tokenizer, TransferTransfoConstants.SPECIAL_TOKENS, args)
test_dataset = RedialTransferTransfoDataset(dataset["test"], tokenizer, TransferTransfoConstants.SPECIAL_TOKENS, args)
def collate_fn(batch):
return collate_transfertransfo_batch_elements(batch, tokenizer, args)
train_loader = DataLoader(train_dataset, batch_size=args.train_batch_size,
collate_fn=collate_fn,
shuffle=True)
test_loader = DataLoader(test_dataset, batch_size=args.test_batch_size,
collate_fn=collate_fn,
shuffle=False)
return train_loader, test_loader, special_terms
def train_lm(model, loader, optimizer, scheduler, step_counter, args):
logger.info("Running training")
model.train()
running_loss = RunningMetric()
for i, batch in enumerate(tqdm(loader)):
input_ids, labels, token_type_ids = batch
# Forward pass and loss computation
lm_loss, *_ = model(input_ids=input_ids, labels=labels, token_type_ids=token_type_ids)
# lm_loss is given by sum(batch_loss) / batch_size
# We need to average loss over all gradient accumulation steps
loss = lm_loss / args.gradient_accumulation_steps
running_loss.add(loss.item())
# Backprop step. Pytorch automatically accumulates
# gradients each time backward is called
loss.backward()
nn.utils.clip_grad_norm_(model.parameters(), args.max_norm)
# Parameter update step
if (i + 1) % args.gradient_accumulation_steps == 0:
optimizer.step()
optimizer.zero_grad()
scheduler.step()
if (i + 1) % args.log_every_n == 0:
logger.info(f"Iteration {i + 1}: [Running Loss: {running_loss.get()};Running PPL: {math.exp(running_loss.get())}]")
step_counter.step()
if step_counter.get() % args.checkpoint_every_n == 0:
checkpoint_name = f"checkpoint-{step_counter.get()}.pt"
save_model_checkpoint(model, args, checkpoint_name)
logger.info(f"Model checkpoint {checkpoint_name} saved!")
logger.info(f"Training loss: {running_loss.get()}")
logger.info(f"Training PPL: {math.exp(running_loss.get())}")
def train_double_heads_lm(model, loader, optimizer, scheduler, step_counter, args):
running_loss = RunningMetric()
ppl = MetricLambda(math.exp, running_loss)
    for i, batch in enumerate(tqdm(loader)):  # use the loader argument, not the module-level train_loader
model.train()
batch = tuple(input_tensor.to(args.device) for input_tensor in batch)
input_ids, mc_token_ids, lm_labels, mc_labels, token_type_ids = batch
(lm_loss), (mc_loss), *_ = model(
input_ids, token_type_ids=token_type_ids, mc_token_ids=mc_token_ids,
mc_labels=mc_labels, lm_labels=lm_labels
)
loss = (lm_loss * args.lm_coef + mc_loss * args.mc_coef) / args.gradient_accumulation_steps
running_loss.add(float(loss))
loss.backward()
torch.nn.utils.clip_grad_norm_(model.parameters(), args.max_norm)
if (i + 1) % args.log_every_n == 0:
logger.info(
f"Iteration {i}: [Running Loss: {running_loss.get()};Running PPL: {ppl.get()}]")
if i % args.gradient_accumulation_steps == 0:
optimizer.step()
optimizer.zero_grad()
scheduler.step()
step_counter.step()
if step_counter.get() % args.checkpoint_every_n == 0:
checkpoint_name = f"checkpoint-{step_counter.get()}.pt"
save_model_checkpoint(model, args, checkpoint_name)
logger.info(f"Model checkpoint {checkpoint_name} saved!")
logger.info(f"Epoch loss: {running_loss.get()}")
logger.info(f"Epoch PPL: {ppl.get()}")
def evaluate_lm(model, loader, loss_fn, args):
logger.info("Running evaluation")
model.eval()
running_loss = RunningMetric()
with torch.no_grad():
for i, batch in enumerate(tqdm(loader)):
input_ids, labels, token_type_ids = batch
# No loss is returned if lm_labels is not passed
lm_logits, *_ = model(
input_ids=input_ids, token_type_ids=token_type_ids
)
            # Shift logits/labels by one position to match the shift that
            # GPT2LMHeadModel applies internally when computing the training loss.
            shift_logits = lm_logits[..., :-1, :].contiguous()
            shift_labels = labels[..., 1:].contiguous()
            loss = loss_fn(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1))
            running_loss.add(loss.item())
if (i + 1) % args.log_every_n == 0:
logger.info(f"Iteration {i}: [Running Loss: {running_loss.get()};Running PPL: {math.exp(running_loss.get())}]")
logger.info(f"Validation NLL: {running_loss.get()}")
logger.info(f"Validation PPL: {math.exp(running_loss.get())}")
def evaluate_double_heads_lm(model, val_loader, loss_fn, args):
model.eval()
running_nll = RunningLambdaMetric(loss_fn)
ppl = MetricLambda(math.exp, running_nll)
with torch.no_grad():
for i, batch in enumerate(tqdm(val_loader)):
batch = tuple(input_tensor.to(args.device) for input_tensor in batch)
input_ids, mc_token_ids, lm_labels, mc_labels, token_type_ids = batch
lm_logits, mc_logits, *_ = model(
input_ids, token_type_ids=token_type_ids, mc_token_ids=mc_token_ids,
)
lm_logits_flat_shifted = lm_logits[..., :-1, :].contiguous().view(-1, lm_logits.size(-1))
lm_labels_flat_shifted = lm_labels[..., 1:].contiguous().view(-1)
running_nll.add(lm_logits_flat_shifted, lm_labels_flat_shifted)
logger.info(f"NLL Loss: {running_nll.get()}")
logger.info(f"Perlexity: {ppl.get()}")
def train_baseline_lm(model, loaders, optimizer, loss_fn, scheduler, args):
train_loader, test_loader = loaders
step_counter = GlobalStepCounter()
for i in range(args.n_epochs):
logger.info(f"Epoch {i + 1}:")
train_lm(model, train_loader, optimizer, scheduler, step_counter, args)
evaluate_lm(model, test_loader, loss_fn, args)
epoch_model = f"{args.experiment_name}_epoch_{i + 1}"
save_full_model(model, tokenizer, args, epoch_model)
logger.info(f"Model {epoch_model} saved!")
logger.info(f"Epoch {i + 1} completed!\n")
if args.n_epochs < 1:
        evaluate_lm(model, test_loader, loss_fn, args)  # pass the loss function, not the optimizer
def train_knowledge_grounded_lm(model, loaders, optimizer, loss_fn, scheduler, args):
train_loader, val_loader = loaders
step_counter = GlobalStepCounter()
for epoch in range(args.n_epochs):
train_double_heads_lm(model, train_loader, optimizer, scheduler, step_counter, args)
epoch_model = f"{args.experiment_name}_epoch_{epoch + 1}"
save_full_model(model, tokenizer, args, epoch_model)
logger.info(f"Model {epoch_model} saved!")
logger.info(f"Epoch {epoch + 1} completed!\n")
evaluate_double_heads_lm(model, val_loader, loss_fn, args)
if args.n_epochs < 1:
evaluate_double_heads_lm(model, val_loader, loss_fn, args)
def setup_baseline_lm(args):
if args.gradient_checkpoint:
from models.gpt2 import GPT2LMHeadModel
model = GPT2LMHeadModel.from_pretrained(args.model_checkpoint)
else:
from transformers import GPT2LMHeadModel
model = GPT2LMHeadModel.from_pretrained(args.model_checkpoint)
return model
def setup_double_heads_model(new_vocabulary_size, args):
if args.gradient_checkpoint:
from models.gpt2 import GPT2DoubleHeadsModel
model = GPT2DoubleHeadsModel.from_pretrained(args.model_checkpoint)
else:
from transformers import GPT2DoubleHeadsModel
model = GPT2DoubleHeadsModel.from_pretrained(args.model_checkpoint)
model.resize_token_embeddings(new_num_tokens=new_vocabulary_size)
return model
def setup_knowledge_grounded_tokenizer(args):
tokenizer = GPT2Tokenizer.from_pretrained(args.model_checkpoint)
logger.info("Adding special tokens to tokenizer:")
logger.info(TransferTransfoWithKnowledgeConstants.ADDITIONAL_TOKENS + list(TransferTransfoWithKnowledgeConstants.ATTR_TO_SPECIAL_TOKEN.items()))
# Add special tokens
num_added_norm_tokens = tokenizer.add_tokens(TransferTransfoWithKnowledgeConstants.ADDITIONAL_TOKENS)
num_added_special_tokens = tokenizer.add_special_tokens(TransferTransfoWithKnowledgeConstants.ATTR_TO_SPECIAL_TOKEN)
new_vocab_size = len(tokenizer.encoder) + num_added_norm_tokens + num_added_special_tokens
# Return tokenizer and new vocabulary size
logger.info("Successfully added: %s tokens", num_added_norm_tokens + num_added_special_tokens)
return tokenizer, new_vocab_size
def get_argparser():
parser = argparse.ArgumentParser()
parser.add_argument('--configuration',
default='baseline',
choices=['baseline', 'knowledge_grounded']
)
parser.add_argument('--model_checkpoint',
default="microsoft/DialoGPT-medium",
help="The model checkpoint to use"
)
parser.add_argument('--gradient_checkpoint',
action="store_true",
help="Use gradient checkpoint variant GPT2"
)
parser.add_argument('--data_path',
default="redial/",
help="Path to dataset"
)
parser.add_argument('--movies_data_path',
default="redial/movies_with_mentions.csv",
help="Path to movie mentions file"
)
parser.add_argument('--train-file',
default="train_data_swda_tagged.jsonl",
help="Name of train jsonl file."
)
parser.add_argument('--eval-file',
default="test_data_swda_tagged.jsonl",
help="Name of test jsonl file."
)
parser.add_argument('--data_cache_path',
default="redial_dataset_cache.pkl",
help="Path to cached data"
)
parser.add_argument('--experiment_path',
default="runs",
help="Parent directory for experiments"
)
parser.add_argument('--experiment_name',
required=True,
help="Name of the experiment"
)
parser.add_argument('--seed',
default=42,
type=int,
help="Random seed for experiments"
)
parser.add_argument('--max_history_turns',
default=2,
type=int,
help="How many turns from context to retain"
)
parser.add_argument('--n_epochs',
default=3,
type=int,
help="Number of epochs for training"
)
parser.add_argument('--train_batch_size',
default=4,
type=int,
help="Train batch size"
)
parser.add_argument('--test_batch_size',
default=4,
type=int,
help="Test batch size"
)
parser.add_argument('--lr',
default=6.25e-5,
type=float,
help="Base/initial learning rate"
)
parser.add_argument('--device',
default=torch.device("cuda" if torch.cuda.is_available() else "cpu"),
help="Device to store the model"
)
parser.add_argument('--gradient_accumulation_steps',
default=16,
type=int,
help="Number of steps to accumulate gradient for"
)
parser.add_argument('--max_norm',
default=1.0,
type=float,
help="Maximum norm for the gradient"
)
parser.add_argument('--log_every_n',
default=500,
type=int,
help="The frequency (in number of steps) with which information is shown"
)
parser.add_argument('--checkpoint_every_n',
default=1800,
type=int,
help="The frequency (in number of steps) with which the model checkpoints are saved"
)
parser.add_argument('--recommender_only',
dest='recommender_only',
action='store_true',
help="Train only on recommender side utterances"
)
parser.set_defaults(include_dialog_acts=True)
parser.add_argument('--exclude_dialog_acts',
dest='include_dialog_acts',
action='store_false',
help="Whether to exclude dialog act in the knowledge")
# Double heads model specific args
double_heads_parser = parser.add_argument_group('Double Heads Model Arguments:')
double_heads_parser.add_argument('--num_candidates',
type=int, default=2,
help="Number of candidates to select from")
double_heads_parser.add_argument('--lm_coef',
type=float, default=1.0,
help="Weight for Language Model loss coefficient")
double_heads_parser.add_argument('--mc_coef',
type=float, default=1.0,
help="Weight for Multiple-Choice loss coefficient")
parser.set_defaults(recommender_only=False)
return parser
if __name__ == "__main__":
parser = get_argparser()
args = parser.parse_args()
args.inference = False
logger.info(f"Arguments : {pformat(args)}")
logging.basicConfig(level=logging.INFO)
logger.info("Arguments: %s", pformat(args))
logger.info("Prepare tokenizer, pretrained model and optimizer.")
logger.info("Load datasets")
if args.configuration == "baseline":
tokenizer = GPT2Tokenizer.from_pretrained(args.model_checkpoint)
model = setup_baseline_lm(args)
else:
tokenizer, new_vocab_size = setup_knowledge_grounded_tokenizer(args)
model = setup_double_heads_model(new_vocab_size, args)
train_loader, test_loader, special_terms = prepare_dataloaders(args, tokenizer)
if isinstance(special_terms, list):
print("adding more tokens:")
print(" *", special_terms)
num_added = tokenizer.add_tokens(special_terms)
print("num_added:", num_added)
new_vocab_size += num_added
if args.configuration == "baseline":
model = setup_baseline_lm(args)
else:
model = setup_double_heads_model(new_vocab_size, args)
optimizer = AdamW(model.parameters(), lr=args.lr, correct_bias=True)
scheduler = PiecewiseLinearLR(optimizer, [(0, args.lr), (args.n_epochs * len(train_loader), 0.0)])
loss_fn = nn.CrossEntropyLoss(ignore_index=-100) # used for computing validation loss
model.to(args.device)
save_model_config_and_tokenizer(model.config, tokenizer, args)
if args.configuration == "baseline":
train_baseline_lm(model, (train_loader, test_loader), optimizer, loss_fn, scheduler, args)
else:
train_knowledge_grounded_lm(model, (train_loader, test_loader), optimizer, loss_fn, scheduler, args)
<file_sep>"""
Created by diesel
12/23/19
"""
from collections import defaultdict
import nltk
import json
import spacy
nlp = spacy.load('en_core_web_sm')
class Lexicon(object):
_save_attrs = ["name", "terms", "_id2term", "info", "_id_counter",
"_standard_2_id", "_all_refs"]
def __init__(self, name):
self.name = name
self.terms = []
self._id2term = {}
self.info = {}
self._id_counter = 0
self._standard_2_id = {}
self._all_refs = defaultdict(list)
self._phrases = None
def _clean_up(self):
for record in self._id2term.values():
record["referential_forms"] = list(set(record["referential_forms"]))
def to_dict(self):
self._clean_up()
d = {}
for k in self._save_attrs:
d[k] = getattr(self, k)
return d
    def to_lines(self, delim="; ", num_spaces=2):
self._clean_up()
all_lines = []
for entry in self.terms:
info = self.info[entry["id"]]
category = self.name
if category == "characters":
category = "character"
category = category.upper()
lines = [category, "standard_form" + ": "+ entry["standard_form"]]
forms = entry.get("referential_forms", [])
if forms:
forms = delim.join(forms)
lines.append("forms: " + forms)
for attr, val in info.items():
if attr in {"forms", "full_name", "standard_form"}:
continue
if isinstance(val, list):
val = delim.join(val)
lines.append(attr + ": " + str(val))
lines.extend([""] * num_spaces)
all_lines.extend(lines)
return all_lines
    def to_df(self, delim="; ", num_spaces=2):
self._clean_up()
all_lines = []
for entry in self.terms:
info = self.info[entry["id"]]
category = self.name
if category == "characters":
category = "character"
category = category.upper()
e = {
"category": category,
"standard_form": entry["standard_form"],
"forms": json.dumps(entry.get("referential_forms", []))
}
for attr, val in info.items():
if attr in {"forms", "full_name", "standard_form"}:
continue
if isinstance(val, list):
val = json.dumps(val)
e[attr] = val
all_lines.append(e)
return all_lines
@classmethod
def from_dict(cls, d):
assert "name" in d
lex = cls(d["name"])
for k in cls._save_attrs:
if d.get(k):
setattr(lex, k, d[k])
lex._id2term = {}
_terms = []
if len(lex.terms) > 0:
for t in lex.terms:
if t["id"] not in lex._id2term:
lex._id2term[t["id"]] = t
_terms.append(t)
lex.terms = _terms
assert len(lex.terms) == len(lex._id2term), "{} {}".format(len(lex.terms), len(lex._id2term))
return lex
def get_info(self, tid_or_standard_form, default_val=None):
info = self.info.get(tid_or_standard_form)
if info is None:
key = self._standard_2_id.get(tid_or_standard_form)
info = self.info.get(key)
if info is None:
info = default_val
return info
def new_id(self):
self._id_counter += 1
return self._id_counter
    def new_term(self, standard_form, info=None, referential_forms=None, tid=None, standard_form_is_id=True):
        referential_forms = referential_forms or []  # tolerate the default None so the loops below don't fail
        if standard_form in self._standard_2_id:
the_id = self._standard_2_id[standard_form]
for ref in referential_forms:
self._add_new_ref(ref, the_id)
else:
new_term = {
"standard_form": standard_form,
"referential_forms": referential_forms if referential_forms else [],
"id": (standard_form if standard_form_is_id else self.new_id()) if tid is None else tid
}
self.terms.append(new_term)
self._id2term[new_term["id"]] = new_term
self.info[new_term["id"]] = info
for ref in referential_forms:
self._add_new_ref(ref, new_term["id"])
self._standard_2_id[standard_form] = new_term["id"]
def update_info(self, tid, new_info):
tinfo = self.info.get(tid, {})
tinfo.update(new_info)
def _add_new_ref(self, ref, tid):
if ref not in self._all_refs or tid not in self._all_refs[ref]:
if ref not in self._all_refs:
self._all_refs[ref] = []
self._all_refs[ref].append(tid)
def new_referential_phrase(self, refs, standard_form=None, tid=None):
assert standard_form or tid
if tid is None:
tid = standard_form
if not isinstance(refs, list):
refs = [refs]
#print("tid:", [tid])
record = self._id2term.get(tid)
#print("record:", record)
record["referential_forms"].extend([r for r in refs if r not in record["referential_forms"]])
for ref in refs:
self._add_new_ref(ref, tid)
print(" * add {} => {}".format(ref, tid))
@property
def phrases(self):
if self._phrases is None:
self._phrases = set(self._all_refs.keys())
return self._phrases
def ref2standard(self, ref, get_tid=False):
ids = self._all_refs.get(ref)
if ids:
if not isinstance(ids, list):
ids = [ids]
standard_forms = [self._id2term[_i].get("standard_form") for _i in ids]
else:
standard_forms = None
        retval = (standard_forms, ids) if get_tid else standard_forms  # parenthesized so get_tid controls whether the ids are returned
return retval
class LexBuilder(object):
def __init__(self):
self._entries = defaultdict(list)
self._lexicons = None
@property
def lexicons(self):
return self._lexicons
def add_entry(self, e, etype=None):
if etype is None:
etype = e["category"]
self._entries[etype].append(e)
def get_entry_key_set(self):
return set([k for etype, ents in self._entries.items() for e in ents for k in e.keys()])
def lex_as_dict(self):
return {lex.name: lex.to_dict() for lex in self.lexicons.values()}
def build_lexes(self):
lexicons = {}
for ent_type, entries in self._entries.items():
lex = Lexicon(ent_type)
for entry in entries:
if isinstance(entry, str):
entry = entries[entry]
if not entry.get("full_name") or entry["full_name"] == "none none":
continue
refs = self.process_ref(entry["forms"])
lex.new_term(entry["full_name"], info=entry, referential_forms=refs)
#refs = [process_ref(player["full_name"])]
#player_lex.new_term(player["full_name"], info=player, referential_forms=refs)
lexicons[ent_type] = lex
self._lexicons = lexicons
def save_lexes(self, outfile):
with open(outfile, "w") as fout:
json.dump(self.lex_as_dict(), fout, indent=2)
@staticmethod
def process_ref(text):
def _process(ref):
doc = nlp(ref)
toks = " ".join(getattr(tok, "text", None) for tok in doc).lower()
#toks = " ".join(nltk.word_tokenize(ref)).lower()
return toks
retval = [_process(ref) for ref in text] if isinstance(text, list) else _process(text)
return retval
def main():
pass
if __name__ == "__main__":
main()
<file_sep>import argparse
import torch
from tqdm.auto import tqdm
from transformers import GPT2Tokenizer, GPT2LMHeadModel, GPT2Config
import torch.nn.functional as F
from torch.utils.data import DataLoader
from datasets import RedialDialoGPTDataset, RedialTransferTransfoDataset
from dataset_utils import get_movie_db_map, prepare_redial_baseline_dataset, prepare_redial_knowledge_grounded_dataset
from train_utils import collate_batch_elements, collate_transfertransfo_batch_elements, TransferTransfoConstants
def prepare_dataloader(args, tokenizer):
movie_db_map = get_movie_db_map(args.movies_data_path)
dataset = prepare_redial_baseline_dataset(
args.data_path,
tokenizer,
movie_db_map,
args.data_cache_path
)
test_dataset = RedialDialoGPTDataset(dataset["test"], tokenizer, args)
collate_fn = lambda batch: collate_batch_elements(batch, tokenizer, args.device, pad_left=True)
test_loader = DataLoader(test_dataset, batch_size=args.test_batch_size, collate_fn=collate_fn, shuffle=False)
return test_loader
def prepare_knowledge_grounded_dataloader(args, tokenizer):
movie_db_map = get_movie_db_map(args.movies_data_path)
dataset, special_terms = prepare_redial_knowledge_grounded_dataset(
args.data_path,
tokenizer,
movie_db_map,
args.data_cache_path,
split_files={"train": args.train_file, "test": args.eval_file},
recommender_only=args.recommender_only,
include_dacts=args.include_dialog_acts
)
special_terms.extend([
"<cast>", "</cast>",
"<movie_genre>", "</movie_genre>",
"<director>", "</director>",
])
test_dataset = RedialTransferTransfoDataset(
dataset["test"], tokenizer, TransferTransfoConstants.SPECIAL_TOKENS, args)
def collate_fn(batch):
return collate_transfertransfo_batch_elements(batch, tokenizer, args)
test_loader = DataLoader(test_dataset, batch_size=args.test_batch_size, collate_fn=collate_fn, shuffle=False)
return test_loader
def save_outputs(outputs, args):
with open(args.output_file_path, 'w') as output_file:
output_file.write("\n".join(outputs))
def top_filtering(logits, top_k=0., top_p=0.9, threshold=-float('Inf'), filter_value=-float('Inf')):
""" Filter a distribution of logits using top-k, top-p (nucleus) and/or threshold filtering
Args:
logits: logits distribution shape (vocabulary size)
top_k: <=0: no filtering, >0: keep only top k tokens with highest probability.
top_p: <=0.0: no filtering, >0.0: keep only a subset S of candidates, where S is the smallest subset
whose total probability mass is greater than or equal to the threshold top_p.
In practice, we select the highest probability tokens whose cumulative probability mass exceeds
the threshold top_p.
threshold: a minimal threshold to keep logits
"""
assert logits.dim() == 1 # Only work for batch size 1 for now - could update but it would obfuscate a bit the code
top_k = min(top_k, logits.size(-1))
if top_k > 0:
# Remove all tokens with a probability less than the last token in the top-k tokens
indices_to_remove = logits < torch.topk(logits, top_k)[0][..., -1, None]
logits[indices_to_remove] = filter_value
if top_p > 0.0:
# Compute cumulative probabilities of sorted tokens
sorted_logits, sorted_indices = torch.sort(logits, descending=True)
cumulative_probabilities = torch.cumsum(F.softmax(sorted_logits, dim=-1), dim=-1)
# Remove tokens with cumulative probability above the threshold
sorted_indices_to_remove = cumulative_probabilities > top_p
# Shift the indices to the right to keep also the first token above the threshold
sorted_indices_to_remove[..., 1:] = sorted_indices_to_remove[..., :-1].clone()
sorted_indices_to_remove[..., 0] = 0
# Back to unsorted indices and set them to -infinity
indices_to_remove = sorted_indices[sorted_indices_to_remove]
logits[indices_to_remove] = filter_value
indices_to_remove = logits < threshold
logits[indices_to_remove] = filter_value
return logits
def decode_sequences(input_ids, token_type_ids, model, tokenizer, args):
special_tokens_ids = tokenizer.convert_tokens_to_ids(TransferTransfoConstants.SPECIAL_TOKENS)
outputs = []
for i in range(len(input_ids)):
input_seq = tokenizer.decode(input_ids[i][0])
prefix, suffix = input_seq.rsplit("<speaker", maxsplit=1)
context = prefix + "<speaker" + suffix[:2] # Hacky way to append the speaker tag
current_output = []
attempts = 0
# Keep trying to generate output until a limited number of times
expanded_tok_type_ids = token_type_ids[i][0].tolist()
for j in range(args.max_length): # Add trailing tokens
expanded_tok_type_ids.append(expanded_tok_type_ids[-1])
expanded_tok_type_ids = torch.tensor(expanded_tok_type_ids).to(args.device)
for j in range(args.max_length):
prefix_input_seq = torch.tensor(tokenizer.encode(context) + current_output).unsqueeze(0)
truncated_tok_type_ids = expanded_tok_type_ids[:prefix_input_seq.shape[-1]].unsqueeze(0)
logits = model(prefix_input_seq.to(args.device), token_type_ids=truncated_tok_type_ids.to(args.device))
if isinstance(logits, tuple) or len(logits.shape) == 4: # for gpt2 and maybe others
logits = logits[0]
logits = logits[0, -1, :] / args.temperature
logits = top_filtering(logits, top_k=args.top_k, top_p=args.top_p)
probs = F.softmax(logits, dim=-1)
prev = torch.topk(probs, 1)[1] if args.no_sample else torch.multinomial(probs, 1)
if prev.item() in special_tokens_ids:
patience = 3
while prev.item() in special_tokens_ids:
if probs.max().item() == 1 or patience == 0:
# Disabled this rather noisy warning
# logger.warn("Warning: model generating special token with probability 1.")
break # avoid infinitely looping over special token
prev = torch.multinomial(probs, num_samples=1)
patience -= 1
if prev.item() in special_tokens_ids:
break
current_output.append(prev.item())
output = tokenizer.decode(current_output)
outputs.append(output.replace('\n', ''))
return outputs
def generate_outputs(model, loader, tokenizer, args):
all_outputs = []
with torch.no_grad():
for i, batch in enumerate(tqdm(loader)):
input_ids, _, _, attention_mask = batch
output_sequences = model.generate(
input_ids=input_ids,
attention_mask=attention_mask,
do_sample=not args.no_sample,
pad_token_id=tokenizer.eos_token_id,
max_length=input_ids.size(1) + args.max_length,
temperature=args.temperature,
top_k=args.top_k,
top_p=args.top_p
)
for x in output_sequences:
all_outputs.append(tokenizer.decode(x[input_ids.size(1):], skip_special_tokens=True))
save_outputs(all_outputs, args)
def generate_outputs_single_example(model, loader, tokenizer, args):
outputs = []
with torch.no_grad():
for i, batch in enumerate(tqdm(loader)):
input_ids, mc_token_ids, lm_labels, mc_labels, token_type_ids = batch
outputs += decode_sequences(input_ids, token_type_ids, model, tokenizer, args)
save_outputs(outputs, args)
def main(args):
tokenizer = GPT2Tokenizer.from_pretrained(args.model_configuration)
if args.configuration == "baseline":
test_loader = prepare_dataloader(args, tokenizer)
else:
test_loader = prepare_knowledge_grounded_dataloader(args, tokenizer)
if args.model_configuration != args.model_checkpoint:
config = GPT2Config.from_pretrained(args.model_configuration)
state_dict = torch.load(args.model_checkpoint)
        # I didn't read the documentation carefully enough
model = GPT2LMHeadModel.from_pretrained(None, config=config,state_dict=state_dict)
else:
model = GPT2LMHeadModel.from_pretrained(args.model_checkpoint)
model.to(args.device)
if args.configuration == "baseline":
generate_outputs(model, test_loader, tokenizer, args)
else:
generate_outputs_single_example(model, test_loader, tokenizer, args)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--configuration',
type=str,
default='baseline',
choices=['baseline', 'knowledge_grounded'])
parser.add_argument('--model_configuration',
default="microsoft/DialoGPT-medium",
help="The model configuration to use"
)
parser.add_argument('--model_checkpoint',
default="microsoft/DialoGPT-medium",
help="The model checkpoint to use"
)
parser.add_argument('--data_path',
default="redial/",
help="Path to dataset"
)
parser.add_argument('--movies_data_path',
default="redial/movies_with_mentions.csv",
help="Path to movie mentions file"
)
parser.add_argument('--train-file',
default="train_data_swda_tagged.jsonl",
help="Name of train jsonl file."
)
parser.add_argument('--eval-file',
default="test_data_swda_tagged.jsonl",
help="Name of test jsonl file."
)
parser.add_argument('--data_cache_path',
default="redial_dataset_cache.pkl",
help="Path to cached data"
)
parser.add_argument('--test_batch_size',
default=1,
type=int,
help="Test batch size"
)
parser.add_argument('--model_metadata_path', type=str, default='./runs/bert_swbd_pd_nrg',
help='Path to the tokenizer and model configuration')
parser.add_argument('--output_file_path', type=str, default='submissions/submissions.txt')
parser.add_argument('--max_history_turns',
default=2,
type=int,
help="How many turns from context to retain"
)
parser.add_argument('--device',
default=torch.device("cuda" if torch.cuda.is_available() else "cpu"),
help="Device to store the model"
)
parser.add_argument("--temperature", type=int, default=0.7, help="Sampling softmax temperature")
parser.add_argument("--top_k", type=int,
default=0.,
help="Filter top-k tokens before sampling (<=0: no filtering)")
parser.add_argument("--top_p", type=float, default=0.9,
help="Nucleus filtering (top-p) before sampling (<=0.0: no filtering)")
parser.add_argument("--no_sample", action='store_true', help="Set to use greedy decoding instead of sampling")
parser.add_argument("--max_length", type=int, default=50, help="Maximum length of the output utterances")
double_heads_parser = parser.add_argument_group('Double Heads Model Arguments:')
double_heads_parser.add_argument('--num_candidates',
type=int, default=1,
help="Number of candidates to select from")
parser.add_argument('--recommender_only',
dest='recommender_only',
action='store_true',
help="Train only on recommender side utterances"
)
parser.set_defaults(include_dialog_acts=True)
parser.add_argument('--exclude_dialog_acts',
dest='include_dialog_acts',
action='store_false',
help="Whether to exclude dialog act in the knowledge")
args = parser.parse_args()
args.inference = True
print(args)
main(args)<file_sep>"""
Created by diesel
12/23/19
"""
#import scaffold
#print(vars(scaffold))
from lexicon import Lexicon
import json
import nltk
from collections import Counter
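
# NOTE: the demo functions below expect a DataManager class from this project's
# data utilities to be available; it is assumed to be provided elsewhere.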
"""
# Penn Tagset
# Optional DT: (\S+/DT\s*)?
# Adjetive can be JJ,JJR,JJS: (\S+/JJ\w?\s*)*
# Noun can be NN,NNS,NNP,NNPS: (\S+/NN\w*\s*)+
# ?P<> named group
# ?: non-capturing group
# See https://docs.python.org/3/howto/regex.html#non-capturing-and-named-groups
# index/word/pos/ner
# capital determiner 'A' or 'The' followed by nouns
# The man
# (The Markell family) always
generic = "((?:(?:[0-9]+)/(?:The|A)/DT/\S+\s)(?:\S+/NN\S*/\S+\s*)+)"
# possesive relations people
# our son; his girlfriend
generic2 = "((?:(?:[0-9]+)/\S+/PRP\$/\S+\s)(?:\S+/NN\S*/\S+\s*)+)"
# Mister Deals saw
proper = "((?:^(?:[0-9]+)/\S+/NN\S*/\S+\s)(?:\S+/NN\S*/\S+)*)"
person = "((?:(?:[0-9]+)/\S+/NN\S*/\S+\s+)*(?:\S+/NN\S*/PERSON\s*))"
speaker = "((?:[0-9]+/SPEAKER/\S+/\S+\s+))"
#char_patterns = "|".join([generic, proper, person, speaker])
re_patterns = {
"animate_NN_wn": generic,
"proper_NN": proper,
"person_NER": person,
"speaker": speaker,
"poss_rel_wn": generic2
}
#pattern = re.compile(r'(?P<CHAR>(?:\S+/DT\s*)?(?:\S+/JJ\w?\s*)*(?:\S+/NN\w*\s*)+)')
#pattern = re.compile("(?P<CHAR>{})".format(char_patterns))
regex = {k: re.compile("(?P<CHAR>{})".format(char_patterns)) for k, char_patterns in re_patterns.items()}
#found_entities = defaultdict(list)
for pat_name, pattern in regex.items():
#print("pattern_name:", pat_name)
for m in pattern.finditer(tokens):
mention = [feats.split("/") for feats in m.group("CHAR").rstrip().split()]
"""
class RuleBasedNER():
def __init__(self, lexicons):
self._lexicons = lexicons
def tokenize_text(self, text):
return [t.lower() for t in nltk.word_tokenize(text)]
def tag_tokens(self, tokens):
#print("tokens:", tokens)
indexed_grams = {}
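        # Index every 1- to 4-gram of the utterance by the position(s) where it
        # starts, so lexicon phrases of any length can be looked up in one pass.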
for n in [4, 3, 2, 1]:
ngrams = nltk.ngrams(tokens, n)
for j, gram in enumerate(ngrams):
g = " ".join(gram)
if g in indexed_grams:
if not isinstance(indexed_grams[g], list):
indexed_grams[g] = [indexed_grams[g]]
indexed_grams[g].append(j)
else:
indexed_grams[g] = j
#print("all_grams:", list(indexed_grams.keys()))
mentions = []
_mention_idxs = set()
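        # Greedily match lexicon phrases, longest first, skipping any candidate
        # whose token span overlaps an already-accepted mention.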
for lex_name, lex in self._lexicons.items():
#print("phrases:", lex.phrases)
found = set(indexed_grams.keys()) & lex.phrases
#print("found:", found)
found = sorted([(len(f),f) for f in found], reverse=True)
if len(found) > 0:
for length, gram in found:
#print("gram:", gram)
idxs = indexed_grams.get(gram)
#print("idxs:", idxs)
if not isinstance(idxs, list):
idxs = [idxs]
for start_idx in idxs:
#print("start_idx:", start_idx)
#print("len:", len(gram.split()))
end_idx = start_idx + len(gram.split())
span_idxs = set(range(start_idx, end_idx))
#print("span_idxs:", span_idxs)
#print("start,end:", start_idx, end_idx)
standard_form, ent_id = lex.ref2standard(gram, get_tid=True)
#print("_mention_idxs:", _mention_idxs)
if len(_mention_idxs & span_idxs) < 1:
mention = {
"ent_id": ent_id,
"standard_form": standard_form,
"words": tokens[start_idx:end_idx],
"start_idx": start_idx,
"end_idx": end_idx,
"ent_type": lex.name
}
mentions.append(mention)
for _idx_ in span_idxs:
_mention_idxs.add(_idx_)
#print("mentions:", mentions)
return mentions
def player_analysis():
dm = DataManager(
season_dir_path="../data/season_info",
players_path="../data/nba-static-data/nba-players.json",
teams_path="../data/nba-static-data/nba-teams.json",
fun_facts_path="../data/ready-facts.json",
questions_path="../data/ready-questions.json",
standings_path="../data/nba-playoff-picture",
)
print("All Players:")
full_names = [player["full_name"] for player in dm.players]
print("num_names:", len(full_names))
print("num unique names:", len(set(full_names)))
dups = []
_all = set()
for name in full_names:
if name in _all:
dups.append(name)
_all.add(name)
for d in dups:
print(" * ", d)
print("Active Players:")
full_names = [player["full_name"] for player in dm.players if player["is_active"]]
print("num_names:", len(full_names))
print("num unique names:", len(set(full_names)))
def process_ref(text):
return " ".join(nltk.word_tokenize(text)).lower()
def setup_lexicons():
dm = DataManager(
season_dir_path="../data/season_info",
players_path="../data/nba-static-data/nba-players.json",
teams_path="../data/nba-static-data/nba-teams.json",
fun_facts_path="../data/ready-facts.json",
questions_path="../data/ready-questions.json",
standings_path="../data/nba-playoff-picture",
)
# set up lexicons of know entities
player_lex = Lexicon("player")
for player in dm.players:
refs = [process_ref(player["full_name"])]
player_lex.new_term(player["full_name"], info=player, referential_forms=refs)
team_lex = Lexicon("team")
for team in dm.teams:
refs = [process_ref(team[k]) for k in ["full_name", "city", "nickname", ]] # "abbreviation"
team_lex.new_term(team["full_name"], info=team, referential_forms=refs)
lexicons = {
lex.name: lex.to_dict() for lex in [player_lex, team_lex]
}
with open("../data/lexicons/lex.json", "w") as fout:
json.dump(lexicons, fout, indent=2)
with open("../data/lexicons/lex.json", "r") as fin:
lexicons2 = json.load(fin)
lex2 = {
name: Lexicon.from_dict(d) for name, d in lexicons2.items()
}
prepared_lexicons = "../data/lexicons/lex.json"
with open(prepared_lexicons, "r") as fin:
lexicons2 = json.load(fin)
lexicons = {
name: Lexicon.from_dict(d) for name, d in lexicons2.items()
}
def team_ner_demo(fact, ner_tagger, results, lex):
print(("-" * 20) + "\n\n")
text = fact["text"]
team = fact["team"]
toks = [t.lower() for t in nltk.word_tokenize(text)]
mentions = ner_tagger.tag_tokens(toks)
#player_mentions = [men for men in mentions if men["ent_type"] == "player"]
team_mentions = [men for men in mentions if men["ent_type"] == "team"]
nicknames = [lex.get_info(men["standard_form"][0])["nickname"] for men in team_mentions]
nicknames = "; ".join(nicknames)
print("\ntext:", text)
print("fact team:", team)
print("team_mention:", "; ".join([" ".join(men["words"]) for men in team_mentions]))
print("nickname:", nicknames)
if len(team_mentions) == 1:
words = " ".join(team_mentions[0]["words"])
standard = team_mentions[0]["standard_form"][0]
else:
words = "; ".join([" ".join(men["words"]) for men in team_mentions])
standard = "; ".join([men["standard_form"][0] for men in team_mentions])
#print("player_mention: ", words)
print("{} == {}".format(team, nicknames))
if team.lower() == nicknames.lower():
result = "PASS"
else:
result = "FAIL"
print(result)
results.append(result)
def player_ner_demo(fact, ner_tagger, results):
print(("-" * 20) + "\n\n")
text = fact["text"]
player = fact["player"]
toks = [t.lower() for t in nltk.word_tokenize(text)]
mentions = ner_tagger.tag_tokens(toks)
player_mentions = [men for men in mentions if men["ent_type"] == "player"]
team_mentions = [men for men in mentions if men["ent_type"] == "team"]
print("\ntext:", text)
print("player:", player)
print("team_mention:", "; ".join([" ".join(men["words"]) for men in team_mentions]))
if len(player_mentions) == 1:
words = " ".join(player_mentions[0]["words"])
standard = player_mentions[0]["standard_form"][0]
else:
words = "; ".join([" ".join(men["words"]) for men in player_mentions])
standard = "; ".join([men["standard_form"][0] for men in player_mentions])
print("player_mention: ", words)
print("{} == {}".format(player, standard))
if player.lower() == standard.lower():
result = "PASS"
else:
result = "FAIL"
print(result)
results.append(result)
def load_lexicons(prepared_lexicons="../data/lexicons/lex.json"):
with open(prepared_lexicons, "r") as fin:
lexicons = json.load(fin)
lexicons = {
name: Lexicon.from_dict(d) for name, d in lexicons.items()
}
return lexicons
def main():
dm = DataManager(
season_dir_path="../data/season_info",
players_path="../data/nba-static-data/nba-players.json",
teams_path="../data/nba-static-data/nba-teams.json",
fun_facts_path="../data/ready-facts.json",
questions_path="../data/ready-questions.json",
standings_path="../data/nba-playoff-picture",
templates_path="../data/season-templates/templates.json"
)
aka_list = [
{"full_name": "<NAME>", "aka": ["<NAME>", "<NAME>"]},
{"full_name": "<NAME>", "aka": ["Shaq"]}
]
player_lex = Lexicon("player")
for player in dm.players:
refs = [process_ref(player["full_name"])]
player_lex.new_term(player["full_name"], info=player, referential_forms=refs)
for aka in aka_list:
player_lex.new_referential_phrase(
[process_ref(ref) for ref in aka["aka"]],
aka["full_name"])
team_lex = Lexicon("team")
for team in dm.teams:
refs = [process_ref(team[k]) for k in ["full_name", "city", "nickname"]] # "abbreviation"
team_lex.new_term(team["full_name"], info=team, referential_forms=refs)
lexicons = {
lex.name: lex for lex in [player_lex, team_lex]
}
ner_tagger = RuleBasedNER(lexicons)
results = []
for fact in dm.fun_facts["fun-facts"]["players"]["facts"]:
player_ner_demo(fact, ner_tagger, results)
results = Counter(results)
print("results:")
for k,v in results.items():
print("{}: {}".format(k, v))
print("accuracy:", results["PASS"]/ (results["PASS"] + results["FAIL"]))
results = []
for fact in dm.fun_facts["fun-facts"]["teams"]["facts"]:
team_ner_demo(fact, ner_tagger, results, lexicons["team"])
results = Counter(results)
print("results:")
for k,v in results.items():
print("{}: {}".format(k, v))
print("accuracy:", results["PASS"]/ (results["PASS"] + results["FAIL"]))
prepared_lexicons = "../data/lexicons/lex.json"
lexicons = {name: lex.to_dict() for name, lex in lexicons.items()}
with open(prepared_lexicons, "w") as fout:
json.dump(lexicons, fout, indent=2)
if __name__ == "__main__":
main()
<file_sep>import csv
import requests
import json
import logging
import argparse
import os
import spacy
import imdb
import pdb
from data_loader import load_ner_tagger
from data_utils import load_conversations, dump_conversations_to_file, popular_actors_list, popular_directors_list
from annotation_utils import DBPedia, process_text
from fuzzywuzzy import fuzz, process
from tqdm.auto import tqdm
from collections import defaultdict
nlp = spacy.load("en_core_web_lg")
def spotlight_annotate(text):
"""
    Annotate the text with DBpedia Spotlight (served by a local Spotlight
    instance on port 2222) using the default request parameters.
"""
payload = {
"text": text
}
headers = {
"Accept": "application/json"
}
response = requests.get(
"http://localhost:2222/rest/annotate",
headers=headers,
params=payload
)
# print(response.text)
return response.json()
def spotlight_annotate_conversations(conversations):
categories_to_not_stop_at = {
"http://dbpedia.org/resource/Romantic_comedy_film",
"http://dbpedia.org/resource/Cartoon",
"http://dbpedia.org/resource/Romance_film",
"http://dbpedia.org/resource/Thriller_(genre)",
"Lol! i am not a big tom fan at all."
}
# TODO : Use a secondary NER to identify mentions more accurately
blacklist_types = {"Schema:MusicAlbum"}
recognized_category_types = defaultdict(int)
for conversation in tqdm(conversations):
for message in conversation["messages"]:
if not message["text"]:
continue
spotlight_annotations = spotlight_annotate(process_text(message["text"]))
cleaned_mentions = []
if spotlight_annotations.get("Resources"):
for entity in spotlight_annotations["Resources"]:
if entity["@URI"] in DBPedia.BLACKLIST_URIS:
break
if entity["@types"]:
for t in entity["@types"].split(","):
recognized_category_types[t] += 1
# else:
# if entity["@URI"] not in categories_to_not_stop_at:
# pdb.set_trace()
cleaned_mentions = [entity for entity in spotlight_annotations["Resources"] if entity["@URI"] not in DBPedia.BLACKLIST_URIS]
message["spotlight_mentions"] = cleaned_mentions
return conversations
def spacy_ner_annotate(conversations):
recognized_category_types = defaultdict(int)
for conversation in tqdm(conversations):
for message in conversation["messages"]:
doc = nlp(process_text(message["text"]))
message_entities = []
for ent in doc.ents:
message_entities.append({
"surface": ent.text,
"type": ent.label_
})
recognized_category_types[ent.label_] += 1
message["spacy_mentions"] = message_entities
print(recognized_category_types)
return conversations
def link_mention_to_imdb(conversations, imdb_sqlite_path=None):
top_actors = popular_actors_list()
top_directors = popular_directors_list()
if imdb_sqlite_path:
ia = imdb.IMDb('s3', os.path.join('sqlite+pysqlite:///', args.imdb_sqlite_path))
else:
ia = imdb.IMDb()
match_cache = {}
for conversation in tqdm(conversations):
for message in conversation["messages"]:
imdb_records = []
if "spotlight_mentions" in message:
for mention in message["spotlight_mentions"]:
if "Schema:Person" in mention["@types"]:
# Actor disambiguation logic
surface_form = mention["@surfaceForm"]
if surface_form == "Goodnight":
continue
if surface_form.lower() == "jim" and "jim carrey" in message["text"].lower():
print("Jim converted to jim carrey")
surface_form = "Jim Carrey"
top_director_match, top_director_score = process.extractOne(surface_form, top_directors)
if top_director_score == 100:
# Tolerate only exact matches for director
print("Matched director ", top_director_match, "for ", surface_form)
                            cached_result = match_cache.get(top_director_match)
                            if cached_result:
                                imdb_records.append(cached_result)
                            else:
                                imdb_person_results = ia.search_person(top_director_match)
                                if len(imdb_person_results) > 0:
                                    imdb_records.append(imdb_person_results[0])
                                    match_cache[top_director_match] = imdb_person_results[0]
else:
top_actor_matches = process.extract(surface_form, top_actors, limit=5, scorer=fuzz.partial_ratio)
num_high_matches = len([match for match, score in top_actor_matches if score > 80])
if num_high_matches > 1:
print("Warning! Multiple matches for ", surface_form, " ; Top matches: ", top_actor_matches)
num_100_matches = len([match for match, score in top_actor_matches if score == 100])
if num_100_matches == 1:
best_match = top_actor_matches[0][0]
print("Choosing closest match ", best_match)
cached_result = match_cache.get(best_match)
if cached_result:
imdb_records.append(cached_result)
else:
imdb_person_results = ia.search_person(best_match)
if len(imdb_person_results) > 0:
imdb_records.append(imdb_person_results[0])
match_cache[best_match] = imdb_person_results[0]
else:
print("Too many high scoring matches!Ignoring!")
print("Utterance: ", message["text"])
elif num_high_matches == 0:
print("Warning! No match for surface form ", surface_form)
print("Utterance: ", message["text"])
else:
best_match = top_actor_matches[0][0]
cached_result = match_cache.get(best_match)
if cached_result:
imdb_records.append(cached_result)
else:
imdb_person_results = ia.search_person(best_match)
if len(imdb_person_results) > 0:
imdb_records.append(imdb_person_results[0])
match_cache[best_match] = imdb_person_results[0]
message["imdb_entries"] = [item.asXML() for item in imdb_records]
return conversations
def load_movie_mentions_csv(mentions_csv_path):
with open(mentions_csv_path, 'r') as csv_file:
return [row for row in csv.DictReader(csv_file)]
def fix_movie_mentions(conversations, movie_corrected_mentions, imdb_sqlite_path):
if imdb_sqlite_path:
ia = imdb.IMDb('s3', os.path.join('sqlite+pysqlite:///', args.imdb_sqlite_path))
else:
ia = imdb.IMDb()
def extract_imdb_id(url: str):
return url.replace("https://www.imdb.com/title/tt", "").replace("/", "")
for mention in movie_corrected_mentions:
conv_id = mention["conversation_index"]
message_index = mention["message_index"]
conversation = conversations[int(conv_id)]
message = conversation["messages"][int(message_index)]
message_text = message["text"]
movie_mentions_dict = conversation["movieMentions"]
if mention["Movie Name 1"]:
mention1 = mention["Movie Name 1"]
imdb_id_1 = extract_imdb_id(mention["IMDB ID 1"])
message_text = message_text.replace(mention1, imdb_id_1)
movie_1 = ia.get_movie(imdb_id_1)
movie_mentions_dict[imdb_id_1] = movie_1['title']
if mention["Movie Name 2"]:
mention2 = mention["Movie Name 2"]
imdb_id_2 = extract_imdb_id(mention["IMDB ID 2"])
movie_2 = ia.get_movie(imdb_id_2)
movie_mentions_dict[imdb_id_2] = movie_2['title']
message_text = message_text.replace(mention2, imdb_id_2)
if mention["Movie Name 3"]:
mention3 = mention["Movie Name 3"]
imdb_id_3 = extract_imdb_id(mention["IMDB ID 3"])
movie_3 = ia.get_movie(imdb_id_3)
movie_mentions_dict[imdb_id_3] = movie_3['title']
message_text = message_text.replace(mention3, imdb_id_3)
message["text"] = message_text
# Special corner case for correcting data
if conv_id == "504" and message_index == "11" :
message["text"] = message_text.replace("@Adam", "Adam")
return conversations
def add_genre_mentions(conversations):
ner_tagger = load_ner_tagger("gez/genre-phrases.tsv", "gez/movie-lex.json")
for conversation in tqdm(conversations):
for message in conversation["messages"]:
tokenized_text = ner_tagger.tokenize_text(message["text"])
mentions = ner_tagger.tag_tokens(tokenized_text)
message["genre_mentions"] = mentions
return conversations
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
'--dataset_path',
type=str,
default="redial"
)
parser.add_argument(
'--imdb_sqlite_path',
type=str,
default=None,
help="Path to the IMDB sqlite database. Optional, but makes search significantly faster"
)
args = parser.parse_args()
splits = {
"train": {"input_file": "train_data_imdb_corrected.jsonl", "output_file": "train_data_genre_tagged.jsonl"},
"test": {"input_file": "test_data_imdb_corrected.jsonl", "output_file": "test_data_genre_tagged.jsonl"}
}
for split, metadata in splits.items():
split_filepath = os.path.join(args.dataset_path, metadata["input_file"])
conversations = load_conversations(split_filepath)
annotated_conversations = add_genre_mentions(conversations)
out_filepath = os.path.join(args.dataset_path, metadata["output_file"])
dump_conversations_to_file(annotated_conversations, out_filepath)<file_sep>"""
Created by diesel
11/9/20
"""
import pandas as pd
import json
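# Sample ReDial conversation kept inline for reference; it is not used by the
# loader code below.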
d = {
'movieMentions': {
'111776': 'Super Troopers (2001)',
'91481': '<NAME> (1984)',
'151656': 'Police Academy (1984)',
'134643': 'American Pie (1999)',
'192131': 'American Pie ',
'124771': '48 Hrs. (1982)',
'94688': 'Police Academy 2: Their First Assignment (1985)',
'101794': 'Lethal Weapon (1987)'
},
'respondentQuestions': {
'111776': {
'suggested': 0,
'seen': 1,
'liked': 1
},
'91481': {
'suggested': 1,
'seen': 2,
'liked': 2
},
'151656': {
'suggested': 1,
'seen': 0,
'liked': 1
},
'134643': {
'suggested': 0,
'seen': 1,
'liked': 1
},
'192131': {
'suggested': 0,
'seen': 1,
'liked': 1
},
'124771': {
'suggested': 1,
'seen': 2,
'liked': 2
}, '94688': {
'suggested': 1,
'seen': 0,
'liked': 1
},
'101794': {
'suggested': 1,
'seen': 0,
'liked': 2
}
},
'messages': [
{
'timeOffset': 0,
'text': 'Hi I am looking for a movie like @111776',
'senderWorkerId': 956,
'messageId': 204171
},
{
'timeOffset': 48,
'text': 'You should watch @151656',
'senderWorkerId': 957,
'messageId': 204172
},
{
'timeOffset': 90,
'text': 'Is that a great one? I have never seen it. I have seen @192131',
'senderWorkerId': 956,
'messageId': 204173
},
{
'timeOffset': 122,
'text': 'I mean @134643',
'senderWorkerId': 956,
'messageId': 204174
},
{
'timeOffset': 180,
'text': 'Yes @151656 is very funny and so is @94688',
'senderWorkerId': 957,
'messageId': 204175
},
{
'timeOffset': 199,
'text': 'It sounds like I need to check them out',
'senderWorkerId': 956,
'messageId': 204176
},
{
'timeOffset': 219,
'text': 'yes you will enjoy them',
'senderWorkerId': 957,
'messageId': 204177
},
{
'timeOffset': 253,
'text': 'I appreciate your time. I will need to check those out. Are there any others you would recommend?',
'senderWorkerId': 956,
'messageId': 204178
},
{
'timeOffset': 297,
'text': 'yes @101794',
'senderWorkerId': 957,
'messageId': 204179
},
{
'timeOffset': 311,
'text': 'Thank you i will watch that too',
'senderWorkerId': 956,
'messageId': 204180
},
{
'timeOffset': 312,
'text': 'and also @91481',
'senderWorkerId': 957,
'messageId': 204181
},
{
'timeOffset': 326,
'text': 'Thanks for the suggestions.',
'senderWorkerId': 956,
'messageId': 204182
},
{
'timeOffset': 341,
'text': 'you are welcome',
'senderWorkerId': 957,
'messageId': 204183
},
{
'timeOffset': 408,
'text': 'and also @124771',
'senderWorkerId': 957,
'messageId': 204184
},
{
'timeOffset': 518,
'text': 'thanks goodbye',
'senderWorkerId': 956,
'messageId': 204185
}
],
'conversationId': '20001',
'respondentWorkerId': 957,
'initiatorWorkerId': 956,
'initiatorQuestions': {
'111776': {
'suggested': 0, 'seen': 1, 'liked': 1},
'91481': {
'suggested': 1, 'seen': 2, 'liked': 2},
'151656': {
'suggested': 1, 'seen': 0, 'liked': 1},
'134643': {
'suggested': 0, 'seen': 1, 'liked': 1},
'192131': {
'suggested': 0, 'seen': 1, 'liked': 1},
'124771': {
'suggested': 1, 'seen': 2, 'liked': 2},
'94688': {
'suggested': 1, 'seen': 0, 'liked': 1},
'101794': {
'suggested': 0, 'seen': 2, 'liked': 2}}}
def get_messages(infile):
with open(infile, "r") as fin:
messages = []
for line in fin:
d = json.loads(line)
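            # Map each distinct senderWorkerId to a speaker letter (A, B, ...)
            # in order of first appearance in the conversation.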
speaker_lookup = dict(zip(range(len("ABCDEF")), "ABCDEF"))
speakers = {}
sid = 0
for m in d["messages"]:
#assert m['senderWorkerId'] < 7
if m['senderWorkerId'] not in speakers:
speakers[m['senderWorkerId']] = speaker_lookup[sid]
sid += 1
messages.append({
"text": m["text"],
"speaker": speakers[m['senderWorkerId']]
})
return messages
class DataLoader(object):
def __init__(self, args, infile=None):
self.args = args
self.infile = infile
def load(self, infile=None):
if infile is None:
infile = self.infile
else:
self.infile = infile
messages = get_messages(infile)
return pd.DataFrame(messages)
from lexicon import Lexicon, LexBuilder
from rule_based_ner import RuleBasedNER
import file_utils as fu
import os
from collections import defaultdict
def load_ner_tagger(concept_path, lex_save_path):
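    # `concept_path` is a TSV with (at least) the columns `group` and `phrase`;
    # all phrases sharing a group become referential forms of one lexicon entry.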
df = pd.read_csv(concept_path, sep="\t")
builder = LexBuilder()
key_phrases = defaultdict(list)
for row in df.itertuples():
key_phrases[row.group.strip()].append(row.phrase.strip())
for standard_form, forms in key_phrases.items():
# entry is dictionary
builder.add_entry({
"category": "movie_genre",
"standard_form": standard_form,
"forms": forms,
"name": standard_form,
"full_name": standard_form,
})
builder.build_lexes()
#builder.save_lexes(os.path.join("./", f"{topic_name}-lex.json"))
builder.save_lexes(lex_save_path)
#print("entry keys:", builder.get_entry_key_set())
return RuleBasedNER(builder.lexicons)
def main():
infile = "redial/train_data.jsonl"
loader = DataLoader(None)
messages = loader.load(infile)
ner_tagger = load_ner_tagger("gez/genre-phrases.tsv", "gez/movie-lex.json")
mentions = []
for row in messages.itertuples():
#print("\ntext:", row.text)
toks = ner_tagger.tokenize_text(row.text)
mentions.append(json.dumps(ner_tagger.tag_tokens(toks)))
messages["genre_mentions"] = mentions
#df = pd.DataFrame(messages)
#df.to_csv("redial/train_data.messages.tsv")
messages.to_csv("redial/train-genre-mentions.tsv", sep="\t", index=False)
if __name__ == "__main__":
main()
<file_sep>import pickle
from imdb import IMDb
import pandas as pd
from copy import deepcopy
import json
from imdb.Person import Person
from imdb.Movie import Movie
from imdb.Company import Company
from tqdm import tqdm
ia = IMDb()

# Module-level caches shared by the dump_* helpers below so that each person,
# movie, and company is fetched and serialized at most once.
all_movies = {}
all_people = {}
all_companies = {}
def dump_person(p, max_n=None):
"""
print("person:", p)
print("current_info:", p.current_info)
print("infoset2keys:", p.infoset2keys)
d = dict(p)
print(d)
print("\n\n")
for name, attr_set in p.infoset2keys.items():
for attr in attr_set:
print(f"{attr} = {p.get(attr)}")
print("\n\n", "-"*20)
#p = ia.get
ia.update(p, info=["main", "biography", "awards"])
print("person:", p)
print("current_info:", p.current_info)
print("infoset2keys:", p.infoset2keys)
d = dict(p)
print(d)
print("\n\n")
for name, attr_set in p.infoset2keys.items():
print(name.upper())
for attr in attr_set:
r = p.get(attr)
if isinstance(r, dict):
r = r.keys()
print(f"{attr} = {r}")
"""
print(" * _person_", p)
if p.myID not in all_people:
ia.update(p, info=["main", "biography", "awards"])
all_people[p.myID] = dump_item(dict(p), max_n)
return p.myID
def dump_company(c, max_n=None):
print(" * _company_", c)
if c.myID not in all_companies:
ia.update(c, info=["main",])
all_companies[c.myID] = dump_item(dict(c), max_n)
return c.myID
def dump_movie(m, max_n=None):
print(" * _movie_", m)
if m.movieID not in all_movies:
all_movies[m.movieID] = True
temp_d = dict(m)
movie_d = {}
for k, attr in temp_d.items():
movie_d[k] = dump_item(attr, max_n)
all_movies[m.movieID] = movie_d
return m.movieID
def dump_item(val, max_n=None):
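    # Recursively convert IMDbPY objects into JSON-serializable values,
    # replacing Person/Movie/Company objects with their ids and truncating
    # lists to at most max_n items.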
if isinstance(val, list):
trunc_val = len(val) if max_n is None else max_n
dumped = [dump_item(v, max_n) for v in val[:trunc_val]]
elif isinstance(val, dict):
dumped = {k: dump_item(v, max_n) for k,v in val.items()}
elif isinstance(val, Person):
dumped = dump_person(val, max_n)
elif isinstance(val, Movie):
dumped = dump_movie(val, max_n)
elif isinstance(val, Company):
dumped = dump_company(val, max_n)
else:
# base case
dumped = val
return dumped
def main():
print("loading data ...")
with open("imdb_data.pkl", "rb") as fin:
imdb_data = pickle.load(fin)
print("aggregating data ...")
for k, movie in tqdm(list(imdb_data["with_imdb_key"].items())):
all_movies[k] = dump_item(movie, 1)
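        # NOTE: only the first movie is processed because of this early break.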
break
print("saving data ...")
for name, data in {"movies": all_movies, "people": all_people, "companies": all_companies}.items():
with open(f"all-{name}.json", "w") as fout:
json.dump(data, fout, indent=2)
def demo_main():
#ia = IMDb()
print("ia.get_movie_infoset():", ia.get_movie_infoset())
print("ia.get_person_infoset():", ia.get_person_infoset())
print("ia.get_company_infoset():", ia.get_company_infoset())
exit(0)
with open("imdb_data.pkl", "rb") as fin:
d = pickle.load(fin)
all_movies = {}
all_people = {}
for k, v in d["with_imdb_key"].items():
# print(f"{k}: {type(v)}")
print(f"{v.myTitle}:", v.current_info)
print(v.infoset2keys)
print(v.get("title"))
print(v.get("taglines"))
print("median" in v)
ia.update(v, info=['taglines', 'vote details'])
print(f"{v.myTitle}:", v.current_info)
print(v.infoset2keys)
print(v.get("title"))
print(v.get("taglines"))
print("median" in v)
for a in ['demographics', 'number of votes', 'arithmetic mean', 'median']:
print(f"{a}:", v.get(a))
for kk, vv in v.items():
# print(f"{kk}: {vv}")
# if kk == "cast":
if False:
p = vv[0]
print("-" * 10)
print("cast[0]")
print(p.summary())
print(p.myName)
print(p.personID)
print(p.billingPos)
for a in p.__dict__.keys():
print(f"{a}: {p.get(a)}")
print("-" * 10)
for attr_name in p.keys_alias:
print(f"{attr_name}: {p.get(attr_name)}")
print("-" * 20)
p = dict(name='', personID=p.personID, myName=p.myName,
myID=p.myID, data=p.data,
currentRole=p.currentRole,
roleIsPerson=p._roleIsPerson,
notes=p.notes, accessSystem=p.accessSystem,
titlesRefs=p.titlesRefs,
namesRefs=p.namesRefs,
charactersRefs=p.charactersRefs)
for _k, _v in p.items():
print(f"{_k}: {_v}")
_d = dict(v)
print(_d)
if input(">"):
exit(0)
df = pd.DataFrame([dict(movie) for movie in d["with_imdb_key"]])
df.to_csv("imdb-movies.tsv", sep="\t", index=False)
print("df shape:", df.shape)
if __name__ == "__main__":
#demo_main()
main()
<file_sep>from .redial import RedialDialoGPTDataset, RedialTransferTransfoDataset<file_sep>
"""
This script computes standardized evaluation metrics such as F1, BLEU-4,
ROUGE-L, and METEOR for generated responses against a set of reference
responses.
"""
import argparse
import glob
import logging
import os
import pickle
import pprint
from collections import Counter
import nlgeval
from datasets import load_metric
import nltk
from nltk.translate.meteor_score import meteor_score
import numpy as np
class ReferenceMetric(object):
"""
Metric that requires a reference sentence for each
hypothesis to compute
"""
def compute(self, hypotheses, references):
raise NotImplementedError("Implement the compute method!")
class ReferenceFreeMetric(object):
"""
Metric that does not require a reference sentence
"""
def compute(self, hypotheses):
raise NotImplementedError("Implement the compute method!")
class NLPReferenceMetric(ReferenceMetric):
"""
Reference dependent metrics that are part of the
Huggingface NLP library
"""
def __init__(self, module, compute_args={}):
self.scorer = load_metric(module)
self.compute_args = compute_args
def compute(self, hypotheses, references):
return self.scorer.compute(predictions=hypotheses, references=references, **self.compute_args)
class BLEUMetric(NLPReferenceMetric):
def __init__(self):
super().__init__('bleu')
def __repr__(self):
return 'BLEU-4'
def compute(self, hypotheses, references):
return super().compute([hyp.split() for hyp in hypotheses], [[ref.split()] for ref in references])
class RougeMetric(NLPReferenceMetric):
def __init__(self):
super().__init__('rouge')
def __repr__(self):
return 'ROUGE'
class BertScoreMetric(NLPReferenceMetric):
def __init__(self):
self.arg_dict = {"lang": "en"}
super().__init__('bertscore', self.arg_dict)
def __repr__(self):
return f'BertScore({self.arg_dict})'
def compute(self, hypotheses, references):
return sum(super().compute(hypotheses, references)['f1']) / len(hypotheses)
class UnigramFScoreMetric(ReferenceMetric):
def __init__(self, beta=1):
self.beta = beta
        self.beta_squared = beta * beta  # F_beta uses beta^2 as the weighting factor
def _f_beta(self, pred, true):
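        # Token-overlap F_beta. Worked example (beta=1): pred = ["the", "cat", "sat"],
        # true = ["the", "cat", "ran"] -> overlap 2, precision 2/3, recall 2/3, F1 = 2/3.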
common = Counter(true) & Counter(pred)
num_same = sum(common.values())
if num_same == 0:
return 0
prec = num_same / len(pred)
rec = num_same / len(true)
f_beta = ((self.beta_squared + 1) * prec * rec) / (self.beta_squared * prec + rec)
return f_beta
def compute(self, hypotheses, references):
return sum([self._f_beta(hyp.split(), ref.split()) for hyp, ref in zip(hypotheses, references)]) / len(references)
def __repr__(self):
return f'F{self.beta}-score'
class MeteorMetric(ReferenceMetric):
"""
Computes the average METEOR score across all examples
"""
def compute(self, hypotheses, references):
try:
nltk.data.find('wordnet')
except LookupError:
nltk.download('wordnet')
return sum([meteor_score([ref], hyp) for (ref, hyp) in zip(hypotheses, references)]) / len(references)
def __repr__(self):
return f'METEOR score'
class NGramDiversity(ReferenceFreeMetric):
def __init__(self, n=1):
self.n = n
def _diversity(self, pred):
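        # Ratio of unique n-grams to total n-grams within one utterance, e.g. for n=2
        # "a b a b" -> bigrams ["a b", "b a", "a b"] -> 2 unique / 3 total.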
n_grams = []
for i in range(len(pred) - self.n + 1):
n_grams.append(' '.join(pred[i:i + self.n]))
if len(n_grams) == 0:
return 0
return len(set(n_grams)) / len(n_grams)
def compute(self, hypotheses):
return sum([self._diversity(hyp.split()) for hyp in hypotheses]) / len(hypotheses)
def __repr__(self):
return f'Utterance {self.n}-gram diversity'
class CorpusNGramDiversity(ReferenceFreeMetric):
"""
Computes the number of unique n-grams that were generated by the model
out of the total number of n-grams present
"""
def __init__(self, n=1):
self.n = n
def compute(self, hypotheses):
n_grams = []
for hyp in hypotheses:
pred = hyp.split()
for i in range(len(pred) - self.n + 1):
n_grams.append(' '.join(pred[i:i + self.n]))
if len(n_grams) == 0:
return 0
return len(set(n_grams)) / len(n_grams)
def __repr__(self):
return f'Corpus {self.n}-gram diversity'
class NLGEval(ReferenceMetric):
"""
Runs the full NLGEval pipeline which computes multiple machine translation
metrics:
    1. BLEU
    2. METEOR
    3. ROUGE
    4. CIDEr
    5. Skip Thought
    6. Embedding Average
    7. Vector Extrema
    8. Greedy matching
"""
def __init__(self, hypothesis_file, reference_file):
self.metrics_dict = nlgeval.compute_metrics(hypothesis=hypothesis_file,
references=[reference_file], no_skipthoughts=True, no_glove=True)
def __repr__(self):
return 'NLGEval metrics'
def compute(self, hypotheses, references):
return pprint.pformat(self.metrics_dict)
class USRMetric(ReferenceFreeMetric):
"""
Computes the USR score as defined by Mehri and Eskenazi (2019)
Currently the code computes the following:
1. MLM objective
"""
def _make_scoring_file(self, context_file, hypothesis_file):
scratch_path = '../submissions/scratch'
with open(context_file, 'r') as context_, open(hypothesis_file, 'r') as hypothesis_, open(scratch_path, 'w') as scratch_file:
context_lines = [line.strip() for line in context_]
hypothesis_lines = [line.strip() for line in hypothesis_]
for (c, h) in zip(context_lines, hypothesis_lines):
scratch_file.writelines(f'{c} _eos _go {h}\n')
return scratch_path
def _compute_regression_scores(self, mlm_score, dr_c_scores, dr_f_scores):
# Understandable (MLM), Natural (MLM), Maintains Context (DR-c), Interesting (DR-c), Uses Knowledge (DR-f)
common_len = min(len(mlm_score), len(dr_c_scores), len(dr_f_scores))
mlm_score = mlm_score[:common_len]
dr_c_scores = dr_c_scores[:common_len].tolist()
dr_f_scores = dr_f_scores[:common_len].tolist()
X = np.array([mlm_score, mlm_score, dr_c_scores, dr_c_scores, dr_f_scores]).T
with open('../usr/examples/regr.pkl', 'rb') as regression_model_file:
model = pickle.load(regression_model_file)
y = model.predict(X)
return y.tolist()
def _compute_dr_score(self, args):
from usr.examples.train_understandable import MODEL_CLASSES, WEIGHTS_NAME, predict
config_class, model_class, tokenizer_class = MODEL_CLASSES[args.model_type]
tokenizer = tokenizer_class.from_pretrained(args.output_dir, do_lower_case=args.do_lower_case)
checkpoints = [args.output_dir]
preds = None
if args.eval_all_checkpoints:
checkpoints = list(
os.path.dirname(c) for c in sorted(glob.glob(args.output_dir + '/**/' + WEIGHTS_NAME, recursive=True)))
logging.getLogger("transformers.modeling_utils").setLevel(logging.WARN) # Reduce logging
for checkpoint in checkpoints:
global_step = checkpoint.split('-')[-1] if len(checkpoints) > 1 else ""
prefix = checkpoint.split('/')[-1] if checkpoint.find('checkpoint') != -1 else ""
model = model_class.from_pretrained(checkpoint)
model.to(args.device)
preds = predict(args, model, tokenizer, args.data_dir, prefix=prefix)
return preds
def make_args(self, model_path, scoring_file):
args = argparse.Namespace()
args.per_gpu_eval_batch_size = 1
args.output_dir = model_path
args.model_type = "roberta"
args.model_name_or_path = "roberta-base"
args.data_dir = scoring_file
args.do_eval = True
args.task_name = "qqp"
args.do_lower_case = False
args.device = "cuda"
args.eval_all_checkpoints = False
args.max_seq_length = 128
args.local_rank = -1
args.n_gpu = 1
args.overwrite_cache = True
args.output_mode = 'classification' # QQP task uses classification
return args
def _compute_dr_c_score(self, scoring_file):
args = self.make_args("../usr/examples/ctx", scoring_file)
preds = self._compute_dr_score(args)
return preds
def _compute_dr_f_score(self, scoring_file):
args = self.make_args("../usr/examples/uk", scoring_file)
return self._compute_dr_score(args)
def __init__(self, context_file, fact_file, hypothesis_file):
self.lm_scoring_file = self._make_scoring_file(context_file, hypothesis_file)
self.dr_scoring_file = self._make_dr_scoring_file(context_file, fact_file, hypothesis_file)
def compute_mlm_scores(self, scoring_file):
args = self.build_args(scoring_file)
from usr.examples.run_lm_finetuning import evaluate, MODEL_CLASSES, WEIGHTS_NAME
config_class, model_class, tokenizer_class = MODEL_CLASSES[args.model_type]
config = config_class.from_pretrained(args.config_name if args.config_name else args.model_name_or_path,
cache_dir=args.cache_dir if args.cache_dir else None)
tokenizer = tokenizer_class.from_pretrained(
args.tokenizer_name if args.tokenizer_name else args.model_name_or_path,
do_lower_case=args.do_lower_case,
cache_dir=args.cache_dir if args.cache_dir else None)
if args.block_size <= 0:
args.block_size = tokenizer.max_len_single_sentence
model = model_class.from_pretrained(args.model_name_or_path,
from_tf=bool('.ckpt' in args.model_name_or_path),
config=config,
cache_dir=args.cache_dir if args.cache_dir else None)
model.to(args.device)
result = None
checkpoints = [args.output_dir]
if args.eval_all_checkpoints:
checkpoints = list(
os.path.dirname(c) for c in sorted(glob.glob(args.output_dir + '/**/' + WEIGHTS_NAME, recursive=True)))
logging.getLogger("transformers.modeling_utils").setLevel(logging.WARN) # Reduce logging
for checkpoint in checkpoints:
global_step = checkpoint.split('-')[-1] if len(checkpoints) > 1 else ""
prefix = checkpoint.split('/')[-1] if checkpoint.find('checkpoint') != -1 else ""
model = model_class.from_pretrained(checkpoint)
model.to(args.device)
result = evaluate(args, model, tokenizer, prefix=prefix)
# result = dict((k + '_{}'.format(global_step), v) for k, v in result.items())
# results.update(result)
return result
def build_args(self, scoring_file):
args = argparse.Namespace()
args.output_dir = '../usr/examples/roberta_ft'
args.model_type = 'roberta'
args.train_data_file = scoring_file
args.per_gpu_eval_batch_size = 1
args.model_name_or_path = 'roberta-base'
args.eval_data_file = scoring_file
args.do_eval = True
args.mlm = True
args.device = "cuda"
args.local_rank = -1
args.n_gpu = 0
args.config_name = ""
args.cache_dir = ""
args.tokenizer_name = ""
args.do_lower_case = False
args.eval_all_checkpoints = False
args.block_size = -1
return args
def __repr__(self):
return 'USR Metric Fine-tuned on Topical Chats'
def compute(self, hypotheses):
mlm_scores = self.compute_mlm_scores(self.lm_scoring_file)
dr_c_scores = self._compute_dr_c_score(self.dr_scoring_file)
dr_f_scores = self._compute_dr_f_score(self.dr_scoring_file)
usr_scores = self._compute_regression_scores(mlm_scores, dr_c_scores, dr_f_scores)
self.results = usr_scores
return sum(self.results) / len(self.results) if len(self.results) > 0 else 0
def _make_dr_scoring_file(self, context_file, fact_file, hypothesis_file):
scratch_dir = '../submissions/'
scratch_path = scratch_dir + 'dev.tsv'
with open(context_file, 'r') as ctx, open(fact_file, 'r') as fct, open(hypothesis_file) as resp, open(scratch_path, 'w') as scratch_file:
contexts = [line.strip() for line in ctx]
facts = [line.strip() for line in fct]
responses = [line.strip() for line in resp]
for (c, f, r) in zip(contexts, facts, responses):
scratch_file.write(f'0\t1\t2\t{c} {f} _eos\t_go {r}\t0\n')
return scratch_dir
if __name__ == '__main__':
parser = argparse.ArgumentParser()
# Currently support only single hypotheses scoring
parser.add_argument('--context_file',
type=str,
default='processed_output/valid_freq.src')
parser.add_argument('--predictions_file',
type=str,
default="submissions/submissions.txt",
help='File containing output predictions')
parser.add_argument('--references_file',
type=str,
default='processed_output/valid_freq.tgt',
help='File containing the reference responses')
parser.add_argument('--fact_file',
type=str,
default='processed_output/valid_freq.fct',
help='File containing reference facts')
args = parser.parse_args()
with open(args.predictions_file, 'r') as predictions_file:
predictions = [line
.strip()
.replace(".", " .").replace("?", " ?")
.replace(",", " ,")
.replace("'", " ' ")
.replace("dn't", "d n't")
for line in predictions_file]
with open(args.references_file, 'r') as references_file:
references = [line.replace("_go", "").replace("_eos", "").strip() for line in references_file]
assert len(predictions) == len(references), "The number of predictions and references do not match!"
for prediction in predictions:
assert prediction.strip() != "", "Predictions cannot be empty!"
metrics = [
# BLEUMetric(),
# RougeMetric(),
BertScoreMetric(),
# MeteorMetric(),
UnigramFScoreMetric(),
NGramDiversity(n=1),
NGramDiversity(n=2),
CorpusNGramDiversity(n=1),
CorpusNGramDiversity(n=2),
NLGEval(args.predictions_file, args.references_file),
# USRMetric(args.context_file, args.fact_file, args.predictions_file)
]
print(f"Number of examples n={len(predictions)}\n")
for metric in metrics:
if isinstance(metric, ReferenceFreeMetric):
print(metric, ":")
print(metric.compute(predictions))
else:
print(metric, ":")
print(metric.compute(predictions, references))
<file_sep>import json
import argparse
import re
import csv
import os
from tqdm.auto import tqdm
from data_utils import load_conversations
def identify_mislabeled_movies(conversations, output_file_path):
    mislabeled_movie_regex = re.compile(r"@[A-Za-z]", re.MULTILINE)
mislabeled_movie_utterances = []
for i, conversation in enumerate(tqdm(conversations)):
for j, message in enumerate(conversation["messages"]):
            match = mislabeled_movie_regex.search(message["text"])
if match:
mislabeled_movie_utterances.append({
"conversation_index": i,
"message_index": j,
"messageId": message["messageId"],
"text": message["text"]
})
with open(output_file_path, 'w') as mislabeled_utterances_file:
writer = csv.DictWriter(mislabeled_utterances_file, fieldnames=["conversation_index", "message_index", "text", "messageId"])
writer.writeheader()
writer.writerows(mislabeled_movie_utterances)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('data_path')
args = parser.parse_args()
splits = {
"train": {"input_file": "train_data_spacy.jsonl", "output_file": "train_utterances_with_mislabeled_movies.csv"},
"test": {"input_file": "test_data_spacy.jsonl", "output_file": "test_utterances_with_mislabeled_movies.csv"}
}
for splitname, split_metadata in splits.items():
input_file_path = os.path.join(args.data_path, split_metadata["input_file"])
output_file_path = os.path.join(args.data_path, split_metadata["output_file"])
conversations = load_conversations(input_file_path)
identify_mislabeled_movies(conversations, output_file_path)<file_sep>import numbers
from collections.abc import Sequence
from torch.optim.lr_scheduler import _LRScheduler
class PiecewiseLinearLR(_LRScheduler):
"""
    LR scheduler that linearly interpolates the learning rate between
    (milestone, value) pairs over the course of training, based off the
    Ignite implementation.
    I didn't want to change any of the original setup from TransferTransfo,
    so it felt more appropriate to retrofit a custom scheduler.
"""
def __init__(self, optimizer, milestones_values, last_epoch=-1):
values = []
milestones = []
for pair in milestones_values:
if not isinstance(pair, Sequence) or len(pair) != 2:
raise ValueError("Argument milestones_values should be a list of pairs (milestone, param_value)")
if not isinstance(pair[0], numbers.Integral):
raise ValueError("Value of a milestone should be integer, but given {}".format(type(pair[0])))
if len(milestones) > 0 and pair[0] < milestones[-1]:
raise ValueError("Milestones should be increasing integers, but given {} is smaller "
"than the previous milestone {}".format(pair[0], milestones[-1]))
milestones.append(pair[0])
values.append(pair[1])
self.values = values
self.milestones = milestones
self._index = 0
self.last_epoch = last_epoch
super(PiecewiseLinearLR, self).__init__(optimizer, last_epoch)
def _get_start_end(self):
if self.milestones[0] > self.last_epoch:
return self.last_epoch - 1, self.last_epoch, self.values[0], self.values[0]
elif self.milestones[-1] <= self.last_epoch:
return self.last_epoch, self.last_epoch + 1, self.values[-1], self.values[-1],
elif self.milestones[self._index] <= self.last_epoch < self.milestones[self._index + 1]:
return self.milestones[self._index], self.milestones[self._index + 1], \
self.values[self._index], self.values[self._index + 1]
else:
self._index += 1
return self._get_start_end()
def get_lr(self):
start_index, end_index, start_value, end_value = self._get_start_end()
return [start_value + (end_value - start_value) * (self.last_epoch - start_index) / (end_index - start_index)]
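

# Minimal sanity-check sketch (illustrative only; the optimizer, learning rate,
# and step count below are assumptions, not values taken from this project):
# the LR should fall linearly from 1e-3 to 0 over 10 scheduler steps.
if __name__ == "__main__":
    import torch

    dummy_param = torch.nn.Parameter(torch.zeros(1))
    optimizer = torch.optim.SGD([dummy_param], lr=1e-3)
    scheduler = PiecewiseLinearLR(optimizer, milestones_values=[(0, 1e-3), (10, 0.0)])
    for step in range(11):
        print(step, optimizer.param_groups[0]["lr"])
        optimizer.step()
        scheduler.step()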
<file_sep>class GlobalStepCounter(object):
def __init__(self):
self.num_steps = 0
def get(self):
return self.num_steps
def step(self):
self.num_steps += 1<file_sep>"""
Created by diesel
11/11/20
"""
def main():
with open("genres.txt", "r") as fin:
lines = [l for l in fin.read().split("\n") if l]
all_terms = []
for line in lines:
phrases = line.split(";")
all_terms.extend([p.strip() for p in phrases if p.strip()])
all_terms = list(set(all_terms))
with open("genres-clean.txt", "w") as fout:
fout.write("\n".join(all_terms))
if __name__ == "__main__":
main()
<file_sep>"""
Created by diesel
11/9/20
"""
from lexicon import Lexicon, LexBuilder
from rule_based_ner import RuleBasedNER
import file_utils as fu
from shutil import copyfile
import unidecode
import os
import json
import nltk
from collections import Counter
from collections import defaultdict
import pandas as pd
import spacy
nlp = spacy.load('en_core_web_sm')
def unicode_2_ascii(text):
#if isinstance(text, unicode):
text = unidecode.unidecode(text)
return text
def iter_lines(lines):
for line in lines:
yield line
def unpack_entry(first, lines, val_lists=False):
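    # Parse one plain-text lexicon entry: `first` is the category header line and
    # `lines` yields "slot: value" lines until the first blank line; list-valued
    # slots are split on ";" (or on "," when one is present).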
# first line is the etype
#if not first:
d = {
"category": first.strip(" :").lower()
}
    single_string_values = {"text", "follow_up", "followup", "tid", "evaluative_language",
                            "dact", "etype", "mod", "rel", "primary_dact", "uid"}
#print("first:", first)
#print("lines[0]:", lines[0])
for line in lines:
if line.startswith("#"):
continue
line = line.strip()
# stop at first empty line
if not line:
break
splitline = line.split(":", maxsplit=1)
#print("splitline:", splitline)
slot_name, slot_vals = splitline
slot_vals = unicode_2_ascii(slot_vals)
if (val_lists and slot_name not in single_string_values) or slot_name in {"reference_forms", "forms", "referential_expressions"}:
if "," in slot_vals:
slot_vals = [v.strip() for v in slot_vals.split(",") if v.strip()]
#print("ifif:", slot_vals)
else:
slot_vals = [v.strip() for v in slot_vals.split(";") if v.strip()]
#print("ifelse")
else:
slot_vals = slot_vals.strip()
#print("else")
slot_name = slot_name.strip().lower()
if slot_name == "tid":
slot_name = "uid"
elif slot_name == "followup":
slot_name = "follow_up"
d[slot_name] = slot_vals
#print(f"{slot_name}: {slot_vals}")
return d, d["category"]
def extract_referencial_forms(e, cat, fullname_only=False):
if cat == "person":
names = [e.get(name) for name in ["first_name", "last_name", ] if e.get(name)]
full_name = " ".join(names)
forms = [full_name]
if not fullname_only and e.get("last_name"):
forms.append(e["last_name"])
e["full_name"] = full_name
elif cat == "book":
forms = [e[attr] for attr in ["full_name", "nick_name" ] if e.get(attr)]
if e.get("nick_name") and e["nick_name"].startswith("the "):
forms.append(e["nick_name"][len("the "):].strip())
_ref_field = "reference_forms"
if e.get(_ref_field):
if not isinstance(e[_ref_field], list):
e[_ref_field] = [e[_ref_field]]
forms.extend(e[_ref_field])
else:
forms = [e[name] for name in ["name", "nick_name", "full_name",] if e.get(name)]
if e.get("forms"):
forms.extend(e["forms"])
forms = list(set(forms))
if not e.get("full_name") and e.get("name"):
e["full_name"] = e["name"]
#print("e forms:", e["forms"])
if forms:
e["forms"] = forms
return e
def demo():
center_dir = "../center/astext-cleaned/"
topic_name = "animals"
#fact_infile_name = f"{topic_name}-facts.txt"
lex_infile_name = f"{topic_name}-lex.txt"
lex_input_path = os.path.join(center_dir, lex_infile_name)
lines = fu.read_lines(lex_input_path)
lines = iter_lines(lines)
builder = LexBuilder()
# entry is dictionary
builder.add_entry({
'category': 'animal',
'standard_form': 'Cat',
'forms': ['kittens', 'kitten', 'cats', 'feline', 'felines', 'kitty', 'kitty cats', 'kitty cat', 'Cat', 'kitties'],
'name': 'Cat',
'full_name': 'Cat'
})
builder.add_entry({
'category': 'animal',
'standard_form': 'Dog',
'forms': ['Dog', 'canine', 'doggy', 'dogs', 'puppies', 'puppy'],
'name': 'Dog',
'full_name': 'Dog'
})
builder.add_entry({
'category': 'concept',
'standard_form': 'gender',
'forms': ['gender'],
'name': 'gender',
'full_name': 'gender'
})
builder.add_entry({
'category': 'concept',
'standard_form': 'intelligence',
'forms': ['learning', 'intelligence', 'cognitive ability'],
'name': 'intelligence',
'full_name': 'intelligence'
})
builder.add_entry({
'category': 'concept',
'standard_form': 'anatomy',
'forms': ['anatomy'],
'name': 'anatomy',
'full_name': 'anatomy'
})
builder.build_lexes()
builder.save_lexes(os.path.join("./", f"{topic_name}-lex.json"))
print("entry keys:", builder.get_entry_key_set())
ner_tagger = RuleBasedNER(builder.lexicons)
texts = ["i think cats dog", "i think cats learning gender and anatomy", "i think cats learning",
"this is cats weird and dogs"]
for text in texts:
print("\ntext:", text)
toks = ner_tagger.tokenize_text(text)
mentions = ner_tagger.tag_tokens(toks)
print("mentions:")
for m in mentions:
print(" - ", m)
print()
def main():
center_dir = "../center/astext-cleaned/"
topic_name = "hockey"
topic_name = "animals"
#topic_name = "activities"
#topic_name = "harry_potter"
#fact_infile_name = f"{topic_name}-facts.txt"
lex_infile_name = f"{topic_name}-lex.txt"
lex_input_path = os.path.join(center_dir, lex_infile_name)
lines = fu.read_lines(lex_input_path)
lines = iter_lines(lines)
builder = LexBuilder()
for j, line in enumerate(lines):
line = line.strip()
if not line:
continue
if line.startswith("#"):
continue
entry, etype = unpack_entry(line, lines)
# entry is dictionary
entry = extract_referencial_forms(entry, etype, fullname_only=True)
builder.add_entry(entry, etype)
builder.build_lexes()
builder.save_lexes(os.path.join("./", f"{topic_name}-lex.json"))
print("entry keys:", builder.get_entry_key_set())
ner_tagger = RuleBasedNER(builder.lexicons)
texts = ["i think cats dog", "i think cats learning gender and anatomy", "i think cats learning",
"this is cats weird and dogs"]
for text in texts:
print("\ntext:", text)
toks = ner_tagger.tokenize_text(text)
mentions = ner_tagger.tag_tokens(toks)
print("mentions:")
for m in mentions:
print(" - ", m)
print()
if __name__ == "__main__":
#main()
demo()
<file_sep>import csv
import imdb
import pickle
import argparse
import os
import requests
from collections import defaultdict
from tqdm.auto import tqdm, trange
from bs4 import BeautifulSoup
def retrieve_movies_data_from_imdb(args):
"""
Given the merged movies data, retrieve data for ones which have a matching IMDB id
"""
if args.imdb_sqlite_path:
ia = imdb.IMDb('s3', os.path.join('sqlite+pysqlite:///', args.imdb_sqlite_path))
else:
ia = imdb.IMDb()
movies_data = defaultdict(dict)
with open(args.merged_movie_data_path, 'r') as merged_movies_file:
reader = csv.DictReader(merged_movies_file)
for row in tqdm(reader):
database_id = row['databaseId']
imdb_id = row['imdbId']
if database_id != '-1' and imdb_id != '-1': # We can definitively identify and retrieve these movies
try:
movies_data['with_imdb_key'][database_id] = movie = ia.get_movie(imdb_id)
print("Processed movie:", movie)
except imdb.IMDbError as e:
print("Exception", e)
movies_data['without_imdb_key'][database_id] = row
print("Skipped movie", row['movieName'])
else: # Let's kick the can down the road by dealing with it later
movies_data['without_imdb_key'][database_id] = row
print("Skipped movie", row['movieName'])
return movies_data
def scrape_imdb_list(url, n_pages):
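    # Scrape item names from a paginated IMDb list; `url` must contain a "{}"
    # placeholder that is filled with the page number.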
item_list = []
index = 1
for i in trange(1, n_pages + 1):
formatted_url = url.format(str(i))
response = requests.get(formatted_url)
if response.status_code == 200:
html = response.text
soup = BeautifulSoup(html, 'html.parser')
for header in soup.find_all('h3', "lister-item-header"):
item_name = header.a.get_text().strip()
item_struct = {
'id': index,
'name': item_name
}
item_list.append(item_struct)
index += 1
headers = ['id', 'name']
return (item_list, headers)
def scrape_imdb_top_1000_actors():
url = "https://www.imdb.com/list/ls058011111/?sort=list_order,asc&mode=detail&page={}"
actor_list, headers = scrape_imdb_list(url, 10)
with open('top_1000_actors.csv', 'w') as top_actors_file:
writer = csv.DictWriter(top_actors_file, fieldnames=headers)
writer.writeheader()
writer.writerows(actor_list)
def scrape_imdb_top_250_directors():
url = "https://www.imdb.com/list/ls008344500/"
director_list, headers = scrape_imdb_list(url, 3)
with open('top_250_directors.csv', 'w') as top_directors_file:
writer = csv.DictWriter(top_directors_file, fieldnames=headers)
writer.writeheader()
writer.writerows(director_list)
def scrape_imdb_top_500_directors():
url = "https://www.imdb.com/list/ls039888167/"
director_list, headers = scrape_imdb_list(url, 5)
with open('top_500_directors.csv', 'w') as top_directors_file:
writer = csv.DictWriter(top_directors_file, fieldnames=headers)
writer.writeheader()
writer.writerows(director_list)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--merged_movie_data_path',
default='redial/movies_merged_with_imdb.csv',
type=str,
help='Path to the merged file of imdb info and movielens data'
)
parser.add_argument('--imdb_sqlite_path',
default='',
type=str,
help='Path to the IMDB sqlite data (for offline retrieval)'
)
args = parser.parse_args()
# movies_data = retrieve_movies_data_from_imdb(args)
# with open('imdb_data.pkl', 'wb') as movie_imdb_pickle_file:
# pickle.dump(movies_data, movie_imdb_pickle_file)
# scrape_imdb_top_1000_actors()
scrape_imdb_top_500_directors()<file_sep>import csv
import json
import os
import pickle
import re
import pdb
from bs4 import BeautifulSoup
from collections import namedtuple
from transformers import GPT2Tokenizer
from tqdm.auto import tqdm
BaselineExample = namedtuple(
'BaselineExample',
['context', 'response']
)
KnowledgeGroundedExample = namedtuple(
'KnowledgeGroundedExample',
['context', 'response', 'knowledge']
)
def prepare_baseline_redial_split(
split_path,
tokenizer,
movie_db_map
):
with open(split_path, 'r') as split_file:
split_conversations = split_file.read().splitlines()
examples = []
# Matching for movie mention ids: @1234
movie_mention_pattern = re.compile(r"@(\d+)")
# Pattern for mathching the year portion: (2007)
movie_title_year_pattern = re.compile(r"\s+\(\d+\)")
for conversation_str in tqdm(split_conversations):
conversation = json.loads(conversation_str)
context = []
messages = conversation["messages"]
response = ""
for i, message in enumerate(messages):
processed_text = message["text"]
for mention in movie_mention_pattern.finditer(processed_text):
movie_id = mention.group(1)
# Remove year from title
movie_title = movie_title_year_pattern.sub('', movie_db_map[movie_id])
# for now, naively substitute movie title in message
processed_text = processed_text.replace("@" + movie_id, movie_title)
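            # Merge consecutive messages from the same sender into a single turn:
            # only emit a (context, response) example when the speaker changes or
            # the conversation ends.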
if i == len(messages) - 1 or \
message["senderWorkerId"] != messages[i + 1]["senderWorkerId"]:
response += processed_text
encoded_response = tokenizer.encode(response)
examples.append(BaselineExample(
context,
encoded_response
))
context = context + [encoded_response]
response = ""
else:
# We looked ahead and saw another follow-on response
response += processed_text + " . "
return examples
def prepare_redial_knowledge_grounded_split(
split_path,
movie_db_map,
recommender_only=False,
include_dact=True
):
print("\nLoading data", split_path)
with open(split_path, 'r') as split_file:
split_conversations = split_file.read().splitlines()
examples = []
# Matching for movie mention ids: @1234
movie_mention_pattern = re.compile(r"@(\d+)")
    # Pattern for matching the year portion: (2007)
movie_title_year_pattern = re.compile(r"\s+\(\d+\)")
num_examples_using_knowledge = 0
unk_terms = {
"movie_genre": "<unk_genre>",
"director": "<unk_director>",
"cast": "<unk_cast>",
}
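    # Placeholder tokens for missing movie metadata; together with any dialog-act
    # tags seen in the data, these are returned as extra special tokens.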
dact_set = set()
for conversation_str in tqdm(split_conversations):
conversation = json.loads(conversation_str)
context = []
messages = conversation["messages"]
response = ""
response_knowledge = []
if recommender_only:
sender_id = conversation["initiatorWorkerId"]
for i, message in enumerate(messages):
processed_text = message["text"]
for mention in movie_mention_pattern.finditer(processed_text):
movie_id = mention.group(1)
movie_title = movie_db_map.get(movie_id)
if not movie_title:
movie_title = conversation["movieMentions"][movie_id]
if isinstance(movie_title, dict):
movie_title = movie_title.get("title")
movie_title = movie_title_year_pattern.sub('', movie_title)
# naively substitute movie title in message
processed_text = processed_text.replace("@" + movie_id, movie_title)
response_knowledge.append(("movie_title", movie_title))
#print("\n", movie_id)
#print(conversation["movieMentions"].keys())
mms = conversation["movieMentions"].get(movie_id)
if isinstance(mms, dict):
mgenres = mms["genres"] if mms.get("genres") else [unk_terms["genre"]]
cast = [a["name"] + "," for a in mms["cast"]] if mms.get("cast") else [unk_terms["cast"]]
director = [a["name"] + "," for a in mms["director"]] if mms.get("director") else [unk_terms["director"]]
response_knowledge.extend([
("movie_genre", " ".join(mgenres)),
("director", " ".join(director)),
("cast", " ".join(cast))
])
# For now, just pass in the surface form (later experiment is to try use normalized form)
for genre_mention in message["genre_mentions"]:
#print(genre_mention)
response_knowledge.append(("genre", genre_mention["words"][0]))
for imdb_entry in message["imdb_entries"]:
soup = BeautifulSoup(imdb_entry, "xml")
response_knowledge.append(('person', soup.find('name').text))
if include_dact:
dacts = []
the_das = message["swbd_da"]
for da in the_das:
d = "<" + da["label_name"].replace(" ", "_") + ">"
dact_set.add(d)
dacts.append(d)
dact_tup = ("dact", " ".join(dacts))
response_knowledge.append(dact_tup)
#print(message)
#print(response_knowledge)
#input(">>")
if i == len(messages) - 1 or \
message["senderWorkerId"] != messages[i + 1]["senderWorkerId"]:
response += processed_text
if not recommender_only or (recommender_only and sender_id != message["senderWorkerId"]):
examples.append(KnowledgeGroundedExample(
context,
response,
response_knowledge
))
if len(response_knowledge) > 0:
num_examples_using_knowledge += 1
context = context + [response]
response = ""
response_knowledge = []
else:
# We looked ahead and saw another follow-on response
response += processed_text + " . "
print("Num examples:", len(examples))
print("Num examples using knowledge: ", num_examples_using_knowledge)
return examples, list(unk_terms.values()) + list(dact_set)
def get_movie_db_map(mentions_file_path):
movie_db_map = {}
with open(mentions_file_path, 'r') as mentions_file:
reader = csv.DictReader(mentions_file)
for row in reader:
movie_db_map[row['movieId']] = row['movieName']
return movie_db_map
def try_load_pickle(pickle_file_path, get_special=False):
print("trying to load pickle", pickle_file_path)
if os.path.exists(pickle_file_path):
with open(pickle_file_path, 'rb') as pickle_file:
data = pickle.load(pickle_file)
if get_special and isinstance(data, dict):
retval = data["data"], data.get("special_terms")
else:
retval = data
return retval
print("not found ...")
def save_pickle(pickle_file_path, data, special_terms=None):
if special_terms:
data = {
"data": data,
"special_terms": special_terms
}
with open(pickle_file_path, 'wb') as pickle_file:
pickle.dump(data, pickle_file)
def prepare_redial_baseline_dataset(
redial_path,
tokenizer,
movie_db_map,
dataset_cache_path='dataset_cache.pkl'
):
dataset = try_load_pickle(dataset_cache_path)
if dataset:
print("Cached data already found, returning")
return dataset
split_files = {
'train': 'train_data.jsonl',
'test': 'test_data.jsonl'
}
dataset = {}
for split, split_file_name in split_files.items():
split_file_path = os.path.join(redial_path, split_file_name)
examples = prepare_baseline_redial_split(split_file_path, tokenizer, movie_db_map)
dataset[split] = examples
save_pickle(dataset_cache_path, dataset)
print("Saved file to cache ", dataset_cache_path)
return dataset
def prepare_redial_knowledge_grounded_dataset(
redial_path,
tokenizer,
movie_db_map,
dataset_cache_path='kg_dataset_cache.pkl',
split_files=None,
recommender_only=False,
include_dacts=True,
):
dataset = try_load_pickle(dataset_cache_path, get_special=True)
if dataset:
print("Cached data already found, returning")
return dataset[0], dataset[1]
if split_files is None:
split_files = {
'train': 'train_data_genre_tagged.jsonl',
'test': 'test_data_genre_tagged.jsonl'
}
dataset = {}
for split, split_file_name in split_files.items():
split_file_path = os.path.join(redial_path, split_file_name)
examples, special_terms = prepare_redial_knowledge_grounded_split(split_file_path, movie_db_map, recommender_only, include_dacts)
dataset[split] = examples
if split.lower() == "train":
train_terms = special_terms
save_pickle(dataset_cache_path, dataset, special_terms=train_terms)
print("Saved file to cache ", dataset_cache_path)
return dataset, train_terms
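# Example usage (a minimal sketch; the paths below are illustrative assumptions, not
# taken from the original pipeline). Note that the knowledge-grounded path above does
# not pass the tokenizer through to the split-level function.
#
#   movie_db_map = get_movie_db_map("movies_with_mentions.csv")
#   dataset, special_terms = prepare_redial_knowledge_grounded_dataset(
#       "redial/",
#       tokenizer=None,
#       movie_db_map=movie_db_map,
#       dataset_cache_path="kg_dataset_cache.pkl",
#       recommender_only=True,
#   )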
<file_sep>certifi==2020.6.20
cffi # @ file:///tmp/build/80754af9/cffi_1600699165083/work
chardet==3.0.4
click==7.1.2
dataclasses==0.7
filelock==3.0.12
idna==2.10
IMDbPY==2020.9.25
joblib==0.17.0
lxml==4.6.1
mkl-fft==1.2.0
mkl-random==1.1.1
mkl-service==2.3.0
numpy # @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603487797006/work
packaging==20.4
protobuf==3.13.0
pycparser # @ file:///tmp/build/80754af9/pycparser_1594388511720/work
pyparsing==2.4.7
regex==2020.10.28
requests==2.24.0
sacremoses==0.0.43
sentencepiece==0.1.91
six==1.15.0
SQLAlchemy==1.3.20
tokenizers==0.9.2
torch==1.3.1
tqdm==4.51.0
transformers==3.4.0
urllib3==1.25.11
<file_sep>"""
Created by diesel
12/19/19
"""
from __future__ import print_function, division
import os
import pandas as pd
def check_and_create(out_dir):
if not os.path.exists(out_dir):
print(" ** creating directory: {}".format(out_dir))
os.makedirs(out_dir)
def write_lines(outpath, lines):
with open(outpath, "w") as fout:
fout.write("\n".join(lines))
def read_lines(fpath):
with open(fpath, "r") as fin:
lines = fin.read().split("\n")
return lines
def read_csv_shards(dirpath, name_prefix, postfix):
#print("reading csv shards")
#print(" * dirpath:", dirpath)
#print(" * name_prefix:", name_prefix)
#print(" * postfix:", postfix)
filenames = [fname for fname in os.listdir(dirpath) if fname.startswith(name_prefix) if fname.endswith(postfix)]
#print(" * filenames:", filenames)
idxs = [fname[len(name_prefix):-len(postfix)] for fname in filenames]
#print(" * idxs:", idxs)
sorted_names = [(int(idx), name) for idx, name in zip(idxs, filenames)]
sorted_names.sort()
data = pd.DataFrame()
for idx, fname in sorted_names:
df = pd.read_csv(os.path.join(dirpath, fname))
data = pd.concat([data, df], axis=0)
data.reset_index(inplace=True, drop=True)
return data
def save_df_shards(to_write, outpath, max_size=15000):
h = 0
while to_write:
h += 1
new_df = pd.DataFrame(to_write[:max_size])
outfile = outpath + "-{}.csv".format(h)
if "Index" in new_df.columns.values:
new_df.drop("Index", axis=1, inplace=True)
new_df.to_csv(outfile, index=False, encoding="utf-8")
to_write = to_write[max_size:]
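# Example round trip (illustrative names only):
#   save_df_shards(rows, "out/movies", max_size=15000)  # writes out/movies-1.csv, out/movies-2.csv, ...
#   data = read_csv_shards("out", "movies-", ".csv")    # reads the shards back in numeric order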
def main():
pass
if __name__ == "__main__":
main()
<file_sep>
import pandas as pd
import numpy as np
import json
import nltk
def make_signature(row):
return f"{row.original_title} ({row.year})".lower()
def main1():
"""
['Headhunter (2009)']
:return:
"""
print("hello there !")
redial_df = pd.read_csv("movies_with_mentions.csv")
redial_movies = {row.movieName.lower(): j for j, row in enumerate(redial_df.itertuples())}
imdb_df = pd.read_csv("IMDb-movies.csv")
imdb_df["sig"] = [make_signature(row) for row in imdb_df.itertuples()]
#to_drop = [row.Index for row in imdb_df.itertuples() if row.sig not in redial_movies]
#imdb_df.drop(to_drop, axis=0, inplace=True)
imdb_movies = {row.sig: (row.imdb_title_id, row.title ) for j, row in enumerate(imdb_df.itertuples())}
mapping = [imdb_movies.get(row.movieName.lower()) for row in redial_df.itertuples()]
redial_df["imdb_index"] = mapping
redial_df.to_csv("movies_with_mentions-IMDb.tsv", sep="\t", index=False)
imdb_df.to_csv("IMDb-movies-redial.tsv", sep="\t", index=False)
def read_lines(fpath):
with open(fpath, "r") as fin:
return fin.read().split("\n")
_long_count = 0
_good_count = 0
def normalize_imbd_id(the_id, wanted_len):
global _long_count, _good_count
if not isinstance(the_id, str):
the_id = str(the_id)
if len(the_id) < wanted_len:
# imdb movie id's are length 7 numeric identifiers with
# leading zeroes when necessary.
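# e.g. normalize_imbd_id(247745, 7) -> "0247745"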
the_id = "0" * (wanted_len - len(the_id)) + the_id
#assert len(the_id) == wanted_len, [the_id]
if len(the_id) != wanted_len:
_long_count += 1
else:
_good_count += 1
return the_id
def get_movie(mid, movie_base):
return movie_base.get(normalize_imbd_id(mid, 7))
def get_person(pid, person_base):
if isinstance(pid, str) or isinstance(pid, int):
retval = person_base.get(normalize_imbd_id(pid, 7))
else:
retval = pid
return retval
def main():
movielense_2_imdb = {}
database_2_imdb = {}
df = pd.read_csv("movies_merged_with_imdb.csv")
for row in df.itertuples():
#print(row)
imdbId = normalize_imbd_id(row.imdbId, 7)
movielense_2_imdb[row.movielensId] = imdbId
database_2_imdb[str(row.databaseId)] = imdbId
assert isinstance(imdbId, str), imdbId
_imdb_movie_set = set(movielense_2_imdb.values())
assert "3874544" in _imdb_movie_set
assert "0247745" in _imdb_movie_set
assert database_2_imdb["111776"] == "0247745"
movies_db, people_db = load_databases()
#exit(0)
for split_name in ["test", "train"]:
new_lines = []
fname = f"{split_name}_data_swda_tagged.jsonl"
print("loading", fname)
print("processing ...")
for line in read_lines(fname):
try:
d = json.loads(line)
except Exception as e:
# copy the line over
new_lines.append(line)
# and continue to the next line
continue
new_mentions = {}
for mid in d["movieMentions"]:
#print("\nmid:", mid)
imdb_id = database_2_imdb.get(mid)
#print("imdb_id:", imdb_id)
#print("mention:", d["movieMentions"][mid])
movie = get_movie(imdb_id, movies_db)
#print("movie:", movie)
if isinstance(movie, dict):
for attr_name in ["director", "cast"]:
new_vals = [get_person(pid, people_db) if get_person(pid, people_db) else pid
for pid in movie.get(attr_name, [])]
for p in new_vals:
if not isinstance(p, dict):
continue
new_kf_movies = []
for kf_movie_id in p.get("known for", []):
#print("kf_movie_id:", [kf_movie_id])
if isinstance(kf_movie_id, dict):
kf_movie = kf_movie_id
else:
kf_movie = get_movie(kf_movie_id, movies_db)
#print("kf_movie:", kf_movie)
if kf_movie is None:
new_kf_movies.append(kf_movie_id)
else:
new_kf_movies.append({
"long imdb title": kf_movie.get("long imdb title", "_unk_title_"),
"imdb_id": kf_movie_id.get("imdb_id") if isinstance(kf_movie_id, dict) else kf_movie_id
})
p["known for"] = new_kf_movies
movie[attr_name] = new_vals
movie["imdb_id"] = imdb_id
new_mentions[mid] = movie
else:
# blindly copy whatever is there.
new_mentions[mid] = d["movieMentions"][mid]
#print(new_mentions[mid])
d["movieMentions"] = new_mentions
new_lines.append(json.dumps(d))
# Leftover interactive debug pause, commented out so the conversion can run unattended:
# if input(">>>"):
#     exit(0)
fname = f"{split_name}-with-movie-info.jsonl"
print("saving", fname)
with open(fname, "w") as fout:
fout.write("\n".join(new_lines))
print("_long_count:", _long_count)
print("_good_count:", _good_count)
def load_databases():
with open("all-movies.json", "r") as fin:
movies_db = json.load(fin)
mkeys = [len(str(mid)) for mid in list(movies_db.keys())]
print("max movie key length:", max(mkeys))
print("min movie key length:", min(mkeys))
movies_db = {normalize_imbd_id(imdb_id, 7): entry
for imdb_id, entry in movies_db.items()}
with open("all-people.json", "r") as fin:
people_db = json.load(fin)
pkeys = [len(str(mid)) for mid in list(movies_db.keys())]
print("max people key length:", max(pkeys))
print("min people key length:", min(pkeys))
people_db = {normalize_imbd_id(imdb_id, 7): entry
for imdb_id, entry in people_db.items()}
return movies_db, people_db
if __name__ == "__main__":
main()
| d8b9ff20d403f2327ac892a7061fb532357cb170 | [
"Markdown",
"Python",
"Text"
] | 26 | Python | omkarpat/conversational-movie-recommender | 3ee3b4854bbab5eb4730a18972f5cfd5350d9bf4 | 01944e9f32b7be250b599b267015b888450353e1 | |
refs/heads/master | <file_sep># -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-12-10 16:12
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('menu_app', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='menuitem',
name='category',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='menu_item_to_category', to='menu_app.Category'),
),
migrations.AlterField(
model_name='menuitem',
name='product',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='menu_item_product', to='menu_app.Product'),
),
]
<file_sep># dj_server
Django server
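A quick local run (assuming a standard Django project layout around the `menu_app` app shown here):

* `python manage.py migrate` to create the tables, then `python manage.py runserver`.
* Open `/menu_admin/` to edit the menu tree, or `/` for the read-only view.
* `/generate_tree/` fills the tree with random test data; `/delete_tree/` clears it.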
<file_sep>from django.utils import timezone
def logme(obj_to_log,type='0'):
'''
console log function; prints a timestamped, tagged repr of obj_to_log
'''
print ('++++++++ ' + '{:%y.%m.%d %H:%M:%S}'.format(
timezone.now()) + ' [{}] '.format(type) + repr(obj_to_log))
def add_li(list_to_add,li_txt):
list_to_add.append('<li class="tree_li">')
list_to_add.append(li_txt)
list_to_add.append('</li>')
def attr_to_string(obj):
ret = []
for attr, value in obj.__dict__.items():
ret.append('{}='.format(attr))
ret.append('{} '.format(value))
return ''.join(ret)
def obj_w_level_to_list(list_to_add, obj, level_to_add):
'''
Add obj to a given list and level
'''
obj.level = level_to_add
obj.level_string = (get_tabulating_string(level_to_add))
list_to_add.append(obj)
def get_tabulating_string(level):
# child_level = child.get_level()
if level > 1:
return '<span class="space">{}</span>- '.format('--' * (level-1))
# li_list.append('- ')
else:
return '- '
<file_sep>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index,name='index'),
url(r'^insch/(?P<node>\d+)/(?P<is_cat>\d{1})/(?P<rel_id>\d+)/$',
views.insert_child, name='insert_child'),
url(r'^inssib/(?P<node>\d+)/(?P<is_cat>\d{1})/(?P<rel_id>\d+)/$',
views.insert_sibling, name='insert_sibling'),
url(r'^cat/(?P<cat_id>\d+)/$',
views.category_card, name='category_card'),
url(r'^prod/(?P<prod_id>\d+)/$',
views.product_card, name='product_card'),
url(r'^del/(?P<element_id>\d+)/$',
views.delete_element, name='delete_element'),
url(r'^generate_tree/$',
views.generate_tree, name='generate_tree'),
url(r'^delete_tree/$',
views.delete_tree, name='delete_tree'),
url(r'^menu_admin/$',
views.menu_admin, name='menu_admin'),
url(r'^get_childs_li/(?P<parent_id>\d+)/$',
views.get_childs_li, name='get_childs_li'),
]<file_sep># -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-12-11 15:31
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('menu_app', '0003_auto_20161210_1716'),
]
operations = [
migrations.CreateModel(
name='MenuTree',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_category', models.BooleanField()),
('bot_sib', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='bot_sib_to_self', to='menu_app.MenuTree')),
('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='menu_tree_to_category', to='menu_app.Category')),
('first_child', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='first_child_to_self', to='menu_app.MenuTree')),
('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='parent_to_self', to='menu_app.MenuTree')),
('product', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='menu_tree_product', to='menu_app.Product')),
('top_sib', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='top_sib_to_self', to='menu_app.MenuTree')),
],
),
]
<file_sep># -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-12-10 17:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('menu_app', '0002_auto_20161210_1612'),
]
operations = [
migrations.RemoveField(
model_name='menuitem',
name='item_type',
),
migrations.AddField(
model_name='menuitem',
name='is_category',
field=models.BooleanField(default=False),
preserve_default=False,
),
]
<file_sep># -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-12-22 17:41
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('menu_app', '0005_menutree_child_qty'),
]
operations = [
migrations.AddField(
model_name='menutree',
name='child_qty_active',
field=models.IntegerField(default=0),
),
]
<file_sep>from django.contrib import admin
from .models import Category, Product, MenuTree
admin.site.register(Category)
admin.site.register(Product)
admin.site.register(MenuTree)
<file_sep>//js scripts for menu_app
function getCategory(){
$input = $("input#category_input");
val = $input.val();
list = $input.attr('list'),
match = $('#' + list + ' option').filter(function() {
return ($(this).val() === val);
});
if(match.length > 0) {
return match.data('id');
};
return;
};
function getProduct(){
$input = $("input#product_input");
val = $input.val();
list = $input.attr('list'),
match = $('#' + list + ' option').filter(function() {
return ($(this).val() === val);
});
if(match.length > 0) {
return match.data('id');
};
return;
};
function getChildCheckbox(){
chbox = $('input#child_checkbox');
return chbox.prop('checked');
};
function getCategoryRadio(){
cat_radio = $('input#category_radio');
return cat_radio.prop('checked');
};
function createMenuEntry(){
$('div#change_menu').toggleClass('invisible','visible');
category_id = getCategory();
product_id = getProduct();
create_child = getChildCheckbox();
create_category = getCategoryRadio();
};
function expand_pressed(self){
//Expand category button pressed
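//Loads the category's child <li> items from /get_childs_li/<id>/ and appends them to the node,
//or removes them again if the node is already open; the view answers with an "Error: ..." string when there are no children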
var btn = $(self);
var parent = btn.parent();
var request = '/get_childs_li/' + btn.data('id') + '/';
if (btn.data('state') == 'closed') {
$.ajax({
url: request,
method: "GET",
success: function(result){
if (result.slice(0,5) == 'Error') {
if (btn.attr('id') != 'expand_root') {
alert(result);
}
} else {
parent.append(result);
};
},
error: function(result){
alert(result);
}
});
btn.data('state', 'open');
} else {
li_to_remove = $("li", $(parent));
li_to_remove.remove();
btn.data('state', 'closed');
};
};
$('a.insert_btn').on("click", function(e){
//Insert button in admin_menu pressed
var a = $(this);
var child_chbox = $('input#child_checkbox');
$('span#change_menu_entry_id').text(a.data('id'));
$('div#change_menu').toggleClass('invisible', false);
if (Boolean(a.data('iscategory'))) {
child_chbox.toggleClass('invisible', false);
child_chbox.prop('checked', true);
} else {
child_chbox.toggleClass('invisible', true);
child_chbox.prop('checked', false);
};
});
$('button#change_insert_btn').on("click", function(e){
//Insert button on modification menu pressed
var btn = $(this);
var request = ''
category_id = getCategory();
product_id = getProduct();
create_child = getChildCheckbox();
create_category = getCategoryRadio();
if (create_child) {
request += '/insch/';
} else {
request += '/inssib/';
};
request += $('span#change_menu_entry_id').text() + '/';
if (create_category) {
if (!category_id) {
alert('Category is not set');
return;
}
request += '1/' + category_id + '/';
} else {
if (!product_id) {
alert('Product is not set');
return;
}
request += '0/' + product_id + '/';
};
$.ajax({
url: request,
method: "GET",
success: function(result){
if (result.slice(0,5) == 'Error') {
alert(result);
} else {
location.reload();
};
},
error: function(result){
alert(result);
}
});
$('div#change_menu').toggleClass('invisible', true);
});
$(document).ready(function(){
//Expands root element
var btn = $('a#expand_root');
expand_pressed(btn);
});
<file_sep>from django.db import models
from django.db.models import F
from django.core import exceptions
from .add_lib import logme, add_li, attr_to_string, obj_w_level_to_list
import random
import datetime
class Category(models.Model):
name = models.TextField(max_length=50)
active = models.BooleanField(default=False)
def save(self, *args, **kwargs):
if self.name == 'root':
try:
root = Category.objects.filter(name='root')
except:
root = None
if root:
return False
super(Category, self).save(*args, **kwargs)
class Product(models.Model):
name = models.TextField(max_length=50)
price = models.DecimalField(max_digits=10, decimal_places=2)
active = models.BooleanField(default=False)
class MenuTree(models.Model):
parent = models.ForeignKey('self', related_name='parent_to_self',
blank=True, null=True, db_index=True)
first_child = models.ForeignKey('self', related_name='first_child_to_self',
blank=True, null=True, db_index=True)
top_sib = models.ForeignKey('self', related_name='top_sib_to_self',
blank=True, null=True, db_index=True)
bot_sib = models.ForeignKey('self', related_name='bot_sib_to_self',
blank=True, null=True, db_index=True)
is_category = models.BooleanField()
category = models.ForeignKey(Category, related_name='menu_tree_to_category',
blank=True, null=True)
product = models.ForeignKey(Product, related_name='menu_tree_product',
blank=True, null=True)
child_qty = models.IntegerField(default=0)
child_qty_active = models.IntegerField(default=0)
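# The menu is stored as a "first child / next sibling" linked structure: parent points to the
# enclosing category, first_child to the first element inside a category, and top_sib/bot_sib
# link siblings into a doubly linked list. child_qty / child_qty_active cache the number of
# (active) products anywhere below this node.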
def update_parents_element_count(self, changes): #, active):
'''
Updates child_qty and child_qty_active fields for all parents
changes = [child_qty,child_qty_active]
'''
self.child_qty = F('child_qty') + changes[0]
self.child_qty_active = F('child_qty_active') + changes[1]
self.save()
self.refresh_from_db()
if self.parent:
self.parent.update_parents_element_count(changes)
return
def get_root(self):
'''
Returns the root element, creating it (with category name 'root') if it does not exist
'''
try:
root = MenuTree.objects.get(parent=None)
except exceptions.ObjectDoesNotExist:
root = MenuTree()
root.is_category = True
try:
cat = Category.objects.filter(name='root')[0]
if not cat.active:
cat.active = True
cat.save()
except:
cat = Category()
cat.name = 'root'
cat.active = True
cat.save()
root.category = cat
root.save()
return root
def insert_child(self, child_is_category, link_id, *args, **kwargs):
"""
Always inserts as the first child, pushing the existing first child down if there is one
link_id = id of Category or Product models
"""
if not self.is_category:
raise exceptions.FieldError("Child can be inserted only in category")
child = MenuTree()
child.parent = self
child.is_category = bool(int(child_is_category))
if child.is_category:
try:
cat = Category.objects.get(id=int(link_id))
except exceptions.ObjectDoesNotExist:
raise exceptions.FieldError('Existing Category must be set')
child.category = cat
else:
try:
prod = Product.objects.get(id=int(link_id))
except exceptions.ObjectDoesNotExist:
raise exceptions.FieldError('Existing Product must be set')
child.product = prod
if self.first_child:
child.bot_sib = self.first_child
child.save()
self.first_child.top_sib = child
self.first_child.save()
else:
child.bot_sib = None
child.save()
self.first_child = child
if not child.is_category:
if child.product.active and self.category.active and not self.parent_category_inactive():
self.update_parents_element_count([1,1])
else:
self.update_parents_element_count([1,0])
else:
self.save()
return child
def insert_sibling(self, sibling_is_category, link_id, *args, **kwargs):
"""
Always inserts sibling after self
"""
if not self.parent:
raise exceptions.FieldError('Root element can not have siblings')
sibling = MenuTree()
sibling.parent = self.parent
sibling.first_child = None
sibling.top_sib = self
sibling.is_category = bool(int(sibling_is_category))
if sibling.is_category:
try:
cat = Category.objects.get(id=int(link_id))
except exceptions.ObjectDoesNotExist:
raise exceptions.FieldError('Category must be set')
sibling.category = cat
else:
try:
prod = Product.objects.get(id=int(link_id))
except exceptions.ObjectDoesNotExist:
raise exceptions.FieldError('Product must be set')
sibling.product = prod
sibling.bot_sib = self.bot_sib #None or object
sibling.save()
if self.bot_sib:
self.bot_sib.top_sib = sibling
self.bot_sib.save()
self.bot_sib = sibling
self.save()
#updates child qtys in parent entry
if not sibling.is_category:
# mirror insert_child: only count the product as active when the whole category chain is active
if sibling.product.active and sibling.parent.category.active and not sibling.parent.parent_category_inactive():
sibling.parent.update_parents_element_count([1,1])
else:
sibling.parent.update_parents_element_count([1,0])
return sibling
def parent_category_inactive(self):
'''
Checks whether any ancestor category is inactive
'''
parent = self.parent
while parent:
if not parent.category.active:
return True
parent = parent.parent
return False
def delete_category_childs(self):
'''
Deletes all child elements and returns the deleted [child_qty, child_qty_active] counts
'''
deleted_products = [0,0]
run_loop = True
while run_loop:
if self.first_child:
try:
child = MenuTree.objects.get(id=self.first_child.id)
except exceptions.ObjectDoesNotExist:
child = None
self.first_child = child
run_loop = False
if child:
self.first_child = child.bot_sib
deleted_products = [x + y for x, y in
zip(deleted_products, child.delete_element(False))]
else:
run_loop = False
return deleted_products
def delete_element(self, update_parents):
'''
Deletes this element (and its children if it is a category);
returns the deleted [child_qty, child_qty_active] quantities
'''
deleted_products = [0,0]
if self.top_sib:
self.top_sib.bot_sib = self.bot_sib
self.top_sib.save()
if self.bot_sib:
self.bot_sib.top_sib = self.top_sib
self.bot_sib.save()
if self.is_category:
deleted_products = self.delete_category_childs()
if not self.category.active or self.parent_category_inactive():
deleted_products[1] = 0 #active products
else: #product
if self.product.active and not self.parent_category_inactive():
deleted_products = [1,1]
else:
deleted_products = [1,0]
if self.parent:
if self.parent.first_child == self: #self is a first child
self.parent.first_child = self.bot_sib
self.parent.save()
if update_parents and (deleted_products[0] != 0):
self.parent.update_parents_element_count([-1 * x for x in deleted_products])
self.parent = None
self.top_sib = None
self.bot_sib = None
self.delete()
return deleted_products
def get_tree(self):
'''
Returns the whole tree as a flat list of objects
'''
tree = []
root = self.get_root()
obj_w_level_to_list(tree, root, 0)
tree += root.get_category_branch()
return tree
def get_category_branch(self, level=0, *args, **kwargs):
'''
Returns a flat list of the MenuTree objects that are descendants of self
'''
local_level = level + 1
branch = []
# walk the sibling chain with a local cursor so self.first_child is not clobbered
child = self.first_child
while child:
obj_w_level_to_list(branch, child, local_level)
if child.is_category:
branch += child.get_category_branch(local_level)
child = child.bot_sib
return branch
def get_single_branch(self):
'''
Returns only the direct (single-level) child elements
'''
branch = []
child_level = self.get_level() + 1
# walk the sibling chain with a local cursor so self.first_child is not clobbered
child = self.first_child
while child:
obj_w_level_to_list(branch, child, child_level)
child = child.bot_sib
return branch
def generate_random_tree(self):
'''
Generates a random menu tree with roughly total_elements elements, creating Categories and Products as needed
'''
total_cats = 300
total_prods = 10000
total_elements = 500
start = datetime.datetime.now()
logme('generate_random_tree process started {}'.format(start))
existing_cats = Category.objects.count()
if existing_cats < total_cats:
for i in range(total_cats - existing_cats):
new_cat = Category()
new_cat.name = 'Category_auto_{}'.format(i)
new_cat.active = random.choice([True,True,True,False])
new_cat.save()
existing_prods = Product.objects.count()
if existing_prods < total_prods:
for i in range(total_prods - existing_prods):
new_prod = Product()
new_prod.name = 'Product_auto_{}'.format(i)
new_prod.active = random.choice([True,True,True,False])
new_prod.price = random.randrange(1000) + random.randrange(100)/100
new_prod.save()
root = self.get_root()
elements_left = total_elements
for i in range(total_elements):
elements_left -= root.create_random_child(elements_left,1)
if elements_left < 1:
break
end = datetime.datetime.now()
logme('generate_random_tree process finished, duration {}, elements created {}'.format(end - start,total_elements))
def create_random_child(self, elements_left, level):
'''
Creates random children; when a child is a category, recurses into it
to generate its children until the element budget (elements_left) is used up
'''
stop_prob = 0.2
prod_prob = 0.8 #else category
prod_in_first_level_prob = 0.1
cats = Category.objects.all()
prods = Product.objects.all()
created_childs = 0
for i in range(elements_left):
if (random.random() <= stop_prob) or (created_childs > elements_left):
break
elif level == 1:
insert_prod = random.random() <= prod_in_first_level_prob
else:
insert_prod = random.random() <= prod_prob
if insert_prod:
child = self.insert_child(False,random.choice(prods).id)
created_childs += 1
else: #category
cat = self.insert_child(True,random.choice(cats).id)
created_childs += 1
created_childs += cat.create_random_child(elements_left - created_childs, level + 1)
return created_childs
def get_level(self):
'''
Returns level of self element
'''
level = 0
parent = self.parent
while parent:
level += 1
parent = parent.parent
return level
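# Minimal usage sketch (illustrative ids only; assumes a Category and a Product already exist):
#   root = MenuTree().get_root()
#   drinks = root.insert_child(1, some_category_id)  # 1 -> insert a category node
#   drinks.insert_child(0, some_product_id)          # 0 -> insert a product node
#   flat_tree = root.get_tree()                      # root plus descendants, each with .level set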
<file_sep>from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.core.exceptions import ObjectDoesNotExist, FieldError
from .add_lib import logme, add_li, get_tabulating_string
from .models import Category, Product, MenuTree
def index(request):
'''
Non editable menu view
'''
resp = []
root = MenuTree()
root = root.get_root()
# tree = root.get_tree()
tree = root.get_single_branch()
elem_qty = len(tree) + 1
li_tag = '<li class="tree_li"'
return render(request, 'menu_app/menu_active.html',{'tree': tree,
'elem_qty': elem_qty,
'root': root,
'li_tag': li_tag})
def insert_child(request, node, is_cat, rel_id):
'''
Inserts child for parent node
is_cat (is_category) = 0 or 1
url like: /insch/78/1/1/
'''
try:
parent = MenuTree.objects.get(id=node)
parent.insert_child(is_cat,rel_id)
except Exception as e:
return HttpResponse('Error: ' + str(e))
return HttpResponse('Ok: child created')
def insert_sibling(request, node, is_cat, rel_id):
'''
Inserts sibling for node element
is_cat (is_category) = 0 or 1
url like: /inssib/78/0/10/
'''
try:
top_sibling = MenuTree.objects.get(id=node)
top_sibling.insert_sibling(is_cat,rel_id)
except FieldError as e:
return HttpResponse('Error: ' + str(e))
return HttpResponse('Ok: sibling created')
def category_card(request, cat_id):
'''
Shows category card view
'''
try:
cat = Category.objects.get(id=int(cat_id))
except ObjectDoesNotExist:
return HttpResponse('<h1>Category Does Not Exist</h1>')
resp = []
resp.append('<h1>')
resp.append(cat.name)
resp.append('</h1>')
resp.append('<p>Active ')
resp.append(str(cat.active))
resp.append('</p>')
return HttpResponse(''.join(resp))
def product_card(request, prod_id):
'''
Shows product card view
'''
try:
prod = Product.objects.get(id=int(prod_id))
except ObjectDoesNotExist:
return HttpResponse('<h1>Product Does Not Exist</h1>')
resp = []
resp.append('<h1>')
resp.append(prod.name)
resp.append('</h1>')
resp.append('<p>Active: ')
resp.append(str(prod.active))
resp.append(' Price: ')
resp.append(str(prod.price))
resp.append('</p>')
return HttpResponse(''.join(resp))
def delete_element(request, element_id):
'''
Deletes tree element
'''
try:
menu = MenuTree.objects.get(id=int(element_id))
except ObjectDoesNotExist:
return redirect('menu_admin')
menu.delete_element(True)
return redirect('menu_admin')
def generate_tree(request):
'''
Generates random tree
'''
root = MenuTree()
root = root.get_root()
root.generate_random_tree()
return redirect('menu_admin')
def menu_admin(request):
'''
Editable (admin) menu view
'''
root = MenuTree()
root = root.get_root()
tree = root.get_tree()
cats_inactive = 0
prods_inactive = 0
active_prods_in_inactive_cats = 0
prods_total = 0
for item in tree:
if item.is_category:
if not item.category.active:
cats_inactive += 1
else:
prods_total += 1
if not item.product.active:
prods_inactive += 1
elif item.parent_category_inactive():
active_prods_in_inactive_cats += 1
elements_total = len(tree)
cats_total = elements_total - prods_total
cats = Category.objects.all()
prods = Product.objects.all()
return render(request,
'menu_app/menu.html',{'tree': tree,
'elements_total': elements_total,
'cats': cats,
'prods': prods,
'cats_inactive': cats_inactive,
'prods_inactive': prods_inactive,
'prods_total': prods_total,
'cats_total': cats_total,
'active_prods_in_inactive_cats': active_prods_in_inactive_cats})
def delete_tree(request):
'''
Deletes whole tree
'''
all_obj = MenuTree.objects.all()
all_obj.delete()
return redirect('menu_admin')
def wrap_in_li(li_list,tree_obj):
'''
Wraps MenuTree obj in li and adds to list
'''
wrap_element = False
if tree_obj.is_category:
if tree_obj.category.active:
wrap_element = True
else:
if tree_obj.product.active:
wrap_element = True
if wrap_element:
li_list.append('<li class="tree_li" id="li_{}">'.format(tree_obj.id))
li_list.append(get_tabulating_string(tree_obj.get_level()))
li_list.append(str(tree_obj.id))
if tree_obj.is_category:
li_list.append(' <a class="expand_btn" href="javascript:void(0)" \
data-id="{}" data-state="closed" onclick="expand_pressed(this)\
">+</a>'.format(tree_obj.id))
li_list.append(' <a class="category" \
href="/cat/{}">{}</a>'.format(tree_obj.category.id,tree_obj.category.name))
li_list.append(' ({})'.format(tree_obj.child_qty_active))
else:
li_list.append(' <a class="product" \
href="/prod/{}">{}</a>'.format(tree_obj.product.id,tree_obj.product.name))
li_list.append(' (€ {})'.format(tree_obj.product.price))
li_list.append('</li>')
def get_childs_li(request, parent_id):
'''
Returns the category's children wrapped in <li> elements
'''
li_list = []
try:
child = MenuTree.objects.get(parent=parent_id, top_sib=None)
except:
child = None
while child:
wrap_in_li(li_list,child)
child = child.bot_sib
if len(li_list) == 0:
return HttpResponse('Error: Category has no child elements')
else:
return HttpResponse(''.join(li_list))
| 07d7d5d8ff823067f1bec3fdddafaa9028a5c086 | [
"Markdown",
"Python",
"JavaScript"
] | 11 | Python | psylo8/dj_server | b5d1d1b17dcfa35f34f5aed414d77220567d46a9 | 8df8a573d74e4efd2737749136c71e09ca4b450e | |
refs/heads/master | <repo_name>devalticode/FlashAlert<file_sep>/app/src/main/java/com/alticode/flashalert/utils/FlashPreferenceCategory.java
package com.alticode.flashalert.utils;
import android.content.Context;
import android.graphics.Color;
import android.preference.PreferenceCategory;
import android.util.AttributeSet;
import android.view.View;
import android.widget.TextView;
/**
* Created by TienDzung on 9/6/2015.
*/
public class FlashPreferenceCategory extends PreferenceCategory {
public FlashPreferenceCategory(Context context) {
super(context);
}
public FlashPreferenceCategory(Context context, AttributeSet attrs) {
super(context, attrs);
}
public FlashPreferenceCategory(Context context, AttributeSet attrs,
int defStyle) {
super(context, attrs, defStyle);
}
@Override
protected void onBindView(View view) {
super.onBindView(view);
TextView titleView = (TextView) view.findViewById(android.R.id.title);
titleView.setTextColor(Color.parseColor("#30a399"));
}
}<file_sep>/app/src/main/java/com/alticode/flashalert/flashalert/FlashApplication.java
package com.alticode.flashalert.flashalert;
import android.app.Application;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.os.Build;
import android.preference.PreferenceManager;
import android.util.Log;
import com.alticode.flashalert.R;
import com.alticode.flashalert.utils.FlashHelper;
import java.util.Timer;
import java.util.TimerTask;
/**
* Created by Dzung on 9/18/2015.
*/
public class FlashApplication extends Application {
public static SharedPreferences sharePref;
public static Context mContext;
public static FlashHelper mFlashHelper;
public BatteryReceiver batteryReceiver;
private static final int NOTIFY_TIMER = 60000 * 60;
@Override
public void onCreate() {
super.onCreate();
mContext = this;
sharePref = PreferenceManager.getDefaultSharedPreferences(this);
mFlashHelper = FlashHelper.getInstance(this);
/* Register battery receiver */
batteryReceiver = new BatteryReceiver();
IntentFilter iFilter = new IntentFilter();
iFilter.addAction(Intent.ACTION_BATTERY_CHANGED);
registerReceiver(batteryReceiver, iFilter);
/* Create notify */
Timer notifyTimer = new Timer();
TimerTask notifyTask = new TimerTask() {
@Override
public void run() {
createNotify();
}
};
notifyTimer.scheduleAtFixedRate(notifyTask, 0, NOTIFY_TIMER);
}
public static boolean getPrefIncomingCall() {
return sharePref.getBoolean(mContext.getResources().getString(R.string.status_call_pref), false);
}
public static boolean getPrefIncomingSms() {
return sharePref.getBoolean(mContext.getResources().getString(R.string.status_sms_pref), false);
}
public static boolean getPrefModeSilent() {
return sharePref.getBoolean(mContext.getResources().getString(R.string.mode_silent_pref), false);
}
public static boolean getPrefModeVibrate() {
return sharePref.getBoolean(mContext.getResources().getString(R.string.mode_vibrate_pref), false);
}
public static boolean getPrefModeNormal() {
return sharePref.getBoolean(mContext.getResources().getString(R.string.mode_normal_pref), false);
}
public static int getPrefSpeed() {
return sharePref.getInt(mContext.getResources().getString(R.string.speed_pref), 100);
}
public static int getPrefBlinkTimes() {
return sharePref.getInt(mContext.getResources().getString(R.string.blink_time_pref), 3);
}
public static int getPrefBattery() {
return sharePref.getInt(mContext.getResources().getString(R.string.battery_pref), 20);
}
public static boolean getPrefNotify() {
return sharePref.getBoolean(mContext.getResources().getString(R.string.notify_pref), false);
}
public static boolean getPrefAlert() {
return sharePref.getBoolean(mContext.getResources().getString(R.string.alert_key_pref), true);
}
public static void savePrefAlert(boolean value) {
sharePref.edit().putBoolean(mContext.getResources().getString(R.string.alert_key_pref), value).apply();
}
private void createNotify() {
Intent i = new Intent(this, SettingsActivity.class);
PendingIntent pIntent = PendingIntent.getActivity(this.getApplicationContext(), (int) System.currentTimeMillis(), i, 0);
Notification notify;
Notification.Builder notifyBuilder = new Notification.Builder(this)
.setContentTitle(getString(R.string.app_name))
.setContentText("Touch for more settings")
.setSmallIcon(R.drawable.ic_flash_on_white_36dp)
.setContentIntent(pIntent);
// Build notification
if (Build.VERSION.SDK_INT < 16) {
notify = notifyBuilder.getNotification();
} else {
notify = notifyBuilder.build();
}
NotificationManager notifyManager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
notify.flags |= Notification.FLAG_AUTO_CANCEL;
notifyManager.notify(0, notify);
}
}
<file_sep>/app/src/main/java/com/alticode/flashalert/flashalert/SMSreceiver.java
package com.alticode.flashalert.flashalert;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import com.alticode.flashalert.utils.FlashHelper;
public class SMSreceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
FlashHelper mFlashHelper = FlashApplication.mFlashHelper;
Bundle extras = intent.getExtras();
if (extras != null) {
if ((mFlashHelper.batteryLevel >= FlashApplication.getPrefBattery()) && mFlashHelper.isAlertOn() == true && FlashApplication.getPrefIncomingSms())
mFlashHelper.alert(FlashHelper.INCOMING_SMS);
}
}
}<file_sep>/app/src/main/java/com/alticode/flashalert/utils/PrefHelper.java
package com.alticode.flashalert.utils;
import android.content.Context;
import android.content.SharedPreferences;
import android.media.AudioManager;
import android.preference.PreferenceManager;
import com.alticode.flashalert.R;
/**
* Created by Dzung on 9/14/2015.
*/
public class PrefHelper {
private Context mContext;
private SharedPreferences sharePref;
private static PrefHelper mInstance = null;
private PrefHelper(Context context) {
mContext = context;
sharePref = PreferenceManager.getDefaultSharedPreferences(mContext);
}
public static PrefHelper getInstance(Context context) {
if (mInstance == null) {
mInstance = new PrefHelper(context);
}
return mInstance;
}
public boolean getPrefIncomingCall() {
return sharePref.getBoolean(mContext.getResources().getString(R.string.status_call_pref), false);
}
public boolean getPrefIncomingSms() {
return sharePref.getBoolean(mContext.getResources().getString(R.string.status_sms_pref), false);
}
public boolean getPrefModeSilent() {
return sharePref.getBoolean(mContext.getResources().getString(R.string.mode_silent_pref), false);
}
public boolean getPrefModeVibrate() {
return sharePref.getBoolean(mContext.getResources().getString(R.string.mode_vibrate_pref), false);
}
public boolean getPrefModeNormal() {
return sharePref.getBoolean(mContext.getResources().getString(R.string.mode_normal_pref), false);
}
public int getPrefSpeed() {
return sharePref.getInt(mContext.getResources().getString(R.string.speed_pref), 100);
}
public int getPrefBlinkTimes() {
return sharePref.getInt(mContext.getResources().getString(R.string.blink_time_pref), 3);
}
public int getPrefBattery() {
return sharePref.getInt(mContext.getResources().getString(R.string.battery_pref), 20);
}
public boolean getPrefNotify() {
return sharePref.getBoolean(mContext.getResources().getString(R.string.notify_pref), false);
}
public boolean getPrefAlert() {
return sharePref.getBoolean(mContext.getResources().getString(R.string.alert_key_pref), true);
}
public void savePrefAlert(boolean value) {
sharePref.edit().putBoolean(mContext.getResources().getString(R.string.alert_key_pref), value).apply();
}
}
<file_sep>/app/src/main/java/com/alticode/flashalert/utils/Config.java
package com.alticode.flashalert.utils;
import android.util.Log;
import java.util.Random;
/**
* Created by TienDzung on 9/14/2015.
*/
public class Config {
public static final int AD_FREQUENCY = 3;
public static final boolean SHOW_FULL_AD_AT_CREATE = true;
public static final boolean SHOW_FULL_AD_AT_CALL = true;
public static final boolean SHOW_FULL_AD_AT_SMS = true;
public static final boolean SHOW_FULL_AD_AT_SILENT = true;
public static final boolean SHOW_FULL_AD_AT_VIBRATE = true;
public static final boolean SHOW_FULL_AD_AT_NORMAL = true;
public static final boolean SHOW_BANNER_AD = true;
public static final String DEVICE_TEST_SONY = "YT91008UDL";
public static final String DEVICE_TEST_LG = "LGE975d3097707";
public static final String APP_URL = "https://play.google.com/store/apps/details?id=alert.flash.com.flashalert \n\n";
public static final String RATE_APP_URL = "https://play.google.com/store/apps/details?id=alert.flash.com.flashalert";
public static final String MORE_APP_URL = "https://play.google.com/store/apps/details?id=alert.flash.com.flashalert";
public static int randomInt() {
Random rand = new Random();
return rand.nextInt();
}
public static boolean allowAd() {
boolean show = randomInt() % AD_FREQUENCY == 0 ? true : false;
Log.d("DungNT", "Show: " + show);
return show;
}
}
<file_sep>/app/src/main/java/com/alticode/flashalert/flashalert/CallReceiver.java
package com.alticode.flashalert.flashalert;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.telephony.TelephonyManager;
import android.util.Log;
import com.alticode.flashalert.utils.FlashHelper;
/**
* Created by TienDzung on 9/7/2015.
*/
public class CallReceiver extends BroadcastReceiver {
FlashApplication app;
@Override
public void onReceive(Context context, Intent intent) {
app = (FlashApplication)context.getApplicationContext();
FlashHelper mFlashHelper = FlashApplication.mFlashHelper;
if (intent.getStringExtra(TelephonyManager.EXTRA_STATE).equals(TelephonyManager.EXTRA_STATE_RINGING)) {
if ((mFlashHelper.batteryLevel >= FlashApplication.getPrefBattery()) && mFlashHelper.isAlertOn() == true && FlashApplication.getPrefIncomingCall() == true) {
mFlashHelper.stop = false;
mFlashHelper.alert(FlashHelper.INCOMING_CALL);
}
} else if (intent.getStringExtra(TelephonyManager.EXTRA_STATE).equals(
TelephonyManager.EXTRA_STATE_IDLE)
|| intent.getStringExtra(TelephonyManager.EXTRA_STATE).equals(
TelephonyManager.EXTRA_STATE_OFFHOOK)) {
mFlashHelper.stop = true;
}
}
}
| 4e59b2936b6db84dc4cae0504e69c9fd6bd7fdf9 | [
"Java"
] | 6 | Java | devalticode/FlashAlert | 12b32e86c1ec8df5a04ec52a12d365e3863b346b | a907f5363391e8228ac6938fea087597a9153544 |