file_name | file_path | content | file_size | language | extension | repo_name | repo_stars | repo_forks | repo_open_issues | repo_created_at | repo_pushed_at
---|---|---|---|---|---|---|---|---|---|---|---
MybatisHelper.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/MybatisHelper.java | package com.chengjs.cjsssmsweb.mybatis;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.jdbc.ScriptRunner;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import tk.mybatis.mapper.common.IdsMapper;
import tk.mybatis.mapper.common.Mapper;
import tk.mybatis.mapper.common.MySqlMapper;
import tk.mybatis.mapper.common.SqlServerMapper;
import tk.mybatis.mapper.entity.Config;
import tk.mybatis.mapper.mapperhelper.MapperHelper;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.sql.Connection;
/**
* MybatisHelper:
*
* @author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/9/9
*/
public class MybatisHelper {
private static SqlSessionFactory sqlSessionFactory;
static {
try {
/*SqlSessionFactory*/
Reader reader = Resources.getResourceAsReader("database/mybatis-config-javaapi.xml");
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
reader.close();
/*open a session and configure the common Mapper*/
SqlSession session = null;
try {
session = sqlSessionFactory.openSession();
/*create a MapperHelper*/
MapperHelper mapperHelper = new MapperHelper();
/*=================special configuration=================*/
Config config = new Config();
// set the UUID generation strategy
// configuring the UUID strategy requires an OGNL expression
// default (32 characters): @java.util.UUID@randomUUID().toString().replace("-", "")
config.setUUID("");
// write-back method for auto-generated keys, default MYSQL, see the docs for details
// config.setIDENTITY("MYSQL");
config.setIDENTITY("select uuid()");
// support annotations on methods
// added in version 3.3.1
config.setEnableMethodAnnotation(true);
config.setNotEmpty(false);
// check that the type used in an Example matches the entity class
config.setCheckExampleEntityClass(true);
// enable simple types
config.setUseSimpleType(true);
// rule for obtaining sequence values, formatted with {num} parameters; default {0}.nextval, for Oracle
// three optional parameters (0, 1, 2): SequenceName, ColumnName, PropertyName
//config.setSeqFormat("NEXT VALUE FOR {0}");
// global catalog, empty by default; if set, SQL statements use catalog.tablename
//config.setCatalog("");
// global schema, empty by default; if set, SQL statements use schema.tablename
// if catalog is also set, catalog.tablename takes precedence
//config.setSchema("");
// execution order of the key write-back, default AFTER, allowed values (BEFORE|AFTER)
config.setOrder("BEFORE");
//apply the configuration
mapperHelper.setConfig(config);
/*=================special configuration=================*/
/*register the common Mapper interfaces - inherited interfaces are registered automatically*/
mapperHelper.registerMapper(Mapper.class);
mapperHelper.registerMapper(MySqlMapper.class);
/*after configuration, run the following*/
mapperHelper.processConfiguration(session.getConfiguration());
} catch (Exception e) {
throw e;
} finally {
if (session != null) {
session.close();
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
public static SqlSession getSqlSession() {
return sqlSessionFactory.openSession();
}
/**
* Obtains a SqlSession from the mybatis configuration. Not reliable when testing mappers; needs verification.
*
* It does not set some properties the mappers require, so running it throws errors. Do not use.
*
* @return
* @throws IOException
*/
@Deprecated
public static SqlSession session() throws IOException {
String resource = "database/mybatis-config-javaapi.xml";
InputStream inputStream = Resources.getResourceAsStream(resource);
SqlSessionFactory sqlSessionFactory = (new SqlSessionFactoryBuilder()).build(inputStream);
SqlSession sqlSession = sqlSessionFactory.openSession();
return sqlSession;
}
}
| 4,210 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
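For context, a minimal usage sketch of the helper above (not a file from the repository): it assumes CountryMapper is declared in database/mybatis-config-javaapi.xml so that MapperHelper.processConfiguration wires it, and it relies on the generic selectAll() that tk.mybatis adds to every registered Mapper<T>.

package com.chengjs.cjsssmsweb.mybatis;

import com.chengjs.cjsssmsweb.mybatis.mapper.master.CountryMapper;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.Country;
import org.apache.ibatis.session.SqlSession;
import java.util.List;

public class MybatisHelperUsageSketch {
  public static void main(String[] args) {
    // getSqlSession() returns a session from the factory configured in the static block above
    SqlSession session = MybatisHelper.getSqlSession();
    try {
      // the generic Mapper<Country> methods (selectAll, selectByPrimaryKey, ...) come from tk.mybatis
      CountryMapper mapper = session.getMapper(CountryMapper.class);
      List<Country> countries = mapper.selectAll();
      System.out.println("countries: " + countries.size());
    } finally {
      session.close();
    }
  }
}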
package-info.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/mapper/package-info.java | /**
* package-info: DAO database package
* Sys--management system
* *Mapper--interfaces generated by mybatis auto-mapping
* *Dao--manually created interfaces
*
*
* author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 2017/8/23
*/
package com.chengjs.cjsssmsweb.mybatis.mapper; | 280 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
package-info.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/mapper/common/package-info.java | /**
* package-info: common mapper/dao
*
* @author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/9/13
*/
package com.chengjs.cjsssmsweb.mybatis.mapper.common; | 199 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
ISelectDao.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/mapper/common/ISelectDao.java | package com.chengjs.cjsssmsweb.mybatis.mapper.common;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.UUser;
import tk.mybatis.mapper.common.Mapper;
import java.util.List;
import java.util.Map;
/**
* ISelectDao: common select query methods
*
* This relies on the generic Mapper; the Mapper<UUser> here could be any suitable entity.
*
* @author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/9/12
*/
public interface ISelectDao extends Mapper<UUser>{
/**
* How mybatis returns a single object versus a list of objects:
*
* 1. The return type is determined jointly by the dao interface and the *map.xml file.
* Whether a single object or a list is returned, the *map.xml configuration is the same: resultMap="*Map" or resultType="*.*.*".
* 2. After every select, mybatis checks whether the number of rows matches the return type declared in the dao.
* 3. If one row is returned and the dao declares either an object or a List of objects, the match succeeds and the data is stored accordingly.
* 4. If multiple rows are returned but the dao declares a single object, the rows cannot be mapped to one object and mybatis throws an error.
* */
List<Map<String,String>> users(Map<String,String> params);
List<Map<String,String>> roles(Map<String,String> params);
}
| 1,399 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
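To illustrate the return-type rule described in the javadoc above, a hedged sketch (not a file from the repository): the dao declares List<Map<String, String>>, and the same mapping applies whether the query matches one row or many; the "username" filter key and the registration of ISelectDao in the mybatis configuration are assumptions.

package com.chengjs.cjsssmsweb.mybatis.mapper.common;

import com.chengjs.cjsssmsweb.mybatis.MybatisHelper;
import org.apache.ibatis.session.SqlSession;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SelectDaoUsageSketch {
  public static void main(String[] args) {
    SqlSession session = MybatisHelper.getSqlSession();
    try {
      ISelectDao dao = session.getMapper(ISelectDao.class);
      Map<String, String> params = new HashMap<String, String>();
      params.put("username", "0001"); // assumed filter key in the users select mapping
      // one row or many, the declared List<Map<String, String>> return type matches
      List<Map<String, String>> rows = dao.users(params);
      for (Map<String, String> row : rows) {
        System.out.println(row);
      }
    } finally {
      session.close();
    }
  }
}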
IGridDao.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/mapper/common/IGridDao.java | package com.chengjs.cjsssmsweb.mybatis.mapper.common;
/**
* IGridDao: generic grid query interface
*
* @author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/9/24
*/
public interface IGridDao {
}
| 233 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
UserRolePermissionDao.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/mapper/dao/UserRolePermissionDao.java | package com.chengjs.cjsssmsweb.mybatis.mapper.dao;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.Country;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.UUser;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.session.RowBounds;
import tk.mybatis.mapper.common.Mapper;
import java.util.List;
import java.util.Map;
import java.util.Set;
public interface UserRolePermissionDao extends Mapper<Country> {
UUser findByLogin(UUser user);
int findAllCount(UUser user);
List<UUser> findHotUser();
List<UUser> findByParams(UUser user, RowBounds rowBound);
List<UUser> findAllByQuery(UUser user);
List<UUser> list(Map<String, Object> map);
Long getTotal(Map<String, Object> map);
UUser findUserByUsername(String username);
Set<String> findRoleNames(String username);
Set<String> findPermissionNames(Set<String> roleNames);
Set<String> findRoleNamesByUserName(@Param("userName")String userName);
Set<String> findPermissionNamesByRoleNames(@Param("set")Set<String> roleNames);
UUser findUserByUserName(@Param("userName")String userName);
} | 1,096 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
SocketContentDao.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/mapper/dao/SocketContentDao.java | package com.chengjs.cjsssmsweb.mybatis.mapper.dao;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.Country;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.SocketContent;
import tk.mybatis.mapper.common.Mapper;
import java.util.List;
public interface SocketContentDao extends Mapper<Country> {
List<SocketContent> findSocketContentList();
} | 353 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
RedisContentMapper.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/mapper/master/RedisContentMapper.java | package com.chengjs.cjsssmsweb.mybatis.mapper.master;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.RedisContent;
import tk.mybatis.mapper.common.Mapper;
public interface RedisContentMapper extends Mapper<RedisContent> {
} | 228 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
URoleMapper.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/mapper/master/URoleMapper.java | package com.chengjs.cjsssmsweb.mybatis.mapper.master;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.URole;
import tk.mybatis.mapper.common.Mapper;
public interface URoleMapper extends Mapper<URole> {
} | 207 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
SocketContentMapper.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/mapper/master/SocketContentMapper.java | package com.chengjs.cjsssmsweb.mybatis.mapper.master;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.SocketContent;
import tk.mybatis.mapper.common.Mapper;
public interface SocketContentMapper extends Mapper<SocketContent> {
} | 231 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
UConUserRoleMapper.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/mapper/master/UConUserRoleMapper.java | package com.chengjs.cjsssmsweb.mybatis.mapper.master;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.UConUserRole;
import tk.mybatis.mapper.common.Mapper;
public interface UConUserRoleMapper extends Mapper<UConUserRole> {
} | 228 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
CountryMapper.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/mapper/master/CountryMapper.java | package com.chengjs.cjsssmsweb.mybatis.mapper.master;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.Country;
import tk.mybatis.mapper.common.Mapper;
public interface CountryMapper extends Mapper<Country> {
} | 214 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
UPermissionMapper.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/mapper/master/UPermissionMapper.java | package com.chengjs.cjsssmsweb.mybatis.mapper.master;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.UPermission;
import tk.mybatis.mapper.common.Mapper;
public interface UPermissionMapper extends Mapper<UPermission> {
} | 225 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
UUserMapper.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/mapper/master/UUserMapper.java | package com.chengjs.cjsssmsweb.mybatis.mapper.master;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.UUser;
import tk.mybatis.mapper.common.Mapper;
import java.util.List;
import java.util.Map;
public interface UUserMapper extends Mapper<UUser> {
/**
* Tests fetching a page with a POJO parameter
* @param uuser
* @return
*/
List<UUser> gridUsers(UUser uuser);
/**
* Tests fetching a page with a Map parameter
* @param params
* @return
*/
List<Map<String,String>> users(Map<String,String> params);
} | 529 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
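The two methods above differ only in how the paging criteria are carried (POJO vs Map). Below is a hedged paging sketch (not a file from the repository) using the Map variant with PageHelper, mirroring the service layer shown later in this dump; the parameter names are assumptions about the UUserMapper.xml mapping.

package com.chengjs.cjsssmsweb.mybatis.mapper.master;

import com.chengjs.cjsssmsweb.mybatis.MybatisHelper;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import org.apache.ibatis.session.SqlSession;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class UUserMapperPagingSketch {
  public static void main(String[] args) {
    SqlSession session = MybatisHelper.getSqlSession();
    try {
      UUserMapper mapper = session.getMapper(UUserMapper.class);
      // PageHelper.startPage intercepts the next mybatis query on this thread; pages start at 1
      PageHelper.startPage(1, 20);
      Map<String, String> params = new HashMap<String, String>();
      params.put("username", "0001"); // assumed filter key
      List<Map<String, String>> rows = mapper.users(params);
      PageInfo<Map<String, String>> page = new PageInfo<Map<String, String>>(rows);
      System.out.println("total=" + page.getTotal() + ", pageRows=" + rows.size());
    } finally {
      session.close();
    }
  }
}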
UConRolePermissionMapper.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/mapper/master/UConRolePermissionMapper.java | package com.chengjs.cjsssmsweb.mybatis.mapper.master;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.UConRolePermission;
import tk.mybatis.mapper.common.Mapper;
public interface UConRolePermissionMapper extends Mapper<UConRolePermission> {
} | 246 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
package-info.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/pojo/package-info.java | /**
* package-info: database entity (pojo) package
* author: <a href="mailto:[email protected]">chengjs</a> , version:1.0.0, 2017/8/23
*/
package com.chengjs.cjsssmsweb.mybatis.pojo;
| 190 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
Select.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/pojo/common/Select.java | package com.chengjs.cjsssmsweb.mybatis.pojo.common;
import javax.persistence.Column;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import java.util.Date;
@Table(name = "u_user")
public class Select {
@Id
@Column(name = "id")
@GeneratedValue(strategy = GenerationType.IDENTITY, generator = "SELECT replace(t.uuid,\"-\",\"\") FROM (SELECT uuid() uuid FROM dual) t")
private String id;
@Column(name = "username")
private String username;
@Column(name = "password")
private String password;
@Column(name = "description")
private String description;
@Column(name = "discard")
private String discard;
@Column(name = "createtime")
private Date createtime;
@Column(name = "modifytime")
private Date modifytime;
/**
* @return id
*/
public String getId() {
return id;
}
/**
* @param id
*/
public void setId(String id) {
this.id = id == null ? null : id.trim();
}
/**
* @return username
*/
public String getUsername() {
return username;
}
/**
* @param username
*/
public void setUsername(String username) {
this.username = username == null ? null : username.trim();
}
/**
* @return password
*/
public String getPassword() {
return password;
}
/**
* @param password
*/
public void setPassword(String password) {
this.password = password == null ? null : password.trim();
}
/**
* @return description
*/
public String getDescription() {
return description;
}
/**
* @param description
*/
public void setDescription(String description) {
this.description = description == null ? null : description.trim();
}
/**
* @return discard
*/
public String getDiscard() {
return discard;
}
/**
* @param discard
*/
public void setDiscard(String discard) {
this.discard = discard == null ? null : discard.trim();
}
/**
* @return createtime
*/
public Date getCreatetime() {
return createtime;
}
/**
* @param createtime
*/
public void setCreatetime(Date createtime) {
this.createtime = createtime;
}
/**
* @return modifytime
*/
public Date getModifytime() {
return modifytime;
}
/**
* @param modifytime
*/
public void setModifytime(Date modifytime) {
this.modifytime = modifytime;
}
} | 2,657 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
RedisContent.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/pojo/master/RedisContent.java | package com.chengjs.cjsssmsweb.mybatis.pojo.master;
import java.util.Date;
import javax.persistence.*;
@Table(name = "redis_content")
public class RedisContent {
@Id
@Column(name = "id")
@GeneratedValue(strategy = GenerationType.IDENTITY, generator = "SELECT replace(t.uuid,\"-\",\"\") FROM (SELECT uuid() uuid FROM dual) t")
private String id;
@Column(name = "content")
private String content;
@Column(name = "createtime")
private Date createtime;
/**
* @return id
*/
public String getId() {
return id;
}
/**
* @param id
*/
public void setId(String id) {
this.id = id == null ? null : id.trim();
}
/**
* @return content
*/
public String getContent() {
return content;
}
/**
* @param content
*/
public void setContent(String content) {
this.content = content == null ? null : content.trim();
}
/**
* @return createtime
*/
public Date getCreatetime() {
return createtime;
}
/**
* @param createtime
*/
public void setCreatetime(Date createtime) {
this.createtime = createtime;
}
} | 1,200 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
UConUserRole.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/pojo/master/UConUserRole.java | package com.chengjs.cjsssmsweb.mybatis.pojo.master;
import java.util.Date;
import javax.persistence.*;
@Table(name = "u_con_user_role")
public class UConUserRole {
@Id
@Column(name = "id")
@GeneratedValue(strategy = GenerationType.IDENTITY, generator = "SELECT replace(t.uuid,\"-\",\"\") FROM (SELECT uuid() uuid FROM dual) t")
private String id;
@Column(name = "userid")
private String userid;
@Column(name = "roleid")
private String roleid;
@Column(name = "createtime")
private Date createtime;
@Column(name = "modifytime")
private Date modifytime;
@Column(name = "discard")
private String discard;
/**
* @return id
*/
public String getId() {
return id;
}
/**
* @param id
*/
public void setId(String id) {
this.id = id == null ? null : id.trim();
}
/**
* @return userid
*/
public String getUserid() {
return userid;
}
/**
* @param userid
*/
public void setUserid(String userid) {
this.userid = userid == null ? null : userid.trim();
}
/**
* @return roleid
*/
public String getRoleid() {
return roleid;
}
/**
* @param roleid
*/
public void setRoleid(String roleid) {
this.roleid = roleid == null ? null : roleid.trim();
}
/**
* @return createtime
*/
public Date getCreatetime() {
return createtime;
}
/**
* @param createtime
*/
public void setCreatetime(Date createtime) {
this.createtime = createtime;
}
/**
* @return modifytime
*/
public Date getModifytime() {
return modifytime;
}
/**
* @param modifytime
*/
public void setModifytime(Date modifytime) {
this.modifytime = modifytime;
}
/**
* @return discard
*/
public String getDiscard() {
return discard;
}
/**
* @param discard
*/
public void setDiscard(String discard) {
this.discard = discard == null ? null : discard.trim();
}
} | 2,120 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
SocketContent.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/pojo/master/SocketContent.java | package com.chengjs.cjsssmsweb.mybatis.pojo.master;
import java.util.Date;
import javax.persistence.*;
@Table(name = "socket_content")
public class SocketContent {
@Id
@Column(name = "id")
@GeneratedValue(strategy = GenerationType.IDENTITY, generator = "SELECT replace(t.uuid,\"-\",\"\") FROM (SELECT uuid() uuid FROM dual) t")
private String id;
/**
* sender
*/
@Column(name = "contentsender")
private String contentsender;
/**
* chat content
*/
@Column(name = "content")
private String content;
@Column(name = "createtime")
private Date createtime;
/**
* @return id
*/
public String getId() {
return id;
}
/**
* @param id
*/
public void setId(String id) {
this.id = id == null ? null : id.trim();
}
/**
* Gets the sender
*
* @return contentsender - the sender
*/
public String getContentsender() {
return contentsender;
}
/**
* Sets the sender
*
* @param contentsender the sender
*/
public void setContentsender(String contentsender) {
this.contentsender = contentsender == null ? null : contentsender.trim();
}
/**
* Gets the chat content
*
* @return content - the chat content
*/
public String getContent() {
return content;
}
/**
* Sets the chat content
*
* @param content the chat content
*/
public void setContent(String content) {
this.content = content == null ? null : content.trim();
}
/**
* @return createtime
*/
public Date getCreatetime() {
return createtime;
}
/**
* @param createtime
*/
public void setCreatetime(Date createtime) {
this.createtime = createtime;
}
} | 1,829 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
Country.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/pojo/master/Country.java | package com.chengjs.cjsssmsweb.mybatis.pojo.master;
import javax.persistence.*;
@Table(name = "country")
public class Country {
/**
* primary key
*/
@Id
@Column(name = "Id")
@GeneratedValue(strategy = GenerationType.IDENTITY, generator = "SELECT replace(t.uuid,\"-\",\"\") FROM (SELECT uuid() uuid FROM dual) t")
private Integer id;
/**
* name
*/
@Column(name = "countryname")
private String countryname;
/**
* code
*/
@Column(name = "countrycode")
private String countrycode;
/**
* Gets the primary key
*
* @return Id - primary key
*/
public Integer getId() {
return id;
}
/**
* Sets the primary key
*
* @param id primary key
*/
public void setId(Integer id) {
this.id = id;
}
/**
* Gets the name
*
* @return countryname - name
*/
public String getCountryname() {
return countryname;
}
/**
* Sets the name
*
* @param countryname name
*/
public void setCountryname(String countryname) {
this.countryname = countryname == null ? null : countryname.trim();
}
/**
* Gets the code
*
* @return countrycode - code
*/
public String getCountrycode() {
return countrycode;
}
/**
* Sets the code
*
* @param countrycode code
*/
public void setCountrycode(String countrycode) {
this.countrycode = countrycode == null ? null : countrycode.trim();
}
} | 1,538 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
URole.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/pojo/master/URole.java | package com.chengjs.cjsssmsweb.mybatis.pojo.master;
import java.util.Date;
import javax.persistence.*;
@Table(name = "u_role")
public class URole {
@Id
@Column(name = "id")
@GeneratedValue(strategy = GenerationType.IDENTITY, generator = "SELECT replace(t.uuid,\"-\",\"\") FROM (SELECT uuid() uuid FROM dual) t")
private String id;
@Column(name = "rolename")
private String rolename;
@Column(name = "description")
private String description;
@Column(name = "createtime")
private Date createtime;
@Column(name = "modifytime")
private Date modifytime;
@Column(name = "discard")
private String discard;
/**
* @return id
*/
public String getId() {
return id;
}
/**
* @param id
*/
public void setId(String id) {
this.id = id == null ? null : id.trim();
}
/**
* @return rolename
*/
public String getRolename() {
return rolename;
}
/**
* @param rolename
*/
public void setRolename(String rolename) {
this.rolename = rolename == null ? null : rolename.trim();
}
/**
* @return description
*/
public String getDescription() {
return description;
}
/**
* @param description
*/
public void setDescription(String description) {
this.description = description == null ? null : description.trim();
}
/**
* @return createtime
*/
public Date getCreatetime() {
return createtime;
}
/**
* @param createtime
*/
public void setCreatetime(Date createtime) {
this.createtime = createtime;
}
/**
* @return modifytime
*/
public Date getModifytime() {
return modifytime;
}
/**
* @param modifytime
*/
public void setModifytime(Date modifytime) {
this.modifytime = modifytime;
}
/**
* @return discard
*/
public String getDiscard() {
return discard;
}
/**
* @param discard
*/
public void setDiscard(String discard) {
this.discard = discard == null ? null : discard.trim();
}
} | 2,181 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
UPermission.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/pojo/master/UPermission.java | package com.chengjs.cjsssmsweb.mybatis.pojo.master;
import java.util.Date;
import javax.persistence.*;
@Table(name = "u_permission")
public class UPermission {
@Id
@Column(name = "id")
@GeneratedValue(strategy = GenerationType.IDENTITY, generator = "SELECT replace(t.uuid,\"-\",\"\") FROM (SELECT uuid() uuid FROM dual) t")
private String id;
@Column(name = "pername")
private String pername;
@Column(name = "description")
private String description;
@Column(name = "createtime")
private Date createtime;
@Column(name = "modifytime")
private Date modifytime;
@Column(name = "discard")
private String discard;
/**
* @return id
*/
public String getId() {
return id;
}
/**
* @param id
*/
public void setId(String id) {
this.id = id == null ? null : id.trim();
}
/**
* @return pername
*/
public String getPername() {
return pername;
}
/**
* @param pername
*/
public void setPername(String pername) {
this.pername = pername == null ? null : pername.trim();
}
/**
* @return description
*/
public String getDescription() {
return description;
}
/**
* @param description
*/
public void setDescription(String description) {
this.description = description == null ? null : description.trim();
}
/**
* @return createtime
*/
public Date getCreatetime() {
return createtime;
}
/**
* @param createtime
*/
public void setCreatetime(Date createtime) {
this.createtime = createtime;
}
/**
* @return modifytime
*/
public Date getModifytime() {
return modifytime;
}
/**
* @param modifytime
*/
public void setModifytime(Date modifytime) {
this.modifytime = modifytime;
}
/**
* @return discard
*/
public String getDiscard() {
return discard;
}
/**
* @param discard
*/
public void setDiscard(String discard) {
this.discard = discard == null ? null : discard.trim();
}
} | 2,182 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
UConRolePermission.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/pojo/master/UConRolePermission.java | package com.chengjs.cjsssmsweb.mybatis.pojo.master;
import java.util.Date;
import javax.persistence.*;
@Table(name = "u_con_role_permission")
public class UConRolePermission {
@Id
@Column(name = "id")
@GeneratedValue(strategy = GenerationType.IDENTITY, generator = "SELECT replace(t.uuid,\"-\",\"\") FROM (SELECT uuid() uuid FROM dual) t")
private String id;
@Column(name = "roleid")
private String roleid;
@Column(name = "perid")
private String perid;
@Column(name = "createtime")
private Date createtime;
@Column(name = "modifytime")
private Date modifytime;
@Column(name = "discard")
private String discard;
/**
* @return id
*/
public String getId() {
return id;
}
/**
* @param id
*/
public void setId(String id) {
this.id = id == null ? null : id.trim();
}
/**
* @return roleid
*/
public String getRoleid() {
return roleid;
}
/**
* @param roleid
*/
public void setRoleid(String roleid) {
this.roleid = roleid == null ? null : roleid.trim();
}
/**
* @return perid
*/
public String getPerid() {
return perid;
}
/**
* @param perid
*/
public void setPerid(String perid) {
this.perid = perid == null ? null : perid.trim();
}
/**
* @return createtime
*/
public Date getCreatetime() {
return createtime;
}
/**
* @param createtime
*/
public void setCreatetime(Date createtime) {
this.createtime = createtime;
}
/**
* @return modifytime
*/
public Date getModifytime() {
return modifytime;
}
/**
* @param modifytime
*/
public void setModifytime(Date modifytime) {
this.modifytime = modifytime;
}
/**
* @return discard
*/
public String getDiscard() {
return discard;
}
/**
* @param discard
*/
public void setDiscard(String discard) {
this.discard = discard == null ? null : discard.trim();
}
} | 2,121 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
UUser.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/mybatis/pojo/master/UUser.java | package com.chengjs.cjsssmsweb.mybatis.pojo.master;
import java.util.Date;
import javax.persistence.*;
@Table(name = "u_user")
public class UUser {
@Id
@Column(name = "id")
@GeneratedValue(strategy = GenerationType.IDENTITY, generator = "SELECT replace(t.uuid,\"-\",\"\") FROM (SELECT uuid() uuid FROM dual) t")
private String id;
@Column(name = "username")
private String username;
@Column(name = "password")
private String password;
@Column(name = "description")
private String description;
@Column(name = "discard")
private String discard;
@Column(name = "createtime")
private Date createtime;
@Column(name = "modifytime")
private Date modifytime;
/**
* @return id
*/
public String getId() {
return id;
}
/**
* @param id
*/
public void setId(String id) {
this.id = id == null ? null : id.trim();
}
/**
* @return username
*/
public String getUsername() {
return username;
}
/**
* @param username
*/
public void setUsername(String username) {
this.username = username == null ? null : username.trim();
}
/**
* @return password
*/
public String getPassword() {
return password;
}
/**
* @param password
*/
public void setPassword(String password) {
this.password = password == null ? null : password.trim();
}
/**
* @return description
*/
public String getDescription() {
return description;
}
/**
* @param description
*/
public void setDescription(String description) {
this.description = description == null ? null : description.trim();
}
/**
* @return discard
*/
public String getDiscard() {
return discard;
}
/**
* @param discard
*/
public void setDiscard(String discard) {
this.discard = discard == null ? null : discard.trim();
}
/**
* @return createtime
*/
public Date getCreatetime() {
return createtime;
}
/**
* @param createtime
*/
public void setCreatetime(Date createtime) {
this.createtime = createtime;
}
/**
* @return modifytime
*/
public Date getModifytime() {
return modifytime;
}
/**
* @param modifytime
*/
public void setModifytime(Date modifytime) {
this.modifytime = modifytime;
}
} | 2,508 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
TestEnv.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/enums/TestEnv.java | package com.chengjs.cjsssmsweb.enums;
import com.chengjs.cjsssmsweb.common.util.resources.PropertiesUtil;
/**
* TestEnv: switches for the test environment
*
* @author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/8/31
*/
public class TestEnv {
/**
* When testing is enabled, onTTrue is true
*/
public static boolean onTTrue = true;
/**
* When testing is enabled, onTFalse is false
*/
public static boolean onTFalse = false;
public TestEnv() {
String testEnv = PropertiesUtil.getValue("test");
onTTrue = "true".equals(testEnv) ? true : false;
onTFalse = "true".equals(testEnv) ? false : true;
}
}
| 663 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
SessionEnum.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/enums/SessionEnum.java | package com.chengjs.cjsssmsweb.enums;
/**
* SessionEnum:
*
* @author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/9/2
*/
public enum SessionEnum {
UserName
}
| 205 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
EnvEnum.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/enums/EnvEnum.java | package com.chengjs.cjsssmsweb.enums;
import java.util.ArrayList;
import java.util.List;
/**
* EnvEnum: system environment enum
* author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/8/29
*/
public enum EnvEnum {
/** location of the default system environment properties file */
CJS_SSMS_PROP_PATH("/env-config.properties", "系统环境默认配置properties文件位置"),
/** lucene index location -- the actual path is configured in env-config.properties */
LUCENE_INDEX_PATH("lucene.store", "lucene索引文件位置"),
/** system request base URL, port 8080 */
IP8080("http://localhost:8080/","系统请求路径配置 8080"),
/** system request base URL, port 9000 */
IP9000("http://localhost:9000/","系统请求路径配置 9000")
;
/*===================================== string Enum General Method =====================================*/
/** enum value */
private final String val;
/** enum description */
private final String message;
/**
* Builds an EnvEnum.
* @param val the enum value.
* @param message the enum description.
*/
private EnvEnum(String val, String message) {
this.val = val;
this.message = message;
}
/**
* Returns the enum value.
* @return the enum value.
*/
public String getVal() {
return val;
}
/**
* Returns the enum description.
* @return the enum description.
*/
public String getMessage() {
return message;
}
/**
* Returns the enum value.
* @return the enum value.
*/
public String val() {
return val;
}
/**
* Returns the enum description.
* @return the enum description.
*/
public String message() {
return message;
}
/**
* Looks up an enum constant by its value.
* @param val the value to look up.
* @return the EnvEnum corresponding to the value.
* @throws IllegalArgumentException if no EnvEnum matches val.
*/
public static EnvEnum findStatus(String val) {
for (EnvEnum status : values()) {
if (status.getVal().equals(val)) {
return status;
}
}
throw new IllegalArgumentException("ResultInfo EvnEnum not legal:" + val);
}
/**
* Gets all enum constants.
*
* @return all enum constants.
*/
public static List<EnvEnum> getAllStatus() {
List<EnvEnum> list = new ArrayList<EnvEnum>();
for (EnvEnum status : values()) {
list.add(status);
}
return list;
}
/**
* Gets the values of all enum constants.
*
* @return all enum values as strings.
*/
public static List<String> getAllStatusVal() {
List<String> list = new ArrayList<String>();
for (EnvEnum status : values()) {
list.add(status.val());
}
return list;
}
}
| 2,676 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
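A short usage sketch of the enum above (not a file from the repository): reading a constant's value and looking a constant up by value with findStatus.

package com.chengjs.cjsssmsweb.enums;

public class EnvEnumUsageSketch {
  public static void main(String[] args) {
    // the raw value carried by a constant, e.g. "/env-config.properties"
    String propPath = EnvEnum.CJS_SSMS_PROP_PATH.getVal();
    // reverse lookup by value; throws IllegalArgumentException if nothing matches
    EnvEnum byVal = EnvEnum.findStatus("lucene.store");
    System.out.println(propPath + " -> " + byVal.name() + " (" + byVal.getMessage() + ")");
  }
}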
BaseResponse.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/req/BaseResponse.java | package com.chengjs.cjsssmsweb.req;
import com.chengjs.cjsssmsweb.common.enums.StatusEnum;
import com.chengjs.cjsssmsweb.common.util.StringUtil;
import java.io.Serializable;
/**
* HTTP response wrapper object
*
* @param <T>
* @author minipa_chengjs
*/
public class BaseResponse<T> implements Serializable {
/**
* status code
*/
private String code;
/**
* response message
*/
private String message;
/**
* request number
*/
private String reqNo;
/**
* response body
*/
private T messageBody;
public BaseResponse() {
}
public BaseResponse(T messageBody) {
this.messageBody = messageBody;
}
public BaseResponse(String code, String message) {
this.code = code;
this.message = message;
}
public BaseResponse(String code, String message, T messageBody) {
this.code = code;
this.message = message;
this.messageBody = messageBody;
}
public BaseResponse(String code, String message, String reqNo, T messageBody) {
this.code = code;
this.message = message;
this.reqNo = reqNo;
this.messageBody = messageBody;
}
public static <T> BaseResponse<T> create(T t) {
return new BaseResponse<T>(t);
}
public static <T> BaseResponse<T> create(T t, StatusEnum statusEnum) {
return new BaseResponse<T>(statusEnum.getCode(), statusEnum.getMessage(), t);
}
public static <T> BaseResponse<T> createSuccess(T t, String message) {
return new BaseResponse<T>(StatusEnum.SUCCESS.getCode(),
StringUtil.isNullOrEmpty(message) ? StatusEnum.SUCCESS.getMessage() : message, t);
}
public static <T> BaseResponse<T> createFail(T t, String message) {
return new BaseResponse<T>(StatusEnum.FAIL.getCode(),
StringUtil.isNullOrEmpty(message) ? StatusEnum.FAIL.getMessage() : message, t);
}
public static <T> BaseResponse<T> create(T t, StatusEnum statusEnum, String message) {
return new BaseResponse<T>(statusEnum.getCode(), message, t);
}
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
public T getMessageBody() {
return messageBody;
}
public void setMessageBody(T messageBody) {
this.messageBody = messageBody;
}
public String getReqNo() {
return reqNo;
}
public void setReqNo(String reqNo) {
this.reqNo = reqNo;
}
@Override
public String toString() {
return "BaseResponse{" +
"code=" + code +
", message='" + message + '\'' +
", reqNo='" + reqNo + '\'' +
", messageBody=" + messageBody +
'}';
}
}
| 2,706 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
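A hedged usage sketch of the response wrapper above (not a file from the repository): NULLBody comes from the same codebase and appears later in this dump; the message strings are arbitrary.

package com.chengjs.cjsssmsweb.req;

import com.chengjs.cjsssmsweb.vo.NULLBody;

public class BaseResponseUsageSketch {
  public static void main(String[] args) {
    // success with a payload and a custom message
    BaseResponse<String> ok = BaseResponse.createSuccess("hello", "request handled");
    // failure with an empty body
    BaseResponse<NULLBody> fail = BaseResponse.createFail(NULLBody.create(), "user not found");
    System.out.println(ok);
    System.out.println(fail);
  }
}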
package-info.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/service/common/package-info.java | /**
* package-info: common services
*
* @author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/9/13
*/
package com.chengjs.cjsssmsweb.service.common; | 189 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
ISelectService.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/service/common/ISelectService.java | package com.chengjs.cjsssmsweb.service.common;
import java.lang.reflect.InvocationTargetException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* ISelectService:
*
* @author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/9/13
*/
public interface ISelectService {
/**
* Generic <select/> query
* @return
*/
List<Map<String, String>> commonSelect(String method, HashMap<String, String> params)
throws InvocationTargetException, IllegalAccessException, NoSuchMethodException;
/**
* Generic grid query
* @param params
* @return
*/
Map<String, Object> queryGridKey(int pageNum, int pageSize, String field, String sort, HashMap<String, String> params)
throws ClassNotFoundException, IllegalAccessException,
InstantiationException, NoSuchMethodException, InvocationTargetException;
}
| 899 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
SelectServiceImpl.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/service/common/impl/SelectServiceImpl.java | package com.chengjs.cjsssmsweb.service.common.impl;
import com.chengjs.cjsssmsweb.common.util.StringUtil;
import com.chengjs.cjsssmsweb.mybatis.MybatisHelper;
import com.chengjs.cjsssmsweb.mybatis.mapper.common.ISelectDao;
import com.chengjs.cjsssmsweb.service.common.ISelectService;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.github.pagehelper.PageRowBounds;
import org.apache.commons.collections.map.HashedMap;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import tk.mybatis.mapper.common.Mapper;
import javax.annotation.Resource;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* SelectServiceImpl:
* 1. builds generic select results (text, val)
*
* @author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/9/13
*/
@Service
public class SelectServiceImpl implements ISelectService {
private static final Logger log = LoggerFactory.getLogger(SelectServiceImpl.class);
private static final String MAPPER = "com.chengjs.cjsssmsweb.mybatis.mapper.master.";
private static final String POJO = "com.chengjs.cjsssmsweb.mybatis.pojo.master.";
@Resource
private ISelectDao selectDao;
@Resource(name = "sqlSessionFactory")
private SqlSessionFactory factory;
/**
* Generic select query.
* Invokes a method on the mybatis Mapper proxy via reflection.
*
* @param method name of a method declared on ISelectDao
* @param params parameters for the ISelectDao method
* @return
* @throws InvocationTargetException
* @throws IllegalAccessException
* @throws NoSuchMethodException
*/
@Override
public List<Map<String, String>> commonSelect(String method, HashMap<String, String> params)
throws InvocationTargetException, IllegalAccessException, NoSuchMethodException {
SqlSession sqlSession = factory.openSession();
ISelectDao mapper = sqlSession.getMapper(ISelectDao.class);
Method m = mapper.getClass().getMethod(method, new Class[]{Map.class});
Object result = m.invoke(mapper, params);
/*Cast to the expected type here.
Select queries declared on a mybatis interface must return a List;
select queries in mapper.xml must return a Map.
Mybatis decides the return shape automatically, based on whether one or many records come back.*/
return (List<Map<String, String>>) result;
}
@Override
public Map<String, Object> queryGridKey(int pageNum, int pageSize,
String field, String sort,
HashMap<String, String> params)
throws ClassNotFoundException, IllegalAccessException,
InstantiationException, NoSuchMethodException, InvocationTargetException {
String[] strs = params.get("key").split("_"); /*1.strs[0]:接口, strs[1]:方法*/
SqlSession sqlSession = MybatisHelper.getSqlSession(); /*2.sqlSession*/
PageRowBounds rowBounds = new PageRowBounds(pageNum, pageSize); /*3.rowBounds*/
/*
0 = {HashMap$Node@7360} "rolename" -> "0001"
1 = {HashMap$Node@7361} "key" -> "UUserMapper_gridUsers"
2 = {HashMap$Node@7362} "username" -> "0001"
* */
/*1.Mapper*/
Class mapper_clz = Class.forName(MAPPER + strs[0]);
Mapper mapper = (Mapper) sqlSession.getMapper(mapper_clz);
/*2. Page paging operations*/
// List<Object> list = pagePojo(params, strs, mapper, pageNum, pageSize); // pojo
List<Object> list = pageMap(params, strs, mapper, pageNum, pageSize); // map
PageInfo page = new PageInfo(list);
/*4. build a map shaped for miniui_grid display*/
Map<String, Object> map_grid = new HashedMap();
map_grid.put("total", page.getTotal());
map_grid.put("data", list);
return map_grid;
}
/**
* 2.2) params ==> pojo property mapping; Map parameters are more general
*
* @param params paging query parameters
* @param strs paging query interface and method
* @param mapper reflected mybatis dao/mapper object
* @param pageNum
* @param pageSize
* @return
*/
private List<Object> pageMap(HashMap<String, String> params, String[] strs,
Mapper mapper, int pageNum, int pageSize)
throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Method mapper_method = mapper.getClass().getMethod(strs[1], new Class[]{Map.class});
/*miniui pages start at 0, pagehelper pages start at 1*/
// PageHelper.startPage(pageNum + 1, pageSize);
// mapper.selectByRowBounds()
return (List<Object>) mapper_method.invoke(mapper, params);
}
/**
* 2.1) params ==> pojo property mapping; a POJO is not suitable for multi-table joins or free-form criteria -- skipped
*
* @param params paging query parameters
* @param strs paging query interface and method
* @param mapper reflected mybatis dao/mapper object
* @param pageNum
* @param pageSize
* @return
*/
private List<Object> pagePojo(HashMap<String, String> params,
String[] strs, Mapper mapper, int pageNum, int pageSize)
throws ClassNotFoundException, InstantiationException, IllegalAccessException,
NoSuchMethodException, InvocationTargetException {
Class pojo_clz = Class.forName(POJO + strs[0].replace("Mapper", ""));
Object pojo = pojo_clz.newInstance();
for (String s : params.keySet()) {
Method pojo_method = null;
try {
pojo_method = pojo_clz.getMethod("set" + StringUtil.reSqlLikeStr(StringUtil.captureName(s)), String.class);
pojo_method.invoke(pojo, params.get(s));
} catch (NoSuchMethodException e) {
log.warn("POJO不存在属性" + s + ".跳过.");
} catch (SecurityException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (IllegalArgumentException e) {
log.warn("POJO方法" + pojo_method + "不存在.");
} catch (InvocationTargetException e) {
e.printStackTrace();
}
}
/*3.1) invoke the mapper method via reflection with a POJO parameter*/
Method mapper_method = mapper.getClass().getMethod(strs[1], new Class[]{pojo_clz});
/*miniui pages start at 0, pagehelper pages start at 1*/
/*the PageHelper approach does not seem to take effect for reflectively invoked methods*/
PageHelper.startPage(pageNum + 1, pageSize);
return (List<Object>) mapper_method.invoke(mapper, pojo);
}
}
/* structure of the data returned for miniui_grid
{
"total": 55931,
"data": [{
"city": "",
"age": 25,
"gender": 1,
"dept_id": "js",
},
{
"city": "",
"age": 25,
"gender": 1,
"dept_id": "rs",
}]
}
*/
| 6,837 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
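A hedged sketch of how the reflective service above would be driven (not a file from the repository): "users" must be a method declared on ISelectDao, the "key" entry follows the MapperName_methodName convention parsed by queryGridKey, and obtaining the service instance (here via constructor injection) is assumed.

package com.chengjs.cjsssmsweb.service.common;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SelectServiceUsageSketch {

  private final ISelectService selectService;

  public SelectServiceUsageSketch(ISelectService selectService) {
    this.selectService = selectService;
  }

  public void run() throws Exception {
    // 1) generic <select/> options: the method name is resolved reflectively on ISelectDao
    HashMap<String, String> params = new HashMap<String, String>();
    params.put("username", "0001"); // assumed filter key
    List<Map<String, String>> options = selectService.commonSelect("users", params);

    // 2) generic grid query: "key" = mapper interface + "_" + method, as the split("_") above expects
    HashMap<String, String> gridParams = new HashMap<String, String>();
    gridParams.put("key", "UUserMapper_users");
    Map<String, Object> grid = selectService.queryGridKey(0, 20, null, null, gridParams);
    System.out.println(options.size() + " options, grid total=" + grid.get("total"));
  }
}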
IUserManagerService.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/service/master/IUserManagerService.java | package com.chengjs.cjsssmsweb.service.master;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.UUser;
import java.util.Set;
/**
* IUserManagerService:
* author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 2017/8/24
*/
public interface IUserManagerService {
Set<String> findRoleNames(String UserName);
Set<String> findPermissionNames(Set<String> roleNames);
UUser findUserByUserName(String UserName);
void registerUser(UUser uUser);
}
| 479 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
UserManagerServiceImpl.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/service/master/UserManagerServiceImpl.java | package com.chengjs.cjsssmsweb.service.master;
import com.chengjs.cjsssmsweb.common.util.UUIDUtil;
import com.chengjs.cjsssmsweb.mybatis.mapper.dao.UserRolePermissionDao;
import com.chengjs.cjsssmsweb.mybatis.mapper.master.UUserMapper;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.UUser;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.Set;
/**
* UserManagerServiceImpl:
* author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 2017/8/24
*/
@Service("userService")
public class UserManagerServiceImpl implements IUserManagerService {
@Autowired
private UserRolePermissionDao uURPdao;
@Autowired
private UUserMapper userMapper;
@Override
public Set<String> findRoleNames(String UserName) {
Set<String> roleNames = uURPdao.findRoleNamesByUserName(UserName);
return roleNames;
}
@Override
public Set<String> findPermissionNames(Set<String> roleNames) {
Set<String> permissionNames = uURPdao.findPermissionNamesByRoleNames(roleNames);
return permissionNames;
}
@Override
public UUser findUserByUserName(String userName) {
UUser user = uURPdao.findUserByUserName(userName);
return user;
}
@Override
public void registerUser(UUser user) {
UUser re_User = userMapper.selectByPrimaryKey(user.getUsername());
if (null == re_User) {
user.setId(UUIDUtil.uuid());
userMapper.insertSelective(user);
}
}
}
| 1,522 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
SocketContentServiceImpl.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/service/master/SocketContentServiceImpl.java | package com.chengjs.cjsssmsweb.service.master;
import com.chengjs.cjsssmsweb.mybatis.mapper.dao.SocketContentDao;
import com.chengjs.cjsssmsweb.mybatis.mapper.master.SocketContentMapper;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.SocketContent;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.List;
/**
* SocketContentServiceImpl:
* author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 22:14
*/
@Service
public class SocketContentServiceImpl implements ISocketContentService {
@Autowired
private SocketContentDao socketContentDao;
@Autowired
private SocketContentMapper socketContentMapper;
@Override
public List<SocketContent> findSocketContentList() {
return socketContentDao.findSocketContentList();
}
@Override
public int insertSelective(SocketContent socketContent) {
return socketContentMapper.insertSelective(socketContent);
}
}
| 1,022 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
IUserFrontService.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/service/master/IUserFrontService.java | package com.chengjs.cjsssmsweb.service.master;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.UUser;
import com.chengjs.cjsssmsweb.util.page.Page;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* IUserFrontService:
* author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 21:18
*/
public interface IUserFrontService {
public UUser getUserById(String userid);
public UUser findByLogin(UUser user) ;
public int createUser(UUser user) ;
public Page<UUser> findByParams(UUser user, int pageNo, int limit) ;
int deleteByPrimaryKey(String userid);
int updateByPrimaryKeySelective(UUser user);
int findAllCount(UUser user) ;
List<UUser> findHotUser() ;
List<UUser> findAllByQuery(UUser user) ;
/**
* Paged query
* @param map
* @return
*/
public List<UUser> list(Map<String, Object> map) ;
public Long getTotal(Map<String, Object> map);
/**
* Shiro login verification: looks up user information by username
* @param username
* @return
*/
public UUser findUserByUsername(String username) ;
Set<String> findRoleNames(String username);
Set<String> findPermissionNames(Set<String> roleNames);
boolean registerUser(UUser user);
}
| 1,282 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
ISocketContentService.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/service/master/ISocketContentService.java | package com.chengjs.cjsssmsweb.service.master;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.SocketContent;
import java.util.List;
/**
* ISocketContentService:
* author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 21:08
*/
public interface ISocketContentService {
List<SocketContent> findSocketContentList();
int insertSelective(SocketContent socketContent);
}
| 416 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
UserFrontServiceImpl.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/service/master/UserFrontServiceImpl.java | package com.chengjs.cjsssmsweb.service.master;
import com.chengjs.cjsssmsweb.common.util.UUIDUtil;
import com.chengjs.cjsssmsweb.common.util.codec.MD5Util;
import com.chengjs.cjsssmsweb.mybatis.mapper.dao.UserRolePermissionDao;
import com.chengjs.cjsssmsweb.mybatis.mapper.master.UUserMapper;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.UUser;
import com.chengjs.cjsssmsweb.util.Transactioner;
import com.chengjs.cjsssmsweb.util.page.Page;
import org.apache.ibatis.session.RowBounds;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* UserFrontServiceImpl:
* author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/8/29
*/
@Service
public class UserFrontServiceImpl implements IUserFrontService{
@Autowired
private DataSourceTransactionManager transactionManager;
/*========================== userMapper ==========================*/
@Autowired
private UUserMapper userMapper;
@Override
public UUser getUserById(String userid) { return userMapper.selectByPrimaryKey(userid); }
@Override
public int deleteByPrimaryKey(String userid) { return userMapper.deleteByPrimaryKey(userid); }
@Override
public int updateByPrimaryKeySelective(UUser user) { return userMapper.updateByPrimaryKeySelective(user); }
@Override
public int createUser(UUser user) {
return userMapper.insertSelective(user);
}
/*========================== userDao ==========================*/
@Autowired
private UserRolePermissionDao userRolePermissionDao;
@Override
public UUser findByLogin(UUser user) {
return userRolePermissionDao.findByLogin(user);
}
/**
* Paged query
* @param user
* @param pageNo
* @param limit
* @return
*/
@Override
public Page<UUser> findByParams(UUser user, int pageNo, int limit) {
Page<UUser> page = new Page<UUser>();
page.setPageNo(pageNo);
page.setLimit(limit);
int offset = page.getOffsets();
RowBounds rowBound = new RowBounds(offset, limit);
List<UUser> users = userRolePermissionDao.findByParams(user,rowBound);
page.setRows(users);
int total = userRolePermissionDao.findAllCount(user) ;
page.setTotal(total) ;
if(offset >= page.getTotal()){
page.setPageNo(page.getTotalPages());
}
return page ;
}
@Override
public int findAllCount(UUser user) {
return userRolePermissionDao.findAllCount(user);
}
@Override
public List<UUser> findHotUser() {
return userRolePermissionDao.findHotUser();
}
@Override
public List<UUser> findAllByQuery(UUser user) {
return userRolePermissionDao.findAllByQuery(user);
}
@Override
public List<UUser> list(Map<String, Object> map) {
return userRolePermissionDao.list(map);
}
@Override
public Long getTotal(Map<String, Object> map) {
return userRolePermissionDao.getTotal(map);
}
/**
* Shiro login verification: looks up user information by username
*
* EAO issue: can a single app use only one shiro password check? Would injecting several daoManagers, Realms, etc. allow multiple login checks?
*
* @param username
* @return
*/
@Override
public UUser findUserByUsername(String username) {
return userRolePermissionDao.findUserByUsername(username);
}
@Override
public Set<String> findRoleNames(String username) {
return userRolePermissionDao.findRoleNames(username);
}
@Override
public Set<String> findPermissionNames(Set<String> roleNames) {
return userRolePermissionDao.findPermissionNames(roleNames);
}
/**
* Starts a transaction programmatically.
* Strictly speaking this is not several writes but one query plus one insert, so a transaction is not really required.
*
* @param user
* @return
*/
@Override
public boolean registerUser(UUser user) {
/*start the transaction -- extracted into a helper; TODO could the transactionManager be injected inside Transactioner instead?*/
Transactioner tr = new Transactioner(transactionManager);
boolean commit = false;
boolean result = false;
try {
UUser re_user = userMapper.selectByPrimaryKey(user.getUsername());
if (null == re_user) {
user.setId(UUIDUtil.uuid());
user.setPassword(MD5Util.md5(user.getPassword()));
userMapper.insertSelective(user);
result = true;
} else {
result = false;
}
commit = true;
} catch (Exception e) {
e.printStackTrace();
} finally {
tr.end(commit);
}
return result;
}
}
| 4,621 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
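For reference, a sketch of the programmatic transaction pattern that registerUser delegates to Transactioner (not a file from the repository): Transactioner.start is shown further down in this dump, but the body of end(commit) is not, so the commit/rollback handling here is an assumption based on the standard Spring PlatformTransactionManager API.

package com.chengjs.cjsssmsweb.util;

import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition;

public class ProgrammaticTxSketch {

  private final DataSourceTransactionManager transactionManager;

  public ProgrammaticTxSketch(DataSourceTransactionManager transactionManager) {
    this.transactionManager = transactionManager;
  }

  public void runInNewTransaction(Runnable work) {
    DefaultTransactionDefinition def = new DefaultTransactionDefinition();
    // same propagation the Transactioner below uses: always open a fresh transaction
    def.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
    TransactionStatus status = transactionManager.getTransaction(def);
    try {
      work.run();
      transactionManager.commit(status);
    } catch (RuntimeException e) {
      transactionManager.rollback(status);
      throw e;
    }
  }
}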
NULLBody.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/vo/NULLBody.java | package com.chengjs.cjsssmsweb.vo;
/**
* Empty object used as a generic type argument to indicate that there are no extra request or response parameters
*/
public class NULLBody {
public NULLBody() {}
public static NULLBody create(){
return new NULLBody();
}
}
| 252 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
package-info.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/util/package-info.java | /**
* package-info: project utility package, project-specific tools
* author: Chengjs, version:1.0.0, 2017-09-08
*/
package com.chengjs.cjsssmsweb.util; | 145 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
ExceptionUtil.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/util/ExceptionUtil.java | package com.chengjs.cjsssmsweb.util;
import com.chengjs.cjsssmsweb.common.enums.StatusEnum;
import org.slf4j.Logger;
import org.springframework.ui.Model;
import java.util.Map;
/**
* ExceptionUtil: exception output, logging, and other handling after an exception occurs
* author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/8/28
*/
public class ExceptionUtil {
/**
* Controller Exception Handle ==> return string
*
* @param model spring mvc view model
* @param msg exception message
* @param e exception
* @param re_status operation status
* @param log sl4j logger of Controller class
*/
public static void controllerEH(Model model, String msg, Exception e, Logger log, StatusEnum re_status) {
if (null != model) {
model.addAttribute("error", msg);
}
re_status = StatusEnum.FAIL;
log.debug(msg);
e.printStackTrace();
}
/**
* Controller Exception Handle ==> return Map
* @param msg exception message
* @param e exception
* @param log sl4j logger of Controller class
* @param map object need be a JSONObject, otherwise ajax will be error
* @param re_status
*/
public static void controllerEHJson(String msg, Exception e, Logger log, Map<String, Object> map, StatusEnum re_status) {
controllerEH(null, msg, e, log, re_status);
HttpRespUtil.buildRespStatus(msg, map, re_status);
}
}
| 1,437 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
HttpRespUtil.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/util/HttpRespUtil.java | package com.chengjs.cjsssmsweb.util;
import com.chengjs.cjsssmsweb.common.enums.StatusEnum;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import net.sf.json.JSONObject;
import net.sf.json.util.JSONUtils;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Map;
/**
* HttpRespUtil:
*
* @author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 11:41
*/
public class HttpRespUtil {
/**
* Returns the status to the browser as JSON
*
* @param status
* @param response
*/
public static void respJson(StatusEnum status, HttpServletResponse response) {
responseBuildJson(response, parseJson(status, null));
}
/**
* Returns the object and the status to the browser as JSON
*
* @param status
* @param data
* @param response
*/
public static void respJson(StatusEnum status, Object data, HttpServletResponse response) {
if (null == data) {
respJson(status, response);
} else {
responseBuildJson(response, parseJson(status, data));
}
}
/**
* Builds the JSON object, writes it to the response, and sets common response properties
*
* @param response
* @param jo
*/
public static void responseBuildJson(HttpServletResponse response, Object jo) {
String json = "";
if (jo instanceof JSONObject) {
json = JSONUtils.valueToString(jo);
} else {
try {
ObjectMapper objectMapper = new ObjectMapper();
json = objectMapper.writeValueAsString(jo);
} catch (JsonProcessingException e) {
e.printStackTrace();
}
}
//response.setContentType("text/plain");
response.setContentType("application/json");
response.setHeader("Pragma", "No-cache");
response.setHeader("Cache-Control", "no-cache");
response.setCharacterEncoding("UTF-8");
PrintWriter writer = null;
try {
writer = response.getWriter();
writer.print(json);
} catch (IOException e) {
e.printStackTrace();
} finally {
if (null != writer) {
writer.flush();
writer.close();
}
}
}
/**
   * Builds the standard HTTP response message entries in the given map.
   * @param msg response message
* @param map
* @param re_status
*/
public static void buildRespStatus(String msg, Map<String, Object> map, StatusEnum re_status) {
map.put("msg", msg); // 请求自定义相应信息
map.put("code", re_status.getCode()); // http请求响应code
map.put("meg", re_status.getMessage()); // http请求响应消息message
}
/**
   * Copies matching properties from a source object to a target object (used to build log entities).
*
* @param objectFrom
* @param objectTo
*/
public static void setLogValueModelToModel(Object objectFrom, Object objectTo) {
Class<? extends Object> clazzFrom = objectFrom.getClass();
Class<? extends Object> clazzTo = objectTo.getClass();
for (Method toSetMethod : clazzTo.getMethods()) {
String mName = toSetMethod.getName();
if (mName.startsWith("set")) {
        String field = mName.substring(3);// property name
        Object value;// value read from the source object
try {
if ("LogId".equals(field)) {
continue;
}
Method fromGetMethod = clazzFrom.getMethod("get" + field);
value = fromGetMethod.invoke(objectFrom);
          toSetMethod.invoke(objectTo, value);// copy the value onto the target object
} catch (NoSuchMethodException e) {
throw new RuntimeException(e);
} catch (InvocationTargetException e) {
throw new RuntimeException(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
}
}
/**
* 加工HttpResponse,处理返回状态
*
* @param status @see com.chengjs.cjsssmsweb.enums.{@link StatusEnum}
* @param data 返回数据
* @return
*/
public static JSONObject parseJson(StatusEnum status, Object data) {
JSONObject jo = new JSONObject();
jo.put("code", status.code());
jo.put("msg", status.message());
jo.put("data", data);
return jo;
}
}
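/**
 * Usage sketch (added for illustration; not part of the original project): writing a
 * JSON payload from a servlet-style handler. The map keys and the status value passed
 * in by the caller are assumptions.
 */
class HttpRespUtilUsageSketch {
  void handle(StatusEnum status, HttpServletResponse response) {
    Map<String, Object> data = new java.util.HashMap<String, Object>();
    data.put("total", 0);
    // serializes {"code": ..., "msg": ..., "data": {...}} and writes it to the response
    HttpRespUtil.respJson(status, data, response);
  }
}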
| 4,187 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
Transactioner.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/util/Transactioner.java | package com.chengjs.cjsssmsweb.util;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition;
/**
 * Transactioner: wraps Spring's programmatic transaction handling; annotation-driven transactions would of course work as well
*
* @author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/9/1
*/
public class Transactioner {
private static final Logger log = LoggerFactory.getLogger(Transactioner.class);
private TransactionStatus status = null;
private DataSourceTransactionManager transactionManager;
/**
   * Initializes the helper and starts a transaction.
* @param transactionManager
*/
public Transactioner(DataSourceTransactionManager transactionManager) {
this.transactionManager = transactionManager;
start(transactionManager);
}
/**
   * Starts a transaction.
* @param transactionManager
*/
public void start(DataSourceTransactionManager transactionManager) {
DefaultTransactionDefinition def = new DefaultTransactionDefinition();
    /* PROPAGATION_REQUIRES_NEW: propagation behavior that always starts a new transaction */
def.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
status = transactionManager.getTransaction(def);
}
/**
   * Ends the transaction by committing or rolling back.
* @param commitOrRollback true-commit, false-rollback
*/
public void end(boolean commitOrRollback) {
if (null == status) {
log.warn("事务未开启无法提交");
return;
}
if (commitOrRollback) {
transactionManager.commit(status);
} else {
transactionManager.rollback(status);
}
}
}
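/**
 * Usage sketch (added for illustration; not part of the original project): driving the
 * helper around a block of mapper calls. The surrounding service class and the bean
 * wiring of the transaction manager are assumptions.
 */
class TransactionerUsageSketch {
  void runInTransaction(DataSourceTransactionManager transactionManager) {
    Transactioner tx = new Transactioner(transactionManager); // opens a new transaction
    boolean ok = false;
    try {
      // ... perform DAO/mapper operations here ...
      ok = true;
    } finally {
      tx.end(ok); // true commits, false rolls back
    }
  }
}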
| 1,855 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
PageUtil.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/util/page/PageUtil.java | package com.chengjs.cjsssmsweb.util.page;
/**
 * Pagination helper.
*/
public class PageUtil {
/**
   * PageUtil: generates the pagination HTML fragment.
   *
   * EAO issue: the more common approach nowadays is to pass the paging data to the front end and let browser-side JS build the markup.
   *
   * @param targetUrl target URL
   * @param totalNum total number of records
   * @param currentPage current page
   * @param pageSize page size
*
* @author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 23:31
*/
public static String genPagination(String targetUrl,long totalNum,int currentPage,int pageSize,String param){
long totalPage=totalNum%pageSize==0?totalNum/pageSize:totalNum/pageSize+1;
if(totalPage==0){
return "未查询到数据";
}else{
StringBuffer pageCode=new StringBuffer();
pageCode.append("<li><a href='"+targetUrl+"?page=1&"+param+"'>首页</a></li>");
if(currentPage>1){
pageCode.append("<li><a href='"+targetUrl+"?page="+(currentPage-1)+"&"+param+"'>上一页</a></li>");
}else{
if(currentPage ==1){
pageCode.append("<li class='disabled'><a href='#'>上一页</a></li>");
}else {
pageCode.append("<li class='disabled'><a href='"+targetUrl+"?page="+(currentPage-1)+"&"+param+"'>上一页</a></li>");
}
}
for(int i=currentPage-2;i<=currentPage+2;i++){
if(i<1||i>totalPage){
continue;
}
if(i==currentPage){
pageCode.append("<li class='active'><a href='"+targetUrl+"?page="+i+"&"+param+"'>"+i+"</a></li>");
}else{
pageCode.append("<li><a href='"+targetUrl+"?page="+i+"&"+param+"'>"+i+"</a></li>");
}
}
if(currentPage<totalPage){
pageCode.append("<li><a href='"+targetUrl+"?page="+(currentPage+1)+"&"+param+"'>下一页</a></li>");
}else{
if(totalPage == currentPage){
pageCode.append("<li class='disabled'><a href='#'>下一页</a></li>");
}else {
pageCode.append("<li class='disabled'><a href='"+targetUrl+"?page="+(currentPage+1)+"&"+param+"'>下一页</a></li>");
}
}
pageCode.append("<li><a href='"+targetUrl+"?page="+totalPage+"&"+param+"'>尾页</a></li>");
return pageCode.toString();
}
}
}
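/**
 * Usage sketch (added for illustration; not part of the original project): rendering a
 * pagination bar for 95 records, 10 per page, currently on page 3. The target URL and
 * the query parameter are assumptions.
 */
class PageUtilUsageSketch {
  String render() {
    // produces the <li> items for the first/previous/numbered/next/last links
    return PageUtil.genPagination("/user/list", 95L, 3, 10, "keyword=admin");
  }
}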
| 2,131 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
Page.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/util/page/Page.java | package com.chengjs.cjsssmsweb.util.page;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
* ClassName: Page
*
* @author chj
 * @Description: paging helper class for the datagrid
 * @date 2016-1-3 2:17:09 PM
*/
/**
* Page:
* @author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 23:29
*/
public class Page<T> implements Serializable {
private static final long serialVersionUID = 1L;
/**
   * number of records shown per page
   **/
  private int limit;
  /**
   * total number of records
   **/
  private int total;
  /**
   * current page number
   **/
  private int pageNo;
  /**
   * result rows
**/
private List<T> rows = new ArrayList<T>();
public int getOffset() {
return (pageNo - 1) * limit;
}
public void setOffset(int offset) {
}
public void setTotal(int total) {
this.total = total;
}
public int getLimit() {
return limit;
}
public void setLimit(int limit) {
this.limit = limit;
}
public List<T> getRows() {
return rows;
}
public void setRows(List<T> rows) {
this.rows = rows;
}
  // compute the total number of pages
public int getTotalPages() {
int totalPages;
if (total % limit == 0) {
totalPages = total / limit;
} else {
totalPages = (total / limit) + 1;
}
return totalPages;
}
public int getTotal() {
return total;
}
public int getOffsets() {
return (pageNo - 1) * limit;
}
public int getEndIndex() {
if (getOffsets() + limit > total) {
return total;
} else {
return getOffsets() + limit;
}
}
public int getPageNo() {
return pageNo;
}
public void setPageNo(int pageNo) {
this.pageNo = pageNo;
}
}
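/**
 * Usage sketch (added for illustration; not part of the original project): filling the
 * datagrid page object before handing it to the view layer. The row type and the
 * concrete values are assumptions.
 */
class PageUsageSketch {
  Page<String> build(List<String> rows, int total) {
    Page<String> page = new Page<String>();
    page.setPageNo(1);
    page.setLimit(10);
    page.setTotal(total);
    page.setRows(rows);
    // getOffset() is 0 for the first page; getTotalPages() rounds up
    return page;
  }
}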
| 1,882 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
PageEntity.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/util/page/PageEntity.java | package com.chengjs.cjsssmsweb.util.page;
import java.io.Serializable;
import java.util.List;
/**
 * Paging entity.
*
* @param <T>
*/
public class PageEntity<T> implements Serializable {
  /** current page number **/
  private int page ;
  /** start row offset **/
  private int start ;
  /** number of records per page **/
private int pageSize ;
public PageEntity(int page, int pageSize) {
this.page = page;
this.pageSize = pageSize;
}
public int getPage() {
return page;
}
public void setPage(int page) {
this.page = page;
}
public int getStart() {
return (page-1)*pageSize ;
}
public void setStart(int start) {
this.start = start;
}
public int getPageSize() {
return pageSize;
}
public void setPageSize(int pageSize) {
this.pageSize = pageSize;
}
}
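/**
 * Usage sketch (added for illustration; not part of the original project): computing the
 * SQL start offset for page 3 with 20 records per page.
 */
class PageEntityUsageSketch {
  int offsetForPageThree() {
    PageEntity<Object> entity = new PageEntity<Object>(3, 20);
    return entity.getStart(); // (3 - 1) * 20 = 40
  }
}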
| 800 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
HttpReqsUtil.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/util/page/HttpReqsUtil.java | package com.chengjs.cjsssmsweb.util.page;
import javax.servlet.http.HttpServletRequest;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.Enumeration;
import java.util.HashMap;
/**
* HttpReqsUtil:
*
* @author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/9/17
*/
public class HttpReqsUtil {
/**
   * Collects all HttpServletRequest parameters into a map.
* @return
*/
public static HashMap<String,String> getRequestVals(HttpServletRequest request) throws UnsupportedEncodingException {
HashMap<String, String> map = new HashMap<>();
for (Enumeration<String> names = request.getParameterNames(); names.hasMoreElements();) {
String key = names.nextElement();
map.put(key, URLDecoder.decode(
request.getParameter(key).replace("%", "%25"), "utf-8"));
}
return map;
}
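  /*
   * Usage sketch (added for illustration; not part of the original project): a controller
   * can collect every request parameter in one call, e.g.
   *
   *   HashMap<String, String> params = HttpReqsUtil.getRequestVals(request);
   *   String page = params.get("page");
   *
   * The parameter name "page" is an assumption.
   */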
} | 873 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
package-info.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/components/package-info.java | /**
 * package-info: the various components integrated into the web project
*
* @author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/9/9
*/
package com.chengjs.cjsssmsweb.components; | 204 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
RealmTest.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/components/shiro/RealmTest.java | package com.chengjs.cjsssmsweb.components.shiro;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authc.UsernamePasswordToken;
import org.apache.shiro.config.IniSecurityManagerFactory;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.subject.Subject;
import org.apache.shiro.util.Factory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* RealmTest:
* author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 2017/8/26
*/
public class RealmTest {
private static final Logger LOG = LoggerFactory.getLogger(RealmTest.class);
public static void main(String[] args) {
    //here the user/role/permission configuration comes from an ini file; in practice it is usually backed by database tables
    Factory<SecurityManager> factory = new IniSecurityManagerFactory("classpath:shiro.ini.bak");
    //SecurityManager instance
    SecurityManager instance = factory.getInstance();
    SecurityUtils.setSecurityManager(instance);
    //test user
Subject currentUser = SecurityUtils.getSubject();
UsernamePasswordToken token = new UsernamePasswordToken("admin", "admin");
boolean result = false;
try {
currentUser.login(token);
result = true;
LOG.debug("认证成功");
} catch (Exception e) {
result = false;
LOG.debug("认证失败");
}
}
}
| 1,345 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
CustomCredentialsMatcher.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/components/shiro/CustomCredentialsMatcher.java | package com.chengjs.cjsssmsweb.components.shiro;
import com.chengjs.cjsssmsweb.common.util.codec.MD5Util;
import org.apache.shiro.authc.AuthenticationInfo;
import org.apache.shiro.authc.AuthenticationToken;
import org.apache.shiro.authc.UsernamePasswordToken;
import org.apache.shiro.authc.credential.SimpleCredentialsMatcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* CustomCredentialsMatcher:
 * Password verification: extend SimpleCredentialsMatcher or HashedCredentialsMatcher and implement doCredentialsMatch()
*
* author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/8/28
*/
public class CustomCredentialsMatcher extends SimpleCredentialsMatcher {
private static final Logger log = LoggerFactory.getLogger(CustomCredentialsMatcher.class);
@Override
public boolean doCredentialsMatch(AuthenticationToken authcToken, AuthenticationInfo info) {
UsernamePasswordToken token = (UsernamePasswordToken) authcToken;
Object tokenCredentials = encrypt(String.valueOf(token.getPassword()));
Object accountCredentials = getCredentials(info);
    //compare the encrypted token password with the stored credentials: return true if they match, false otherwise
return equals(tokenCredentials, accountCredentials);
}
/**
   * Encrypts the incoming plain-text password.
* @param data
* @return
*/
private String encrypt(String data) {
String encrypt = MD5Util.md5(data, "");
//String encrypt = new Sha384Hash(data).toBase64();
log.debug(data + ":" + encrypt);
return encrypt;
}
}
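/**
 * Wiring sketch (added for illustration; not part of the original project): attaching the
 * matcher to a realm programmatically; in the project this wiring is more likely done in
 * the Spring/Shiro configuration.
 */
class CustomCredentialsMatcherWiringSketch {
  UUserRealm wire(UUserRealm realm) {
    realm.setCredentialsMatcher(new CustomCredentialsMatcher());
    return realm;
  }
}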
| 1,559 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
UUserRealm.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/components/shiro/UUserRealm.java | package com.chengjs.cjsssmsweb.components.shiro;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.UUser;
import com.chengjs.cjsssmsweb.service.master.IUserManagerService;
import org.apache.shiro.authc.AuthenticationException;
import org.apache.shiro.authc.AuthenticationInfo;
import org.apache.shiro.authc.AuthenticationToken;
import org.apache.shiro.authc.SimpleAuthenticationInfo;
import org.apache.shiro.authz.AuthorizationInfo;
import org.apache.shiro.authz.SimpleAuthorizationInfo;
import org.apache.shiro.realm.AuthorizingRealm;
import org.apache.shiro.subject.PrincipalCollection;
import javax.annotation.Resource;
import java.util.Set;
/**
* UserRealm:
 * Realm: its data model defines how authorization is performed and how to talk to an RDBMS, LDAP, etc.; it gives full control over creating and defining the authorization model
* author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 2017/8/25
*/
public class UUserRealm extends AuthorizingRealm {
@Resource
private IUserManagerService userMService;
/**
   * Authorization (permission check).
*
* @param principals
* @return AuthorizationInfo
*/
@Override
protected AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principals) {
String principal_username = principals.getPrimaryPrincipal().toString();
SimpleAuthorizationInfo info = new SimpleAuthorizationInfo();
Set<String> roleNames = userMService.findRoleNames(principal_username);
Set<String> permissionNames = userMService.findPermissionNames(roleNames);
info.setRoles(roleNames);
    //permission-based authorization is more flexible than role-based authorization and fits real-world requirements better
info.setStringPermissions(permissionNames);
return info;
}
/**
   * Login authentication; executed before authorization.
*
* @param token
* @return AuthenticationInfo
* @throws AuthenticationException
*/
@Override
protected AuthenticationInfo doGetAuthenticationInfo(AuthenticationToken token) throws AuthenticationException {
String username = token.getPrincipal().toString();
UUser user = userMService.findUserByUserName(username);
if (null == user) {
return null;
} else {
/**
       * Principal options for the info object: 1. username, 2. User, 3. UserWithRoleAndPermission
       * Each has pros and cons; username is used here.
       *
       * EAO issue: a WholeUser object holding roles and permissions could be built at login and used as the principal,
       * so that authorization would not need to query SQL again.
       * 1. advantage: fewer SQL round trips
       * 2. drawback: larger cache, and changes to user data are reflected late
       * Suitable for user types whose data is small and rarely changes but whose permissions are checked frequently.
       *
       * SimpleAuthorizationInfo: the principal parameter is ultimately cast to a Collection in the source; the intent is unclear.
*/
SimpleAuthenticationInfo info = new SimpleAuthenticationInfo(user.getUsername(), user.getPassword(), "UserRealm");
return info;
}
}
}
| 2,898 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
UserRealm.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/components/shiro/UserRealm.java | package com.chengjs.cjsssmsweb.components.shiro;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.UUser;
import com.chengjs.cjsssmsweb.service.master.IUserFrontService;
import org.apache.shiro.authc.AuthenticationException;
import org.apache.shiro.authc.AuthenticationInfo;
import org.apache.shiro.authc.AuthenticationToken;
import org.apache.shiro.authc.SimpleAuthenticationInfo;
import org.apache.shiro.authz.AuthorizationInfo;
import org.apache.shiro.authz.SimpleAuthorizationInfo;
import org.apache.shiro.realm.AuthorizingRealm;
import org.apache.shiro.subject.PrincipalCollection;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Set;
/**
* UserRealm:
*
 * eao issue: multi-realm configuration TODO
*
 * Realm: its data model defines how authorization is performed and how to talk to an RDBMS, LDAP, etc.; it gives full control over creating and defining the authorization model
* author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 2017/8/25
*/
public class UserRealm extends AuthorizingRealm {
@Autowired
private IUserFrontService userFService;
/**
   * Authorization (permission check).
*
* @param principals
* @return AuthorizationInfo
*/
@Override
protected AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principals) {
String principal_username = principals.getPrimaryPrincipal().toString();
SimpleAuthorizationInfo info = new SimpleAuthorizationInfo();
Set<String> roleNames = userFService.findRoleNames(principal_username);
Set<String> permissionNames = userFService.findPermissionNames(roleNames);
info.setRoles(roleNames);
    //permission-based authorization is more flexible than role-based authorization and fits real-world requirements better
info.setStringPermissions(permissionNames);
return info;
}
/**
   * Login authentication; executed before authorization.
*
* @param token
* @return AuthenticationInfo
* @throws AuthenticationException
*/
@Override
protected AuthenticationInfo doGetAuthenticationInfo(AuthenticationToken token) throws AuthenticationException {
String userName = token.getPrincipal().toString();
UUser user = userFService.findUserByUsername(userName);
if (null == user) {
return null;
} else {
/**
       * Principal options for the info object: 1. username, 2. User, 3. UserWithRoleAndPermission
       * Each has pros and cons; username is used here.
       *
       * EAO issue: a WholeUser object holding roles and permissions could be built at login and used as the principal,
       * so that authorization would not need to query SQL again.
       * 1. advantage: fewer SQL round trips
       * 2. drawback: larger cache, and changes to user data are reflected late
       * Suitable for user types whose data is small and rarely changes but whose permissions are checked frequently.
       *
       * SimpleAuthorizationInfo: the principal parameter is ultimately cast to a Collection in the source; the intent is unclear.
*/
SimpleAuthenticationInfo info = new SimpleAuthenticationInfo(user.getUsername(), user.getPassword(), "UserRealm");
return info;
}
}
}
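/**
 * Wiring sketch (added for illustration; not part of the original project): registering the
 * realm with a plain Shiro security manager; the project itself wires this through its
 * Spring configuration.
 */
class UserRealmWiringSketch {
  org.apache.shiro.mgt.DefaultSecurityManager wire(UserRealm realm) {
    org.apache.shiro.mgt.DefaultSecurityManager securityManager = new org.apache.shiro.mgt.DefaultSecurityManager();
    securityManager.setRealm(realm);
    return securityManager;
  }
}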
| 2,963 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
SearcherTest.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/components/lucene/SearcherTest.java | package com.chengjs.cjsssmsweb.components.lucene;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.cn.smart.SmartChineseAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.highlight.Fragmenter;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.search.highlight.SimpleSpanFragmenter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import java.io.StringReader;
import java.nio.file.Paths;
/**
* SearcherTest:
* author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 2017/8/24
*/
public class SearcherTest {
public static void search(String indexDir, String q) throws Exception {
Directory dir = FSDirectory.open(Paths.get(indexDir));
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher is = new IndexSearcher(reader);
    // Analyzer analyzer=new StandardAnalyzer(); // standard analyzer
SmartChineseAnalyzer analyzer = new SmartChineseAnalyzer();
QueryParser parser = new QueryParser("desc", analyzer);
Query query = parser.parse(q);
long start = System.currentTimeMillis();
TopDocs hits = is.search(query, 10);
long end = System.currentTimeMillis();
System.out.println("匹配 " + q + " ,总共花费" + (end - start) + "毫秒" + "查询到" + hits.totalHits + "个记录");
QueryScorer scorer = new QueryScorer(query);
Fragmenter fragmenter = new SimpleSpanFragmenter(scorer);
SimpleHTMLFormatter simpleHTMLFormatter = new SimpleHTMLFormatter("<b><font color='red'>", "</font></b>");
Highlighter highlighter = new Highlighter(simpleHTMLFormatter, scorer);
highlighter.setTextFragmenter(fragmenter);
for (ScoreDoc scoreDoc : hits.scoreDocs) {
Document doc = is.doc(scoreDoc.doc);
System.out.println(doc.get("city"));
System.out.println(doc.get("desc"));
String desc = doc.get("desc");
if (desc != null) {
TokenStream tokenStream = analyzer.tokenStream("desc", new StringReader(desc));
System.out.println(highlighter.getBestFragment(tokenStream, desc));
}
}
reader.close();
}
public static void main(String[] args) {
String indexDir = IndexerTest.PATH_LUCENE;
String q = "南京";
try {
search(indexDir, q);
} catch (Exception e) {
e.printStackTrace();
}
}
}
| 2,841 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
package-info.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/components/lucene/package-info.java | /**
 * package-info: Lucene embedded search engine
* author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/8/29
*/
package com.chengjs.cjsssmsweb.components.lucene; | 203 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
IndexerTest.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/components/lucene/IndexerTest.java | package com.chengjs.cjsssmsweb.components.lucene;
import org.apache.lucene.analysis.cn.smart.SmartChineseAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import java.nio.file.Paths;
/**
* IndexerTest:
* author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 2017/8/24
*/
public class IndexerTest {
public static final String PATH_LUCENE = "D:\\Projects\\cjs_ssms\\cjs_ssms_web\\generate\\lucenetmp";
private String[] usernames = {"admin","chengjs","malf"};
private String cities[] = {"青岛", "南京", "上海"};
private String descriptions[] = {
"青岛是一个美丽的城市。",
"南京是一个有文化的城市。南京是一个文化的城市南京,简称宁,是江苏省会,地处中国东部地区,长江下游," +
"濒江近海。全市下辖11个区,总面积6597平方公里,2013年建成区面积752.83平方公里,常住人口818.78万" +
",其中城镇人口659.1万人。[1-4] “江南佳丽地,金陵帝王州”,南京拥有着6000多年文明史、" +
"近2600年建城史和近500年的建都史,是中国四大古都之一,有“六朝古都”、“十朝都会”之称," +
"是中华文明的重要发祥地,历史上曾数次庇佑华夏之正朔,长期是中国南方的政治、经济、文化中心," +
"拥有厚重的文化底蕴和丰富的历史遗存。[5-7] 南京是国家重要的科教中心," +
"自古以来就是一座崇文重教的城市,有“天下文枢”、" +
"“东南第一学”的美誉。截至2013年,南京有高等院校75所," +
"其中211高校8所,仅次于北京上海;国家重点实验室25所、" +
"国家重点学科169个、两院院士83人,均居中国第三。[8-10] 。",
"上海是一个繁华的城市。"
};
private Directory dir;
/**
   * Obtains an IndexWriter instance.
*
* @return
* @throws Exception
*/
private IndexWriter getWriter() throws Exception {
    //Analyzer analyzer=new StandardAnalyzer(); // standard analyzer
SmartChineseAnalyzer analyzer = new SmartChineseAnalyzer();
IndexWriterConfig iwc = new IndexWriterConfig(analyzer);
IndexWriter writer = new IndexWriter(dir, iwc);
return writer;
}
/**
   * Builds the index.
*
* @param indexDir
* @throws Exception
*/
public void index(String indexDir) throws Exception {
dir = FSDirectory.open(Paths.get(indexDir));
IndexWriter writer = getWriter();
for (int i = 0; i < usernames.length; i++) {
Document doc = new Document();
doc.add(new StringField("username", usernames[i] + "", Field.Store.YES));
doc.add(new StringField("city", cities[i], Field.Store.YES));
doc.add(new TextField("description", descriptions[i], Field.Store.YES));
      writer.addDocument(doc); // add the document
}
writer.close();
}
public static void main(String[] args) throws Exception {
new IndexerTest().index(PATH_LUCENE);
}
public String[] getUsernames() {
return usernames;
}
public void setUsernames(String[] usernames) {
this.usernames = usernames;
}
public String[] getCities() {
return cities;
}
public void setCities(String[] cities) {
this.cities = cities;
}
public String[] getDescriptions() {
return descriptions;
}
public void setDescriptions(String[] descriptions) {
this.descriptions = descriptions;
}
}
| 3,799 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
LuceneIndex.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/components/lucene/LuceneIndex.java | package com.chengjs.cjsssmsweb.components.lucene;
import com.chengjs.cjsssmsweb.common.util.StringUtil;
import com.chengjs.cjsssmsweb.common.util.resources.PropertiesUtil;
import com.chengjs.cjsssmsweb.enums.EnvEnum;
import com.chengjs.cjsssmsweb.mybatis.pojo.master.UUser;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.cn.smart.SmartChineseAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.highlight.Fragmenter;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.search.highlight.SimpleSpanFragmenter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import java.io.StringReader;
import java.nio.file.Paths;
import java.util.LinkedList;
import java.util.List;
/**
 * LuceneIndex: Lucene index helper class
* author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 2017/8/24
*/
public class LuceneIndex {
private Directory dir = null;
/**
   * Obtains an IndexWriter instance.
*
* @return
* @throws Exception
*/
private IndexWriter getWriter() throws Exception {
/*
     * the index location is configured in env-config.properties
*/
dir = FSDirectory.open(Paths.get(PropertiesUtil.getValue(EnvEnum.LUCENE_INDEX_PATH.val())));
SmartChineseAnalyzer analyzer = new SmartChineseAnalyzer();
IndexWriterConfig iwc = new IndexWriterConfig(analyzer);
IndexWriter writer = new IndexWriter(dir, iwc);
return writer;
}
/*
   * Adds a User document to the index.
* @param user
*/
public void addIndex(UUser user) throws Exception {
IndexWriter writer = getWriter();
Document doc = new Document();
/*
     * Field.Store.YES stores the value in the index: store it when the search results must display the record,
     * skip storing when the results only show a title or similar, and avoid storing very long content.
     * Fields added as TextField can be used for querying.
*/
try {
doc.add(new StringField("userid", String.valueOf(user.getId()), Field.Store.YES));
doc.add(new TextField("username", user.getUsername(), Field.Store.YES));
writer.addDocument(doc);
} catch (Exception e) {
e.printStackTrace();
throw e;
} finally {
writer.close();
}
}
/**
   * Updates the index entry of the given user.
*
* @param user
* @throws Exception
*/
public void updateIndex(UUser user) throws Exception {
IndexWriter writer = getWriter();
Document doc = new Document();
doc.add(new StringField("userid", String.valueOf(user.getId()), Field.Store.YES));
doc.add(new TextField("username", user.getUsername(), Field.Store.YES));
doc.add(new TextField("description", user.getDescription(), Field.Store.YES));
writer.updateDocument(new Term("userid", String.valueOf(user.getId())), doc);
writer.close();
}
/**
   * Deletes the index entry of the given user id.
*
* @param userid
* @throws Exception
*/
public void deleteIndex(String userid) throws Exception {
IndexWriter writer = getWriter();
writer.deleteDocuments(new Term("userid", userid));
    writer.forceMergeDeletes(); // force-merge away the deleted documents
writer.commit();
writer.close();
}
/**
   * Searches users.
   *
   * @param query_key search keyword
* @return
* @throws Exception
*/
public List<UUser> searchBlog(String query_key) throws Exception {
/**
     * 1. Obtain the query parser.
     * Note that the index must be opened from the location it was written to, otherwise nothing will be found.
*/
dir = FSDirectory.open(Paths.get(PropertiesUtil.getValue(EnvEnum.LUCENE_INDEX_PATH.val())));
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher is = new IndexSearcher(reader);
BooleanQuery.Builder booleanQuery = new BooleanQuery.Builder();
SmartChineseAnalyzer analyzer = new SmartChineseAnalyzer();
/*
     * 2. username and description are the two fields we search on;
     * they must have been indexed as TextField when the documents were stored.
*/
QueryParser parser = new QueryParser("username", analyzer);
Query query = parser.parse(query_key);
QueryParser parser2 = new QueryParser("description", analyzer);
Query query2 = parser2.parse(query_key);
booleanQuery.add(query, BooleanClause.Occur.SHOULD);
booleanQuery.add(query2, BooleanClause.Occur.SHOULD);
TopDocs hits = is.search(booleanQuery.build(), 100);
QueryScorer scorer = new QueryScorer(query);
Fragmenter fragmenter = new SimpleSpanFragmenter(scorer);
/*
     * The highlight style for matched keywords can be customized here as needed.
*/
SimpleHTMLFormatter simpleHTMLFormatter = new SimpleHTMLFormatter("<b><font color='red'>", "</font></b>");
Highlighter highlighter = new Highlighter(simpleHTMLFormatter, scorer);
highlighter.setTextFragmenter(fragmenter);
List<UUser> userList = new LinkedList<UUser>();
for (ScoreDoc scoreDoc : hits.scoreDocs) {
Document doc = is.doc(scoreDoc.doc);
UUser user = new UUser();
user.setUsername(doc.get("username"));
user.setDescription(doc.get("description"));
String username = doc.get("username");
String description = doc.get("description");
if (username != null) {
TokenStream tokenStream = analyzer.tokenStream("username", new StringReader(username));
String husername = highlighter.getBestFragment(tokenStream, username);
if (StringUtil.isEmpty(husername)) {
user.setUsername(username);
} else {
user.setUsername(husername);
}
}
if (description != null) {
TokenStream tokenStream = analyzer.tokenStream("description", new StringReader(description));
String hContent = highlighter.getBestFragment(tokenStream, description);
if (StringUtil.isEmpty(hContent)) {
if (description.length() <= 200) {
user.setDescription(description);
} else {
user.setDescription(description.substring(0, 200));
}
} else {
user.setDescription(hContent);
}
}
userList.add(user);
}
return userList;
}
}
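/**
 * Usage sketch (added for illustration; not part of the original project): indexing one
 * user and searching it back with highlighting. The username value is an assumption, and
 * the user id is left unset for brevity.
 */
class LuceneIndexUsageSketch {
  List<UUser> demo() throws Exception {
    LuceneIndex index = new LuceneIndex();
    UUser user = new UUser();
    user.setUsername("chengjs");
    index.addIndex(user); // writes the userid/username fields
    return index.searchBlog("chengjs"); // returns hits with highlighted fragments
  }
}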
| 6,945 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
WSSample.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/components/cxf/WSSample.java | package com.chengjs.cjsssmsweb.components.cxf;
import javax.jws.WebService;
/**
 * WSSample: sample public WebService interface
* @author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 14:56
*/
@WebService
public interface WSSample {
public String say(String str);
}
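/**
 * Client sketch (added for illustration; not part of the original project): calling the
 * service through a CXF JAX-WS proxy instead of a hand-built SOAP request. The endpoint
 * address is an assumption.
 */
class WSSampleClientSketch {
  String call() {
    org.apache.cxf.jaxws.JaxWsProxyFactoryBean factory = new org.apache.cxf.jaxws.JaxWsProxyFactoryBean();
    factory.setServiceClass(WSSample.class);
    factory.setAddress("http://localhost:9000/wSSample");
    WSSample client = (WSSample) factory.create();
    return client.say("chengjs");
  }
}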
| 296 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
WSServerTest.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/components/cxf/WSServerTest.java | package com.chengjs.cjsssmsweb.components.cxf;
import com.chengjs.cjsssmsweb.components.cxf.impl.WSSampleImpl;
import com.chengjs.cjsssmsweb.enums.TestEnv;
import org.apache.cxf.jaxws.JaxWsServerFactoryBean;
public class WSServerTest {
/**
   * Endpoint test; WSDL: http://localhost:9000/wSSample?wsdl
* @param args
*/
public static void main(String[] args) {
System.out.println("web service 启动中。。。");
WSSampleImpl implementor = new WSSampleImpl();
String address = "http://192.168.245.221:9000/wSSample";
if (TestEnv.onTTrue) {
address = "http://localhost:9000/wSSample";
}
JaxWsServerFactoryBean factoryBean = new JaxWsServerFactoryBean();
    factoryBean.setAddress(address); // endpoint address
    factoryBean.setServiceClass(WSSample.class); // service interface
    factoryBean.setServiceBean(implementor); // implementation bean
    factoryBean.create(); // publish the webservice endpoint
System.out.println("web service 启动成功。。");
}
}
| 991 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
WSClientTest.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/components/cxf/WSClientTest.java | package com.chengjs.cjsssmsweb.components.cxf;
import com.chengjs.cjsssmsweb.enums.EnvEnum;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
/**
 * WSClientTest: back-end WebService call via HttpURLConnection
*
* @author: <a href="mailto:[email protected]">chengjs_minipa</a>, version:1.0.0, 2017/9/2
*/
public class WSClientTest {
/**
   * method name
*/
private static final String METHOD = "say";
/**
   * request parameter
*/
private static final String PARAM = "chengjs";
/**
   * the namespace of the service interface; it can be found in the WSDL response
*/
private static final String SERVICE_NAMESPACE = "http://cxf.components.cjsssmsweb.chengjs.com/";
/**
   * Invokes the service; references: http://blog.csdn.net/u011165335/article/details/51345224
* http://www.cnblogs.com/siqi/archive/2013/12/15/3475222.html
* @param args
*/
public static void main(String[] args) throws IOException {
    //service address
// URL wsUrl = new URL(EnvEnum.IP9000.getVal()+"wSSample");
    URL wsUrl = new URL(EnvEnum.IP8080.getVal()+"webservice/wSSample");/* "webservice" is the servlet mapping configured in web.xml; tests the endpoint published on Tomcat */
HttpURLConnection conn = (HttpURLConnection) wsUrl.openConnection();
conn.setDoInput(true);
conn.setDoOutput(true);
conn.setRequestMethod("POST");
conn.setRequestProperty("Content-Type", "text/xml;charset=UTF-8");
OutputStream os = conn.getOutputStream();
    //request body
String soap = "<soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\" " +
"xmlns:q0=\"" + SERVICE_NAMESPACE + "\" " +
"xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" " +
"xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">" +
"<soapenv:Body> <q0:" + METHOD + "><arg0>" + PARAM + "</arg0> </q0:" + METHOD + "> </soapenv:Body> </soapenv:Envelope>";
os.write(soap.getBytes());
InputStream is = conn.getInputStream();
byte[] b = new byte[1024];
int len = 0;
String s = "";
while((len = is.read(b)) != -1){
String ss = new String(b,0,len,"UTF-8");
s += ss;
}
System.out.println(s);
/*
     * sample response:
* <soap:Envelope xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/"><soap:Body>
* <ns2:sayResponse xmlns:ns2="http://cxf.cjsssmsweb.chengjs.com/">
* <return>Hellochengjs</return>
* </ns2:sayResponse>
* </soap:Body></soap:Envelope>
* */
is.close();
os.close();
conn.disconnect();
}
}
| 2,578 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
WSSampleImpl.java | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/java/com/chengjs/cjsssmsweb/components/cxf/impl/WSSampleImpl.java | package com.chengjs.cjsssmsweb.components.cxf.impl;
import com.chengjs.cjsssmsweb.components.cxf.WSSample;
import org.springframework.stereotype.Component;
import javax.jws.WebService;
/**
* WSSampleImpl:
* @author: <a href="mailto:[email protected]">chengjs</a>, version:1.0.0, 14:57
*/
@Component("wSSample")
@WebService
public class WSSampleImpl implements WSSample {
public String say(String str) {
return "你好这是wSSample/say返回值:"+str;
}
}
| 479 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
mybatis-config-javaapi.xml | /FileExtraction/Java_unseen/MiniPa_cjs_ssms/cjs_ssms_web/src/main/resources/database/mybatis-config-javaapi.xml | <?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE configuration
PUBLIC "-//mybatis.org//DTD Config 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-config.dtd">
<configuration>
<!--https://github.com/pagehelper/Mybatis-PageHelper/blob/master/wikis/en/HowToUse.md-->
  <!-- configuration for calling MyBatis through the Java API; in the project itself MyBatis is configured via Spring -->
<settings>
<setting name="cacheEnabled" value="true"/>
<setting name="lazyLoadingEnabled" value="false"/>
<setting name="aggressiveLazyLoading" value="true"/>
</settings>
<typeAliases>
<typeAlias type="com.chengjs.cjsssmsweb.mybatis.pojo.master.UUser"/>
<!--<package name="com.chengjs.cjsssmsweb.mybatis.pojo.master"/>-->
</typeAliases>
<plugins>
<plugin interceptor="com.github.pagehelper.PageInterceptor">
<property name="mappers" value="tk.mybatis.mapper.common.Mapper" />
<property name="offsetAsPageNum" value="false"/>
</plugin>
</plugins>
<environments default="development">
<environment id="development">
<transactionManager type="JDBC">
<property name="" value=""/>
</transactionManager>
<dataSource type="POOLED">
<property name="driver" value="com.mysql.jdbc.Driver"/>
<property name="url"
value="jdbc:mysql://192.168.245.220:3306/cjs_ssm_master?useUnicode=true&characterEncoding=utf8"/>
<property name="username" value="cjs_ssm_master"/>
<property name="password" value="Yqqlm!Gs1cl$"/>
</dataSource>
</environment>
</environments>
  <!-- non-Spring configuration: MyBatis itself does not support Ant-style path wildcards (Spring does), so mapper XML files must be listed one by one -->
<mappers>
<mapper resource="mapping/master/UUserMapper.xml"/>
<mapper resource="mapping/master/CountryMapper.xml"/>
<!--<mapper class="com.chengjs.cjsssmsweb.mybatis.mapper.master.UUserMapper"/>-->
    <!-- with the package scan below, the mapper XML files must be in the same package as the interfaces -->
<!--<package name="com.chengjs.cjsssmsweb.mybatis.mapper.master"/>-->
</mappers>
</configuration> | 2,052 | Java | .java | MiniPa/cjs_ssms | 11 | 11 | 1 | 2017-08-20T09:31:28Z | 2018-10-11T06:27:09Z |
OnlineDiagram.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/tools/OnlineDiagram.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.tools;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.Stroke;
import javax.swing.JPanel;
/**
* A simple swing based online-diagram for visualization of
* data sequences.
* <br></br>
* @author Sebastian Otte
*/
public class OnlineDiagram extends JPanel {
private static final long serialVersionUID = 5369278568426927464L;
//
private double lbd = -1.0;
private double ubd = 1.0;
private double range = 2.0;
private int length = 1;
private int curves = 1;
private int drawoffset = 0;
private int drawlength = 0;
private int thickness = 2;
private Stroke stroke = null;
private Stroke dotted = new BasicStroke(
1f,
BasicStroke.CAP_ROUND,
BasicStroke.JOIN_ROUND,
1f,
new float[] {4f},
0f
);
//
private double[][] data = null;
private Color[] color = null;
public void reset() {
this.drawoffset = 0;
this.drawlength = 0;
this.repaint();
}
public void record(final double ...values) {
//
// determine last idx.
//
final int idx = (this.drawoffset + this.drawlength) % this.length;
//
// add values.
//
final int min = Math.min(this.curves, values.length);
for (int i = 0; i < min; i++) {
this.data[i][idx] = values[i];
}
//
// shift values.
//
if (this.drawlength < this.length) {
this.drawlength++;
} else {
this.drawoffset++;
}
}
private void setup() {
this.data = new double[this.curves][this.length];
this.color = new Color[this.curves];
//
this.drawoffset = 0;
this.drawlength = 0;
//
this.stroke = new BasicStroke(this.thickness);
}
public void assignColor(final int curve, final Color color) {
this.color[curve] = color;
}
public OnlineDiagram(
final int length,
final double lbd,
final double ubd,
final int curves
) {
this.lbd = Math.min(lbd, ubd);
this.ubd = Math.max(lbd, ubd);
this.range = this.ubd - this.lbd;
//
this.length = Math.max(1, length);
this.curves = Math.max(1, curves);
//
this.setup();
this.setBackground(new Color(0, 30, 0));
}
private int xValue(final int x) {
final int width = this.getWidth();
final int xx = (x * width) / this.length;
//
return xx;
}
private int yValue(final double y) {
final double height = (this.getHeight());
final double yy = ((y - this.lbd) * height / this.range);
//
return (int)yy;
}
/**
* {@inheritDoc}
*/
@Override
protected void paintComponent(Graphics g) {
Graphics2D g2 = (Graphics2D)g;
g2.setRenderingHint(
RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON
);
super.paintComponent(g);
g2.setColor(Color.GRAY);
g2.setStroke(this.dotted);
g2.drawLine(
0,
(int)(0.75 * this.getHeight()),
this.getWidth() - 1,
(int)(0.75 * this.getHeight())
);
g2.drawLine(
0,
(int)(0.5 * this.getHeight()),
this.getWidth() - 1,
(int)(0.5 * this.getHeight())
);
g2.drawLine(
0,
(int)(0.25 * this.getHeight()),
this.getWidth() - 1,
(int)(0.25 * this.getHeight())
);
g2.setStroke(this.stroke);
//
for (int l = 0; l < this.curves; l++) {
final double[] line = data[l];
//
if (this.color[l] == null) {
g2.setColor(Color.BLACK);
} else {
g2.setColor(this.color[l]);
}
//
int hi = this.getHeight() - 1;
int xx1 = this.xValue(0);
int yy1 = hi - this.yValue(line[this.drawoffset % this.length]);
//
for (int i = 1; i < this.drawlength; i++) {
final int xx2 = xValue(i);
final int yy2 = hi - yValue(
line[(this.drawoffset + i) % this.length]
);
//
g2.drawLine(xx1, yy1, xx2, yy2);
//
xx1 = xx2;
yy1 = yy2;
}
}
}
public int getLines() {
return this.curves;
}
public double getRange() {
return this.range;
}
public double getLbd() {
return this.lbd;
}
public double getUbd() {
return this.ubd;
}
}
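/**
 * Usage sketch (added for illustration; not part of the original JANNLab sources): plotting
 * a sine wave into the diagram from a Swing frame. The window size and the number of
 * samples are arbitrary choices.
 */
class OnlineDiagramUsageSketch {
    public static void main(String[] args) {
        final OnlineDiagram diagram = new OnlineDiagram(200, -1.0, 1.0, 1);
        diagram.assignColor(0, Color.GREEN);
        //
        javax.swing.JFrame frame = new javax.swing.JFrame("OnlineDiagram demo");
        frame.setDefaultCloseOperation(javax.swing.JFrame.EXIT_ON_CLOSE);
        frame.getContentPane().add(diagram);
        frame.setSize(640, 240);
        frame.setVisible(true);
        //
        for (int i = 0; i < 200; i++) {
            diagram.record(Math.sin(i * 0.1)); // append one value per curve
        }
        diagram.repaint();
    }
}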
| 6,007 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
RNNGeneratorExample.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/generator/RNNGeneratorExample.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.generator;
import de.jannlab.Net;
import de.jannlab.core.CellType;
import de.jannlab.generator.RNNGenerator;
/**
* This example demonstrates how to setup standard RNNs. Hereby, all
* hidden-layers are fully self-recurrently connected.
* <br></br>
* @author Sebastian Otte
*/
public class RNNGeneratorExample {
public static void main(String[] args) {
RNNGenerator gen = new RNNGenerator();
//
// setup layers.
//
gen.inputLayer(3);
gen.hiddenLayer(8, CellType.TANH);
gen.outputLayer(2, CellType.TANH);
//
// generate network.
//
Net net = gen.generate();
//
// set max. sequences length. here 100 is just an example
// value.
//
net.rebuffer(100);
System.out.println(net);
}
}
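/**
 * Follow-up sketch (added for illustration; not part of the original JANNLab sources):
 * feeding a short input sequence through the generated network, using the same Net calls
 * as the other examples in this package. The input values are arbitrary, and the net is
 * assumed to have been rebuffered as in main above.
 */
class RNNForwardPassSketch {
    static void forward(Net net) {
        net.reset();
        double[] input = {0.1, 0.2, 0.3};
        double[] output = new double[2];
        for (int t = 0; t < 5; t++) {
            net.input(input, 0);   // present one frame at the current time step
            net.compute();         // advance the network by one step
            net.output(output, 0); // read the current output activations
        }
    }
}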
| 1,765 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
SimpleNetGeneration.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/generator/SimpleNetGeneration.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.generator;
import de.jannlab.Net;
import de.jannlab.generator.NetCoreGenerator;
import de.jannlab.misc.TimeCounter;
/**
 * This example demonstrates handling the NetCoreGenerator.
* It also shows the runtime performance of generating networks.
* <br></br>
* @author Sebastian Otte
*/
public class SimpleNetGeneration {
public static final TimeCounter TC = new TimeCounter();
public static void main(String[] args) {
NetCoreGenerator gen = new NetCoreGenerator();
//
// input layer.
//
int inputsize = 7;
int inputlayer = gen.beginLayer();
int inputcells = gen.valueCells(inputsize);
gen.endLayer();
gen.defineInputLayer(inputlayer);
//
// add bias neuron.
//
int bias = gen.valueCell();
gen.assign(bias, -1);
//
// hidden layer.
//
int hiddensize = 30;
@SuppressWarnings("unused")
int hiddenlayer = gen.beginLayer();
int hiddencells = gen.sigmoidCells(hiddensize);
gen.endLayer();
//
// output layer.
//
int outputlayer = gen.beginLayer();
int outputcells = gen.sigmoidCells(2);
gen.endLayer();
gen.defineOutputLayer(outputlayer);
//
// connect input with output layer and non input layer with bias.
//
gen.weightedLink(inputcells, inputsize, hiddencells, hiddensize);
gen.weightedLink(hiddencells, hiddensize, outputcells, 1);
//
gen.weightedLink(bias, 1, hiddencells, hiddensize);
gen.weightedLink(bias, 1, outputcells, 1);
//
// generate network and measure time.
//
TC.reset();
Net net = gen.generate();
final double time_gen = TC.valueMilliDouble();
//
// print generated network.
//
System.out.println(net);
System.out.println("generation time : " + time_gen + " ms.");
}
}
| 2,933 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
BRNNGeneratorExample.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/generator/BRNNGeneratorExample.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.generator;
import de.jannlab.Net;
import de.jannlab.core.CellType;
import de.jannlab.generator.MLPGenerator;
import de.jannlab.generator.NetCoreGenerator;
/**
* This example demonstrates how to setup bidirectional
* networks.
* <br></br>
* @author Sebastian Otte
*
*/
public class BRNNGeneratorExample {
public static void main(String[] args) {
//
NetCoreGenerator gen = new NetCoreGenerator();
//
// define layers.
//
int input = MLPGenerator.inputLayer(gen, 2);
int hidden1 = MLPGenerator.hiddenLayer(gen, 4, CellType.TANH);
int hidden2 = MLPGenerator.hiddenLayer(gen, 4, CellType.TANH);
int output = MLPGenerator.outputLayer(gen, 2, CellType.TANH);
//
// connect layers.
//
gen.weightedLinkLayer(input, hidden1);
gen.weightedLinkLayer(input, hidden2);
gen.weightedLinkLayer(hidden1, hidden1);
gen.weightedLinkLayer(hidden2, hidden2);
gen.weightedLinkLayer(hidden1, output);
gen.weightedLinkLayer(hidden2, output);
//
        // the following call, which can be used analogously for LSTM layers,
// makes the network bidirectional
//
gen.defineLayerAsReversed(hidden2);
//
final Net net = gen.generate();
//
// set max. sequences length. here 100 is just an example
// value.
//
net.rebuffer(100);
System.out.println(net);
}
}
| 2,432 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
MLPGeneratorExample.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/generator/MLPGeneratorExample.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.generator;
import de.jannlab.Net;
import de.jannlab.core.CellType;
import de.jannlab.generator.MLPGenerator;
/**
* This example demonstrates how to setup standard MLPs.
* <br></br>
* @author Sebastian Otte
*/
public class MLPGeneratorExample {
public static void main(String[] args) {
MLPGenerator gen = new MLPGenerator();
//
// setup layers.
//
gen.inputLayer(2);
gen.hiddenLayer(2, CellType.TANH);
gen.outputLayer(2, CellType.SIGMOID, true, -1.0);
//
// just generate.
//
Net net = gen.generate();
System.out.println(net);
}
}
| 1,578 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
LSTMGeneratorExample.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/generator/LSTMGeneratorExample.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.generator;
import de.jannlab.Net;
import de.jannlab.core.CellType;
import de.jannlab.generator.LSTMGenerator;
import de.jannlab.generator.NetCoreGenerator;
/**
* This example demonstrates how to setup LSTM networks.
* <br></br>
* @author Sebastian Otte
*/
public class LSTMGeneratorExample {
public static void main(String[] args) {
LSTMGenerator gen = new LSTMGenerator();
//
// define layers.
//
gen.inputLayer(5);
gen.hiddenLayer(5, CellType.SIGMOID, CellType.TANH, CellType.TANH, true);
gen.outputLayer(3, CellType.SIGMOID, true, -1.0);
//
// generate network.
//
Net net = gen.generate();
//
// set max. sequences length. here 100 is just an example
// value.
//
net.rebuffer(100);
System.out.println(net);
}
// ######################################################################
// The following method contains the code used by the LSTMGenerator
    // for setting up an LSTM hidden-layer. As you can see, LSTM blocks
    // are modeled only with simple building blocks, so that the LSTM
    // blocks can easily be modified.
// ######################################################################
/**
* Creates a hidden layer with n LSTM blocks.
* <br></br>
* @param gen Instance of NetCoreGenerator.
* @param blocksnum Number of LSTM blocks
* @param gates CellType of the gates.
* @param netin CellType of cell-input.
* @param states CellType of cell-outpt.
* @param peepholes Use peepholes?
* @return Layer index.
*/
public static int hiddenLayer(
final NetCoreGenerator gen,
final int blocksnum,
final CellType gates,
final CellType netin,
final CellType states,
final boolean peepholes
) {
//
// create lstmlayer.
//
final int layer = gen.beginLayer();
//
// input gates, forget gates and input cells.
//
gen.inputConnectors();
final int input_gates = gen.cells(blocksnum, gates);
final int forget_gates = gen.cells(blocksnum, gates);
final int input_cells = gen.cells(blocksnum, netin);
gen.shiftComputationIndex();
//
// mul1, mul2 and dmul11, dmul12, dmul21, dmul22.
//
gen.nonConnectors();
final int dmul11 = gen.cells(blocksnum, CellType.DMULTIPLICATIVE);
final int dmul12 = gen.cells(blocksnum, CellType.DMULTIPLICATIVE);
final int dmul21 = gen.cells(blocksnum, CellType.DMULTIPLICATIVE);
final int dmul22 = gen.cells(blocksnum, CellType.DMULTIPLICATIVE);
gen.shiftComputationIndex();
final int mul1 = gen.cells(blocksnum, CellType.MULTIPLICATIVE);
final int mul2 = gen.cells(blocksnum, CellType.MULTIPLICATIVE);
gen.shiftComputationIndex();
//
// state.
//
final int state_cells = gen.cells(blocksnum, CellType.LINEAR);
gen.shiftComputationIndex();
//
// state-squash and output gates.
//
final int state_squash = gen.cells(blocksnum, states);
gen.inputConnectors();
final int output_gates = gen.cells(blocksnum, gates);
gen.shiftComputationIndex();
//
// mul3 and dmul31, dmul32
//
gen.nonConnectors();
final int dmul31 = gen.cells(blocksnum, CellType.DMULTIPLICATIVE);
final int dmul32 = gen.cells(blocksnum, CellType.DMULTIPLICATIVE);
gen.shiftComputationIndex();
//
gen.outputConnectors();
final int mul3 = gen.cells(blocksnum, CellType.MULTIPLICATIVE);
//
// define links.
//
gen.link(forget_gates, dmul11, blocksnum);
gen.link(input_gates, dmul21, blocksnum);
gen.link(input_cells, dmul22, blocksnum);
//
gen.link(dmul11, mul1, blocksnum);
gen.link(dmul12, mul1, blocksnum);
gen.link(mul1, state_cells, blocksnum);
//
gen.link(state_cells, dmul12, blocksnum);
//
gen.link(dmul21, mul2, blocksnum);
gen.link(dmul22, mul2, blocksnum);
gen.link(mul2, state_cells, blocksnum);
//
gen.link(state_cells, state_squash, blocksnum);
gen.link(state_squash, dmul31, blocksnum);
gen.link(output_gates, dmul32, blocksnum);
gen.link(dmul31, mul3, blocksnum);
gen.link(dmul32, mul3, blocksnum);
//
// use weighted peepholes?
//
if (peepholes) {
gen.weightedLink(state_cells, forget_gates, blocksnum);
gen.weightedLink(state_cells, input_gates, blocksnum);
gen.weightedLink(state_cells, output_gates, blocksnum);
}
//
gen.endLayer();
//
return layer;
}
}
| 5,864 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
DifferentialEvolutionExample.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/recurrent/DifferentialEvolutionExample.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.recurrent;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.Random;
import javax.swing.JFrame;
import javax.swing.Timer;
import de.jannlab.Net;
import de.jannlab.core.CellType;
import de.jannlab.examples.tools.OnlineDiagram;
import de.jannlab.generator.RNNGenerator;
import de.jannlab.optimization.BasicIterationListener;
import de.jannlab.optimization.Objective;
import de.jannlab.optimization.diffevo.DifferentialEvolution;
import de.jannlab.optimization.diffevo.Mutation;
/**
* This simple example demonstrates how Differential Evolution
* can be used for learning a damped oscillation function
* with a Recurrent Neural Network.
* <br></br>
* @author Sebastian Otte
*/
public class DifferentialEvolutionExample {
public static final Random rnd = new Random(0L);
public static double X_LBD = 0.0;
public static double X_UBD = Math.PI * 2.0 * 10.0;
public static double X_RANGE = X_UBD - X_LBD;
public static int TICKS = (int)(X_RANGE / (2.0 * Math.PI) * 20.0);
public static double X_INCR = X_RANGE / (double)TICKS;
/**
* Damped oscillation function, which is learned
* by the RNN.
*/
public static double f(final double x) {
final double xf = x * 1.0;
return (
Math.sin(xf) * Math.exp(-(0.075 * xf))
);
}
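    /*
     * Illustrative sketch (not part of the original example): f implements
     * f(x) = sin(x) * exp(-0.075 * x). Sampling f at its positive peaks
     * x = PI/2 + 2*k*PI, where sin(x) = 1, shows the exponential amplitude
     * decay: roughly 0.889, 0.555 and 0.346 for the first three peaks.
     */
    public static void printPeaks() {
        for (int k = 0; k < 3; k++) {
            final double x = (0.5 * Math.PI) + (2.0 * Math.PI * k);
            System.out.println("f(" + x + ") = " + f(x));
        }
    }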
/**
     * This error function computes the mean squared error between the network
     * output sequence and the oscillation function f. This function
* is used as objective function for the Differential Evolution
* optimization process.
*/
public static double error(final Net net) {
//
net.reset();
double[] input = new double[1];
double[] output = new double[1];
//
double x = X_LBD;
double error = 0.0;
//
for (int i = 0; i < TICKS; i++) {
input[0] = (i == 0)?(1.0):(0.0);
net.input(input, 0);
net.compute();
net.output(output, 0);
//
double diff = output[0] - f(x);
error += (diff * diff);
x += X_INCR;
}
return error / (double)TICKS;
}
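    /*
     * In formula terms (derived from the loop above), the returned value is
     * the mean squared error over the sequence:
     * error = (1/TICKS) * sum_{t=0}^{TICKS-1} (o_t - f(x_t))^2,
     * with x_t = X_LBD + t * X_INCR and an impulse input of 1.0 fed to the
     * network only at the first time step.
     */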
/**
     * This static method generates a Recurrent Neural Network
     * with a specific number of hidden neurons. Hidden and output
     * layers are activated with the standard sigmoidal function
     * of range [-1, 1].
*/
public static Net RNN(final int hidden) {
RNNGenerator gen = new RNNGenerator();
gen.inputLayer(1);
gen.hiddenLayer(hidden, CellType.SIGMOID1);
gen.outputLayer(1, CellType.SIGMOID1);
Net net = gen.generate();
return net;
}
public static void main(String[] args) {
//
// generate network.
//
final Net net = RNN(2);
//
// create a fitness (objective) function for differential evolution.
//
Objective obj = new Objective() {
@Override
public double compute(double[] args, int offset) {
//
// the arguments for the fitness function are used
// as weights within the recurrent network.
//
net.writeWeights(args, offset);
//
// then the error is computed based on the output
// of the network carrying the previously updated
// weights.
//
return error(net);
}
//
@Override
public int arity() {
//
// the number of weights of the network gives
                // the arity of the fitness function.
//
return net.getWeightsNum();
}
};
//
        // Now we set up an instance of the Differential Evolution
        // optimizer. The following configuration parameters work
        // well, but others may also be suitable. Experience has
        // shown that as the dimension of the fitness function grows,
        // i.e., as the number of weights increases, the population
        // size should be increased as well.
//
DifferentialEvolution de = new DifferentialEvolution();
de.setRnd(rnd);
de.setF(0.4);
de.setCR(0.7);
de.setInitLbd(-1.0);
de.setInitUbd(1.0);
de.setMutation(Mutation.BEST_TWO);
de.setParameters(net.getWeightsNum());
de.setPopSize(25);
de.updateObjective(obj);
de.addListener(new BasicIterationListener<DifferentialEvolution>());
//
// Initializing the optimizer before using is obligatory.
//
de.initialize();
System.out.println(de);
//
// We now start the optimization process over 300 generations. This
        // results in 300 * 25 = 7500 fitness evaluations.
//
de.iterate(300, 0);
//
// Use the best solution vector of the optimizer as weight vector
// in the network.
//
double[] solution = new double[net.getWeightsNum()];
de.copyBestSolution(solution, 0);
net.writeWeights(solution, 0);
net.reset();
//
// Visualize network output. A Timer forces the RNN to compute
        // a new state every 10 ms. The network output is plotted via a
// simple online diagram. Pressing the left or the right button
// feeds a 1.0 or, respectively, a -1.0 signal into the network.
//
final long[] ticks = new long[1];
final OnlineDiagram diagram = new OnlineDiagram(
300, -1.0, 1.0, 1
) {
private static final long serialVersionUID = 1L;
//
protected void paintComponent(Graphics g) {
super.paintComponent(g);
g.setColor(Color.WHITE);
g.drawString("cycles " + ticks[0], 10, 20);
}
};
diagram.assignColor(0, new Color(200, 80, 20));
//
final double[] input = new double[1];
final double[] output = new double[1];
//
Timer timer = new Timer(10, new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
net.input(input, 0);
net.compute();
ticks[0]++;
//
net.output(output, 0);
diagram.record(output);
diagram.getGraphics().setColor(Color.WHITE);
diagram.repaint();
}
});
//
diagram.addMouseListener(new MouseAdapter() {
@Override
public void mousePressed(MouseEvent e) {
input[0] = (e.getButton() == MouseEvent.BUTTON1)?(1.0):-(1.0);
}
//
@Override
public void mouseReleased(MouseEvent e) {
input[0] = 0.0;
ticks[0] = 0;
}
});
//
JFrame frame = new JFrame("Damped Oscillation learned by a Recurrent Neural Network ");
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frame.add(diagram);
frame.setSize(600, 200);
frame.setVisible(true);
timer.start();
}
} | 8,437 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
SequentialXorExample.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/recurrent/SequentialXorExample.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.recurrent;
import java.util.Random;
import de.jannlab.Net;
import de.jannlab.core.CellType;
import de.jannlab.data.Sample;
import de.jannlab.data.SampleSet;
import de.jannlab.generator.RNNGenerator;
import de.jannlab.misc.TimeCounter;
import de.jannlab.tools.ClassificationValidator;
import de.jannlab.training.GradientDescent;
/**
 * This simple example demonstrates how gradient descent
 * can be used for learning the sequential XOR problem with a
 * Recurrent Neural Network.
* <br></br>
* @author Sebastian Otte
*/
public class SequentialXorExample {
private static TimeCounter TC = new TimeCounter();
private static Random rnd = new Random(0L);
public static void incrBit(final boolean[] data, final int offset) {
if (offset >= data.length) return;
//
if (!data[offset]) {
data[offset] = true;
return;
}
//
data[offset] = false;
incrBit(data, offset + 1);
}
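    /*
     * Worked example: incrBit treats the boolean array as a binary number with
     * the least significant bit at index 0 and adds 1 to it, e.g. [1, 1, 0]
     * becomes [0, 0, 1] (binary 3 -> 4) and [1, 1] wraps around to [0, 0].
     */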
public static boolean xor(final boolean[] data) {
//
boolean value = false;
//
for (int i = 0; i < data.length; i++) {
value ^= data[i];
}
//
return value;
}
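    /*
     * Illustrative sketch (not part of the original example): for length 2 the
     * generated sample set covers the full XOR truth table, i.e. the sequences
     * 00 -> 0, 01 -> 1, 10 -> 1 and 11 -> 0, each fed to the network one bit
     * per time step.
     */
    public static void printTruthTable() {
        final boolean[][] inputs = {
            {false, false}, {false, true}, {true, false}, {true, true}
        };
        for (boolean[] in : inputs) {
            System.out.println(
                (in[0]?(1):(0)) + " " + (in[1]?(1):(0)) + " -> " + (xor(in)?(1):(0))
            );
        }
    }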
public static SampleSet generateSamples(final int length) {
SampleSet set = new SampleSet();
//
int size = 1 << length;
boolean[] data = new boolean[length];
//
for (int i = 0; i < size; i++) {
Sample s = new Sample(1, length, 1, 1);
//
for (int j = 0; j < data.length; j++) {
s.getInput()[j] = (data[j]?(1.0):(0.0));
}
//
s.getTarget()[0] = (xor(data)?(1.0):(0.0));
incrBit(data, 0);
set.add(s);
}
//
return set;
}
/**
     * This static method generates a Recurrent Neural Network
     * with a specific number of hidden neurons. Hidden and output
     * layers are activated with the standard sigmoidal function
     * of range [-1, 1].
*/
public static Net RNN(final int hidden) {
RNNGenerator gen = new RNNGenerator();
gen.inputLayer(1);
gen.hiddenLayer(hidden, CellType.SIGMOID1);
gen.outputLayer(1, CellType.SIGMOID1);
Net net = gen.generate();
return net;
}
public static void main(String[] args) {
//
// config parameter.
//
final int epochs = 80000;
//
final double learningrate = 0.02;
final double momentum = 0.9;
//
// generate samples.
//
SampleSet samples = generateSamples(2);
//
// generate network.
//
final Net net = RNN(2);
//
net.rebuffer(samples.maxSequenceLength());
net.initializeWeights(rnd);
//
// setup trainer.
//
GradientDescent trainer = new GradientDescent();
trainer.setNet(net);
trainer.setRnd(rnd);
trainer.setPermute(false);
trainer.setTrainingSet(samples);
trainer.setLearningRate(learningrate);
trainer.setMomentum(momentum);
trainer.setEpochs(epochs);
//
TC.reset();
//
// perform training.
//
trainer.train();
System.out.println();
//
System.out.println(
"training time: " +
TC.valueMilliDouble() +
" ms."
);
//
// evaluate learning success.
//
final double thres = 0.1;
ClassificationValidator v = new ClassificationValidator(net, thres);
for (Sample s : samples) {
System.out.println(v.applyAsString(s, true));
System.out.println();
}
System.out.println("traning results: " + (v.ratio() * 100) + "%.");
}
} | 4,800 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
SequentialParityExample.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/recurrent/SequentialParityExample.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.recurrent;
import java.util.Random;
import de.jannlab.Net;
import de.jannlab.core.CellType;
import de.jannlab.data.Sample;
import de.jannlab.data.SampleSet;
import de.jannlab.generator.RNNGenerator;
import de.jannlab.training.RandomSearch;
import de.jannlab.tools.ClassificationValidator;
import de.jannlab.tools.NetTools;
/**
* The sequential parity problem is a typical sequence classification
* problem, often used to demonstrate the ability of recurrent networks
* to "connect" input events over time. Given a sequence over {0, 1}
* the task is to determine either the number of 1s is even or odd.
* In this example the problem is learned by an RNN via RandomSearch.
* <br></br>
* @author Sebastian Otte
*
*/
public class SequentialParityExample {
//
public static final double THRESHOLD = 0.5;
public static final double[] EVEN = {1.0, 0.0};
public static final double[] ODD = {0.0, 1.0};
public static double[] generateSequence(
final int length,
final Random rnd
) {
double[] result = new double[length];
//
for (int i = 0; i < length; i++) {
result[i] = 0.0;
}
int n = rnd.nextInt((length / 2) + 1);
for (int i = 0; i < n; i++) {
result[rnd.nextInt(length)] = rnd.nextBoolean()?(1.0):(0.0);
}
//
return result;
}
public static boolean parity(final double[] data) {
int ctr = 0;
for (int i = 0; i < data.length; i++) {
if (data[i] >= THRESHOLD) {
ctr++;
}
}
return (ctr % 2) == 0;
}
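    /*
     * Worked example: the sequence 1 0 1 0 0 contains two values >= THRESHOLD,
     * so parity returns true and generateSample assigns the target
     * EVEN = {1.0, 0.0}; with a single 1 the target would be ODD = {0.0, 1.0}.
     */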
public static Sample generateSample(
final int length,
final Random rnd
) {
//
final double[] input = generateSequence(length, rnd);
final double[] target = (parity(input)?(EVEN):(ODD));
//
return new Sample(input, target, 1, length, 2, 1);
}
private static SampleSet generateSamples(
final int samples,
final int length,
final Random rnd
) {
final SampleSet set = new SampleSet();
//
for (int i = 0; i < samples; i++) {
final Sample s = generateSample(length, rnd);
set.add(s);
}
//
return set;
}
public static double runExperiment(
final SampleSet trainset,
final SampleSet testset,
final Net net,
final Random rnd,
final int epochs,
final double lbd,
final double ubd
) {
//
// setup network.
//
final int maxlength = Math.max(trainset.maxSequenceLength(), testset.maxSequenceLength());
net.initializeWeights(rnd);
net.rebuffer(maxlength);
//
// setup trainer.
//
RandomSearch trainer = new RandomSearch();
trainer.setEpochs(epochs);
trainer.setSearchLbd(lbd);
trainer.setSearchUbd(ubd);
trainer.setRnd(rnd);
trainer.setNet(net);
trainer.setTrainingSet(trainset);
trainer.train();
//
// evaluate training.
//
final double error = NetTools.computeError(net, testset);
return error;
}
public static Net RNN(final int in, final int hid, final int out) {
RNNGenerator gen = new RNNGenerator();
gen.inputLayer(in);
gen.hiddenLayer(hid, CellType.SIGMOID, true, -1.0);
gen.outputLayer(out, CellType.SIGMOID, true, -1.0);
gen.getCoreGenerator().weightedLinkLayer(0, 2);
return gen.generate();
}
public static void main(String[] args) {
//
// configure parameter.
//
final int length = 20;
final int trainset_size = 100;
final int epochs = 30000;
//
final Random rnd = new Random(System.currentTimeMillis());
//
final double weightslbd = -100.0;
final double weightsubd = 100.0;
//
// generate networks.
//
final Net rnn = RNN(1, 3, 2);
//
// build sample sets.
//
final SampleSet trainset = generateSamples(
trainset_size, length, rnd
);
System.out.println(trainset);
final SampleSet testset = trainset;
//
// evaluate learning success.
//
final double rnnerror = runExperiment(
trainset, trainset, rnn, rnd, epochs, weightslbd, weightsubd
);
System.out.println("rnn error : " + rnnerror);
{
ClassificationValidator f = new ClassificationValidator(rnn);
for (int i = 0; i < testset.size(); i++) {
Sample s = testset.get(i);
f.apply(s);
}
double ratio = f.ratio();
System.out.println("testset result (rnn): " + ratio * 100.0 + "%.");
}
}
}
| 5,967 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
AddingExample.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/recurrent/AddingExample.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.recurrent;
import java.io.IOException;
import java.util.Random;
import de.jannlab.Net;
import de.jannlab.core.CellType;
import de.jannlab.data.Sample;
import de.jannlab.data.SampleSet;
import de.jannlab.generator.LSTMGenerator;
import de.jannlab.training.GradientDescent;
import de.jannlab.misc.TimeCounter;
import de.jannlab.tools.RegressionValidator;
/**
 * This example demonstrates the ability of LSTMs, trained by gradient
 * descent, to learn adding from samples. We use a modified variation of the
 * adding problem from [1].
* <br></br>
* For this experiment samples are generated as follows:
* <br></br>
* <ul>
 * <li>Given is a sequence of T vectors from R2, where the x part of each
 * vector is randomly chosen from [0,1].</li>
* <li>Two indices are rolled: i1 from {0, ..., 9}, and i2 from {0, ..., T/2}.</li>
* <li>In the y part the values at the indices i1 and i2 are set to 1.0.</li>
 * <li>The target value of a sequence is (x[i1] + x[i2]) / 2.</li>
* </ul>
* <br></br>
* [1] S. Hochreiter und J. Schmidhuber, "LSTM Can Solve Hard Long Time Lag Problems",
* ADVANCES IN NEURAL INFORMATION PROCESSING SYSTEMS 9, S. 473--479, 1997.
* <br></br>
* @author Sebastian Otte
*
*/
public class AddingExample {
private static TimeCounter TC = new TimeCounter();
private static Random rnd = new Random(0L);
private static int x(final int i) {
return (i * 2);
}
private static int y(final int i) {
return (i * 2) + 1;
}
public static Sample generateSample(final int length) {
double[] data = new double[length * 2];
//
for (int i = 0; i < length; i++) {
data[x(i)] = (rnd.nextDouble()); //(rnd.nextDouble() * 2.0) - 1.0;
}
//
        // select the two marked indices i1 and i2.
//
final int i1 = rnd.nextInt(10);
final int i2 = rnd.nextInt((length) / 2);
//
data[y(i1)] = 1.0;
data[y(i2)] = 1.0;
//
double[] target = {((data[x(i1)] + data[x(i2)])/2.0)};
return new Sample(data, target, 2, length, 1, 1);
}
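    /*
     * Worked example: for a sequence of length T the data array interleaves the
     * x and y components as [x(0), y(0), x(1), y(1), ...]. If the two marked
     * positions hold x[i1] = 0.3 and x[i2] = 0.5, the target of the whole
     * sequence is (0.3 + 0.5) / 2 = 0.4.
     */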
public static SampleSet generate(final int n, final int length) {
SampleSet set = new SampleSet();
//
for (int i = 0; i < n; i++) {
set.add(generateSample(length));
}
//
return set;
}
public static Net LSTM(final int in, final int hid, final int out) {
//
LSTMGenerator gen = new LSTMGenerator();
gen.inputLayer(in);
gen.hiddenLayer(
hid, CellType.SIGMOID, CellType.TANH, CellType.TANH, true
);
gen.outputLayer(out, CellType.TANH);
//
return gen.generate();
}
public static void main(String[] args) throws IOException {
//
// config parameter.
//
final int epochs = 200;
//
final double learningrate = 0.001;
final double momentum = 0.9;
//
final int length = 30;
final int trainsize = 5000;
//
// build network. only 1 LSTM block
// is required to learn the problem.
// using 2 or 3 blocks slightly increases
// the recognition accuracy.
//
Net net = LSTM(2, 1, 1);
//
net.rebuffer(length);
net.initializeWeights();
//
// even much longer sequences are tested on the
// trained network later.
//
SampleSet trainset = generate(trainsize, length);
SampleSet testset = generate(1000, length);
SampleSet testset2 = generate(1000, 100);
SampleSet testset3 = generate(1000, 1000);
//
// setup network.
//
final int maxlength = Math.max(trainset.maxSequenceLength(), testset.maxSequenceLength());
net.initializeWeights(rnd);
net.rebuffer(maxlength);
//
// setup trainer.
//
GradientDescent trainer = new GradientDescent();
trainer.setNet(net);
trainer.setRnd(rnd);
trainer.setPermute(true);
trainer.setTrainingSet(trainset);
trainer.setLearningRate(learningrate);
trainer.setMomentum(momentum);
trainer.setEpochs(epochs);
//
TC.reset();
//
// perform training.
//
trainer.train();
//
System.out.println(
"training time: " +
TC.valueMilliDouble() +
" ms."
);
//
// evaluate learning success.
//
final double thres = 0.04;
{
RegressionValidator v = new RegressionValidator(net, thres);
for (Sample s : testset) {
v.apply(s);
}
System.out.println("validation set result (1): " + (v.ratio() * 100) + "%.");
}
{
RegressionValidator v = new RegressionValidator(net, thres);
for (Sample s : testset2) {
v.apply(s);
}
System.out.println("validation set result (2): " + (v.ratio() * 100) + "%.");
}
{
RegressionValidator v = new RegressionValidator(net, thres);
for (Sample s : testset3) {
v.apply(s);
}
System.out.println("validation set result (3): " + v.ratio() * 100 + "%.");
}
}
}
| 6,346 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
Vector2f.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/math/Vector2f.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.math;
import de.jannlab.examples.math.Vector3f;
/**
* This class is a vector of two float values x, y.
* It contains some static methods for operating in R2 (2 dimensional space).
* <br></br>
* @author Sebastian Otte
*
*/
public class Vector2f {
public float x = 0.0f;
public float y = 0.0f;
/**
* Create an instance of Vector2f with zero values.
*/
public Vector2f() {
//
}
/**
     * Creates an instance of Vector2f from a given Vector3f.
     * The x and y values will be divided by the z value.
* @param v Instance of Vector3f.
*/
public Vector2f(final Vector3f v) {
this(v.x / v.z, v.y / v.z);
}
/**
* Normalizes a vector and stores the result that is a vector
* in the same direction with length of 1 in pvret.
* @param pv Source vector.
* @param pvret Normalized vector.
*/
public static void normalize(Vector2f pv, Vector2f pvret) {
float l = 1.0f / pv.length();
pvret.x = pv.x * l;
pvret.y = pv.y * l;
}
/**
* Normalizes a vector and returns the result that is a vector
* in the same direction with length of 1 as a new vector.
* @param pv The source vector.
* @return A new normalized vector.
*/
public static Vector2f normalize(Vector2f pv) {
float l = 1.0f / pv.length();
return new Vector2f(pv.x * l, pv.y * l);
}
/**
* Creates an instance of Vector2f for a given x and y value.
* <br></br>
* @param px The x value.
* @param py The y value.
*/
public Vector2f(float px, float py) {
this.x = px;
this.y = py;
}
/**
     * Creates an instance of Vector2f from a given Vector2f.
* <br></br>
* @param pv An Instance of Vector2f.
*/
public Vector2f(Vector2f pv) {
this.x = pv.x;
this.y = pv.y;
}
/**
* Copy the values of a given Vector2f.
* <br></br>
* @param pv An instance of Vector2f.
*/
public void copy(Vector2f pv) {
this.x = pv.x;
this.y = pv.y;
}
/**
     * Creates a copy of this instance.
* <br></br>
* @return Copy of this instance.
*/
public Vector2f copy() {
return new Vector2f(this.x, this.y);
}
/**
* Copies the values of a given float array.
* @param pd Values as float[].
*/
public void copy(float[] pd) {
this.x = pd[0];
this.y = pd[1];
}
/**
     * Copies the values of a given float array at a specific index.
* @param pd Values as float[].
* @param poffset The values offset.
*/
public void copy(float[] pd, int poffset) {
this.x = pd[poffset];
this.y = pd[poffset + 1];
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
return "Vector2f(" + this.x + "," + this.y + ")";
}
/**
     * Computes the length of the vector (Euclidean).
* @return Vector length.
*/
public float length() {
return (float)Math.sqrt((this.x * this.x) + (this.y * this.y));
}
/**
     * Computes the squared length of the vector (Euclidean). Can be
     * used as a faster metric than length() if only the relative order
     * of the vectors is needed.
     * @return Vector length without the final sqrt.
*/
public float length2() {
return ((this.x * this.x) + (this.y * this.y));
}
/**
* Adds two given Vector2f instances and stores the value in a third instance.
* @param pv1 The first instance.
* @param pv2 The second instance.
* @param pvret The result instance.
*/
public static void add(Vector2f pv1, Vector2f pv2, Vector2f pvret) {
pvret.x = pv1.x + pv2.x;
pvret.y = pv1.y + pv2.y;
}
/**
* Adds two given Vector2f instances and returns a new result instance.
* @param pv1 The first instance.
* @param pv2 The second instance.
* @return Resulting vector of pv1 + pv2.
*/
public static Vector2f add(Vector2f pv1, Vector2f pv2) {
return new Vector2f(pv1.x + pv2.x, pv1.y + pv2.y);
}
/**
     * Subtracts two given Vector2f instances and stores the result
     * in a third Vector2f instance.
     * @param pv1 The first instance.
     * @param pv2 The second instance.
     * @param pvret The third instance (result).
*/
public static void sub(Vector2f pv1, Vector2f pv2, Vector2f pvret) {
pvret.x = pv1.x - pv2.x;
pvret.y = pv1.y - pv2.y;
}
/**
     * Subtracts two given Vector2f instances and returns a
* new instance of Vector2f.
* @param pv1 The first instance.
* @param pv2 The second instance.
* @return Resulting vector of pv1 - pv2.
*/
public static Vector2f sub(Vector2f pv1, Vector2f pv2) {
return new Vector2f(pv1.x - pv2.x, pv1.y - pv2.y);
}
/**
* Multiplies a Vector2f instance with a given scalar value and stores
* the result in a second Vector2f instance.
* @param pv An instance of Vector2f.
* @param pscalar A scalar value.
* @param presult The second instance (result).
*/
public static void mul(Vector2f pv, float pscalar, Vector2f presult) {
presult.x = pv.x * pscalar;
presult.y = pv.y * pscalar;
}
/**
* Returns the scalar product of two vectors.
* @param pv1 Left operand vector.
* @param pv2 Right operand vector.
* @return The scalar product.
*/
public static float scalar(Vector2f pv1, Vector2f pv2) {
return (pv1.x * pv2.x) + (pv1.y * pv2.y);
}
/**
     * Returns the cosine of the angle between two vectors.
* @param pv1 The first vector.
* @param pv2 The second vector.
* @return The angle.
*/
public static float cos(Vector2f pv1, Vector2f pv2) {
final float scalar = scalar(pv1, pv2);
final float length = (pv1.length() * pv2.length());
return scalar / length;
}
/**
* Returns the angle (phi) between two vectors.
* @param pv1 The first vector.
* @param pv2 The second vector.
* @return The angle.
*/
public static float phi(Vector2f pv1, Vector2f pv2) {
return (float)Math.acos(cos(pv1, pv2));
}
/**
* Returns the angle (phi) between two vectors signed (counterclockwise).
* @param pv1 The first vector.
* @param pv2 The second vector.
* @return The angle.
*/
public static float signedPhi(Vector2f pv1, Vector2f pv2) {
//
final double a1 = Math.atan2(pv2.y, pv2.x);
final double a2 = Math.atan2(pv1.y, pv1.x);
//
return (float)(a1 - a2);
}
/**
     * Multiplies a given vector with a scalar value and returns the result
     * as a new vector. The given vector instance is not changed.
     * @param pv The vector.
     * @param pscalar The scalar value.
     * @return The scaled vector.
*/
public static Vector2f mul(Vector2f pv, float pscalar) {
return new Vector2f(pv.x * pscalar, pv.y * pscalar);
}
/**
* Rotates a vector and returns the result as new vector.
* @param pv The vector which is to rotate.
* @param pangle The angle in rad of the rotation.
* @return The return vector.
*/
public static Vector2f rotate(Vector2f pv, float pangle) {
Vector2f v = new Vector2f();
rotate(pv, pangle, v);
return v;
}
/**
* Rotates a vector and stores the result into pvret.
* @param pv The vector which is to rotate.
* @param pangle The angle in rad of the rotation.
* @param pvret The return vector.
*/
public static void rotate(Vector2f pv, float pangle, Vector2f pvret) {
float cos = (float)Math.cos(pangle);
float sin = (float)Math.sin(pangle);
//
final float x = pv.x;
final float y = pv.y;
//
pvret.x = (x * cos) + (y * -sin);
pvret.y = (x * sin) + (y * cos);
}
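    /*
     * Worked example: rotating the unit vector (1, 0) by PI/2 yields
     * approximately (0, 1), since x' = x*cos(a) - y*sin(a) and
     * y' = x*sin(a) + y*cos(a).
     */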
}
| 8,677 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
Vector2fTools.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/math/Vector2fTools.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.math;
import java.util.Collection;
/**
 * This class provides some static methods for statistical
* analysis of Vector2f instances.
* <br></br>
* @author Sebastian Otte
*/
public class Vector2fTools {
/**
     * Computes the mean vector of a given Collection of
* vectors.
* <br></br>
* @param points Collection of Vector2f instances.
* @return The mean vector.
*/
public static Vector2f mean(Collection<Vector2f> points) {
//
Vector2f result = new Vector2f();
//
// sum all vectors.
//
for (Vector2f p : points) {
Vector2f.add(result, p, result);
}
//
// divide by the number of vectors.
//
final float size = points.size();
final float ilength = (size > 0.0f)?(1.0f / size):(0.0f);
//
Vector2f.mul(result, ilength, result);
//
return result;
}
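    /*
     * Worked example: the mean of the points (1, 0), (3, 4) and (2, 2) is
     * ((1 + 3 + 2) / 3, (0 + 4 + 2) / 3) = (2, 2).
     */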
/**
     * This method uses PCA (principal component analysis)
* to compute the angle of the main axis (in relation to the x-axis)
* of a given point cloud.
* <br></br>
* @param points A Collection of Vector2f instances.
* @return The angle of the main axis.
*/
public static float pcaAngle(Collection<Vector2f> points) {
//
final float size = points.size();
if (size < 2) return 0.0f;
//
        // determine the mean vector.
//
final Vector2f mean = mean(points);
//
double cov00 = 0.0;
double cov01 = 0.0;
//double cov10 = 0.0;
double cov11 = 0.0;
//
// determine the covariance matrix of the point cloud.
//
for (Vector2f p : points) {
final float x = mean.x - p.x;
final float y = mean.y - p.y;
//
cov00 += (x * x);
cov01 += (x * y);
//cov10 += (y * x);
cov11 += (y * y);
}
//
final double in = (size > 0.0f)?(1.0f / size):(0.0f);
//
cov00 *= (in);
cov01 *= (in);
//cov10 *= (in);
cov11 *= (in);
//
// compute angle.
//
final double covd = (cov00 - cov11);
if (covd == 0.0) return 0.0f;
final double theta = 0.5 * Math.atan(
(2.0 * cov01) / covd
);
/*
//
// compute eigenvalues.
//
final double term1 = (cov00 + cov11) * 0.5;
final double term2t = (4.0 * cov01 * cov01) + (covd * covd);
final double term2 = Math.sqrt(term2t) * 0.5;
final double lambda1 = term1 + term2;
final double lambda2 = term1 - term2;
System.out.println(Math.toDegrees(theta));
System.out.println(lambda1);
System.out.println(lambda2);
*/
return (float)theta;
}
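    /*
     * In formula terms: theta = 0.5 * atan(2 * cov_xy / (cov_xx - cov_yy)),
     * the standard closed form for the orientation of the principal axis of a
     * 2x2 covariance matrix relative to the x-axis.
     */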
/*
public static void main(String[] args) {
List<Vector2f> list = new LinkedList<Vector2f>();
list.add(new Vector2f(1, 0));
list.add(new Vector2f(2, 0));
list.add(new Vector2f(7, 7));
pca(list);
}
*/
}
| 4,109 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
Vector3f.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/math/Vector3f.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.math;
/**
* This class is a vector of three float values x, y and z.
* It contains some static methods for operating in R3 (3 dimensional space).
* <br></br>
* @author Sebastian Otte
*
*/
public class Vector3f {
public float x = 0.0f;
public float y = 0.0f;
public float z = 0.0f;
/**
* {@inheritDoc}
*/
@Override
public boolean equals(Object pvec) {
if (!(pvec instanceof Vector3f)) return false;
Vector3f vec = (Vector3f)pvec;
return ((this.x == vec.x) && (this.y == vec.y) && (this.z == vec.z));
}
/**
* Copies the values of a given Vector3f.
* @param pv A vector for copy.
*/
public void copy(Vector3f pv) {
this.x = pv.x;
this.y = pv.y;
this.z = pv.z;
}
/**
     * Constructs a Vector3f from three float parameters.
* @param px The x value.
* @param py The y value.
* @param pz The z value.
*/
public Vector3f(float px, float py, float pz) {
this.x = px;
this.y = py;
this.z = pz;
}
/**
     * Constructs a Vector3f from another Vector3f.
* @param pv Reference of other Vector3f.
*/
public Vector3f(Vector3f pv) {
this.x = pv.x;
this.y = pv.y;
this.z = pv.z;
}
/**
     * Constructs a zero-initialized Vector3f.
*/
public Vector3f() {
//
}
/**
     * Returns the length of the vector in R3.
* @return Vector length.
*/
public float length() {
return (float)Math.sqrt((this.x*this.x) +
(this.y*this.y) +
(this.z*this.z));
}
/**
     * Returns the squared length of the vector in R3 (without the final sqrt).
     * @return Squared vector length.
*/
public float length2() {
return (this.x*this.x) +
(this.y*this.y) +
(this.z*this.z);
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
return "Vector3f(" + this.x + "," + this.y + "," + this.z + ")";
}
// ----------------------------------------------------------------
// static methods
// ----------------------------------------------------------------
/**
* Adds two vectors and stores the result into third vector.
* @param pv1 The left operand vector.
* @param pv2 The right operand vector.
* @param pvret The result vector.
*/
public static void add(Vector3f pv1, Vector3f pv2, Vector3f pvret) {
pvret.x = pv1.x + pv2.x;
pvret.y = pv1.y + pv2.y;
pvret.z = pv1.z + pv2.z;
}
/**
* Adds two vectors and returns a new vector within the result.
* @param pv1 The left operand vector.
* @param pv2 The right operand vector.
* @return A new vector within the result of the operation.
*/
public static Vector3f add(Vector3f pv1, Vector3f pv2) {
return new Vector3f(pv1.x + pv2.x, pv1.y + pv2.y, pv1.z + pv2.z);
}
/**
* Subtracts two vectors and stores the result into third vector.
* @param pv1 The left operand vector.
* @param pv2 The right operand vector.
* @param pvret The result vector.
*/
public static void sub(Vector3f pv1, Vector3f pv2, Vector3f pvret) {
pvret.x = pv1.x - pv2.x;
pvret.y = pv1.y - pv2.y;
pvret.z = pv1.z - pv2.z;
}
/**
* Subtracts two vectors and returns a new vector within the result.
* @param pv1 The left operand vector.
* @param pv2 The right operand vector.
* @return A new vector within the result of the operation.
*/
public static Vector3f sub(Vector3f pv1, Vector3f pv2) {
return new Vector3f(pv1.x - pv2.x, pv1.y - pv2.y, pv1.z - pv2.z);
}
/**
* Multiplies a vector with a scalar and stores the result in second vector.
* @param pv The left operand vector.
* @param psc The right operand scalar.
* @param pvret The result vector.
*/
public static void mul(Vector3f pv, float psc, Vector3f pvret) {
pvret.x = pv.x * psc;
pvret.y = pv.y * psc;
pvret.z = pv.z * psc;
}
/**
* Multiplies a vector with a scalar and returns the result as new vector.
* @param pv The left operand vector.
* @param psc The right operand scalar.
* @return A new vector within the result of the operation.
*/
public static Vector3f mul(Vector3f pv, float psc) {
return new Vector3f(pv.x * psc, pv.y * psc, pv.z * psc);
}
/**
* Divides a vector by a scalar and stores the result in second vector.
* @param pv The left operand vector.
* @param psc The right operand scalar.
* @param pvret The result vector.
*/
public static void div(Vector3f pv, float psc, Vector3f pvret) {
float iv = 1 / psc;
pvret.x = pv.x * iv;
pvret.y = pv.y * iv;
pvret.z = pv.z * iv;
}
/**
* Divides a vector by a scalar and returns the result as new vector.
* @param pv The left operand vector.
* @param psc The right operand scalar.
* @return A new vector within the result of the operation.
*/
public static Vector3f div(Vector3f pv, float psc) {
float iv = 1 / psc;
return new Vector3f(pv.x * iv, pv.y * iv, pv.z * iv);
}
/**
* Normalizes a vector and stores the result that is a vector
* in the same direction with length of 1 in pvret.
* @param pv Source vector.
* @param pvret Normalized vector.
*/
public static void normalize(Vector3f pv, Vector3f pvret) {
float l = 1 / pv.length();
pvret.x = pv.x * l;
pvret.y = pv.y * l;
pvret.z = pv.z * l;
}
/**
* Normalizes a vector and returns the result that is a vector
* in the same direction with length of 1 as a new vector.
* @param pv The source vector.
* @return A new normalized vector.
*/
public static Vector3f normalize(Vector3f pv) {
float l = 1 / pv.length();
return new Vector3f(pv.x * l, pv.y * l, pv.z * l);
}
/**
* Inverts a given vector and stores the result into pvret.
* @param pv The source vector.
* @param pvret Inverted vector.
*/
public static void invert(Vector3f pv, Vector3f pvret) {
pvret.x = -pv.x;
pvret.y = -pv.y;
pvret.z = -pv.z;
}
/**
* Inverts a given vector and returns a new inverted vector.
* @param pv The source vector.
* @return A new inverted vector.
*/
public static Vector3f invert(Vector3f pv) {
return new Vector3f(-pv.x, -pv.y, -pv.z);
}
/**
* Returns the scalar product of two vectors.
* @param pv1 Left operand vector.
* @param pv2 Right operand vector.
* @return The scalar product.
*/
public static float scalar(Vector3f pv1, Vector3f pv2) {
return (pv1.x * pv2.x) + (pv1.y * pv2.y) + (pv1.z * pv2.z);
}
/**
     * Builds the cross product of two vectors and stores the result
* into pvret.
* @param pv1 Left operand vector.
* @param pv2 Right operand vector.
* @param pvret The result vector.
*/
public static void cross(Vector3f pv1, Vector3f pv2, Vector3f pvret) {
pvret.x = (pv1.y * pv2.z) - (pv1.z * pv2.y);
pvret.y = (pv1.z * pv2.x) - (pv1.x * pv2.z);
pvret.z = (pv1.x * pv2.y) - (pv1.y * pv2.x);
}
/**
* Returns the cross product of two vectors.
* @param pv1 Left operand vector.
* @param pv2 Right operand vector.
* @return A new vector within the result of the operation.
*/
public static Vector3f cross(Vector3f pv1, Vector3f pv2) {
return new Vector3f((pv1.y * pv2.z) - (pv1.z * pv2.y),
(pv1.z * pv2.x) - (pv1.x * pv2.z),
(pv1.x * pv2.y) - (pv1.y * pv2.x));
}
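    /*
     * Worked example: cross((1, 0, 0), (0, 1, 0)) = (0, 0, 1), i.e. the cross
     * product of the x- and y-axis unit vectors is the z-axis unit vector
     * (right hand rule).
     */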
/**
* Returns the angle (phi) between two vectors.
* @param pv1 The first vector.
* @param pv2 The second vector.
* @return The angle.
*/
public static float phi(Vector3f pv1, Vector3f pv2) {
return (float)Math.acos(scalar(pv1, pv2) /
(pv1.length() * pv2.length()));
}
/**
* Rotates a vector around the x-axis and stores the result into pvret.
* @param pv The vector which is to rotate.
* @param pangle The angle in rad of the rotation.
* @param pvret The return vector.
*/
public static void rotateX(Vector3f pv, float pangle, Vector3f pvret) {
float cosa = (float)Math.cos(pangle);
float sina = (float)Math.sin(pangle);
pvret.x = pv.x;
pvret.y = (pv.y * cosa) + (pv.z * -sina);
pvret.z = (pv.y * sina) + (pv.z * cosa);
}
/**
* Rotates a vector around the x-axis and returns the result as new vector.
* @param pv The vector which is to rotate.
* @param pangle The angle in rad of the rotation.
* @return The return vector.
*/
public static Vector3f rotateX(Vector3f pv, float pangle) {
Vector3f v = new Vector3f();
rotateX(pv, pangle, v);
return v;
}
/**
* Rotates a vector around the y-axis and stores the result into pvret.
* @param pv The vector which is to rotate.
* @param pangle The angle in rad of the rotation.
* @param pvret The return vector.
*/
public static void rotateY(Vector3f pv, float pangle, Vector3f pvret) {
float cosa = (float)Math.cos(pangle);
float sina = (float)Math.sin(pangle);
pvret.x = (pv.x * cosa) + (pv.z * sina);
pvret.y = pv.y;
pvret.z = (pv.x * -sina) + (pv.z * cosa);
}
/**
* Rotates a vector around the y-axis and returns the result as new vector.
* @param pv The vector which is to rotate.
* @param pangle The angle in rad of the rotation.
* @return The return vector.
*/
public static Vector3f rotateY(Vector3f pv, float pangle) {
Vector3f v = new Vector3f();
rotateY(pv, pangle, v);
return v;
}
/**
* Rotates a vector around the z-axis and stores the result into pvret.
* @param pv The vector which is to rotate.
* @param pangle The angle in rad of the rotation.
* @param pvret The return vector.
*/
public static void rotateZ(Vector3f pv, float pangle, Vector3f pvret) {
//
float cosa = (float)Math.cos(pangle);
float sina = (float)Math.sin(pangle);
pvret.x = (pv.x * cosa) + (pv.y * -sina);
pvret.y = (pv.x * sina) + (pv.y * cosa);
pvret.z = pv.z;
}
/**
* Rotates a vector around the z-axis and returns the result as new vector.
* @param pv The vector which is to rotate.
* @param pangle The angle in rad of the rotation.
* @return The return vector.
*/
public static Vector3f rotateZ(Vector3f pv, float pangle) {
//
Vector3f v = new Vector3f();
rotateZ(pv, pangle, v);
return v;
}
/**
     * This method builds the normalized plane normal of the input vectors and stores
* the result into pvret. Note that this method uses the right hand rule.
* <br></br>
* @param pvec1 The first vector "a".
* @param pvec2 The second vector "b".
* @param pvec3 The third vector "c".
*/
public static void normal(Vector3f pvec1, Vector3f pvec2, Vector3f pvec3, Vector3f pvret) {
//
// Build plane with two vectors with source point pvec1.
//
Vector3f ab = Vector3f.sub(pvec2, pvec1);
Vector3f ac = Vector3f.sub(pvec3, pvec1);
//
// Build the cross product to get the plane normal, and normalize the vector.
//
Vector3f.cross(ab, ac, pvret);
Vector3f.normalize(pvret, pvret);
}
/**
     * This method builds the normalized plane normal of the input vectors and returns
* the result as a new vector. Note that this method uses the right hand rule.
* <br></br>
* @param pvec1 The first vector "a".
* @param pvec2 The second vector "b".
* @param pvec3 The third vector "c".
*/
public static Vector3f normal(Vector3f pvec1, Vector3f pvec2, Vector3f pvec3) {
//
Vector3f ret = new Vector3f();
normal(pvec1, pvec2, pvec3, ret);
return ret;
}
}
| 13,301 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
XORExample.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/feedforward/XORExample.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.feedforward;
import java.io.IOException;
import java.util.Random;
import de.jannlab.Net;
import de.jannlab.core.CellType;
import de.jannlab.data.Sample;
import de.jannlab.data.SampleSet;
import de.jannlab.generator.MLPGenerator;
import de.jannlab.training.GradientDescent;
import de.jannlab.misc.DoubleTools;
/**
* A simple example of learning XOR with a small multilayer perceptron.
 * Using a sigmoidal activation function with range [-2, 2] provides the
 * fastest convergence during training. Tanh also works nicely.
 * Note that when the standard sigmoid is used for activation, a bias
 * neuron has to be added.
* <br></br>
* @author Sebastian Otte, Dirk Krechel
*/
public class XORExample {
public static double[] array(double ...x) { return x; }
public static void main(String[] args) throws IOException, ClassNotFoundException {
//
Random rnd = new Random(0L);
//
        // generate a simple XOR training set.
//
SampleSet set = new SampleSet();
set.add(new Sample(array(0, 0), array(0)));
set.add(new Sample(array(0, 1), array(1)));
set.add(new Sample(array(1, 0), array(1)));
set.add(new Sample(array(1, 1), array(0)));
//
// generate simple mlp.
//
MLPGenerator gen = new MLPGenerator();
gen.inputLayer(set.get(0).getInputSize());
gen.hiddenLayer(2, CellType.SIGMOID2);
gen.outputLayer(1, CellType.SIGMOID2);
Net mlp = gen.generate();
mlp.initializeWeights(rnd);
//
// setup trainer.
//
GradientDescent trainer = new GradientDescent();
trainer.setTrainingSet(set);
trainer.setNet(mlp);
trainer.setRnd(rnd);
trainer.setEpochs(500);
trainer.setTargetError(0);
trainer.setLearningRate(0.1);
trainer.setMomentum(0.5);
//
// perform training and print final error.
//
trainer.train();
//
// print computation results.
//
double[] out = new double[1];
for (Sample s : set) {
mlp.reset();
s.mapInput(mlp.inputPort());
mlp.compute();
mlp.output(out, 0);
System.out.println(
DoubleTools.asString(s.getInput(), 1) +
" ==> " +
DoubleTools.asString(out, 1)
);
}
}
}
| 3,353 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
GeometricExample.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/feedforward/GeometricExample.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.feedforward;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.util.Random;
import javax.swing.JFrame;
import javax.swing.JPanel;
import de.jannlab.Net;
import de.jannlab.core.CellType;
import de.jannlab.data.Sample;
import de.jannlab.data.SampleSet;
import de.jannlab.examples.math.Vector2f;
import de.jannlab.generator.MLPGenerator;
import de.jannlab.training.GradientDescent;
import de.jannlab.training.NetTrainer;
import de.jannlab.training.NetTrainerListener;
import de.jannlab.tools.ClassificationValidator;
/**
 * This example demonstrates the generalization ability of neural
 * networks on learning geometric figures (here the classification of circles).
 * Through a visualization during training, the learning process
 * (convergence) can be observed. The visualization also nicely illustrates
 * the effect of an insufficient number of hidden neurons.
 * <br></br>
 * The samples are points positioned on the unit square, and each point
 * corresponds to one of two classes. The first class is defined by three
 * circles (points drawn in white) and the second class is given by all
 * remaining samples (points drawn in gray). These classes are obviously not
 * linearly separable.
 * The more colored the background at a position (x,y) is, the more the MLP
 * "thinks" this point corresponds to the first class.
*
* @author Sebastian Otte, Dirk Krechel
*/
public class GeometricExample {
public static final Vector2f c1 = new Vector2f(0.4f, 0.3f);
public static final double r1 = 0.2;
public static final Vector2f c2 = new Vector2f(0.7f, 0.6f);
public static final double r2 = 0.12;
public static final Vector2f c3 = new Vector2f(0.3f, 0.75f);
public static final double r3 = 0.10;
public static final Color CLASS_1 = new Color(255, 255, 255);
public static final Color CLASS_2 = new Color(130, 130, 130);
public static SampleSet generateData(
final int samples, final float rel, final Random rnd
) {
//
final SampleSet result = new SampleSet();
//
int size1 = (int)(samples * rel);
int size2 = samples - size1;
//
int size = size1 + size2;
while (size > 0) {
Vector2f v = new Vector2f(rnd.nextFloat(), rnd.nextFloat());
Vector2f l1 = Vector2f.sub(c1, v);
Vector2f l2 = Vector2f.sub(c2, v);
Vector2f l3 = Vector2f.sub(c3, v);
if (l1.length() > r1 && l2.length() > r2 && l3.length() > r3) {
//
// outer point
//
if (size2 > 0) {
result.add(new Sample(
new double[]{v.x, v.y}, new double[]{0.0})
);
size2--;
}
} else {
//
// inner point
//
if (size1 > 0) {
result.add(
new Sample(new double[]{v.x, v.y}, new double[]{1.0})
);
size1--;
}
}
size--;
}
return result;
}
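    /*
     * Worked example (values chosen for illustration): the point (0.45, 0.35)
     * has distance sqrt(0.05^2 + 0.05^2), roughly 0.071, to c1 = (0.4, 0.3),
     * which is smaller than r1 = 0.2, so it lies inside the first circle and
     * gets the target 1.0, whereas a point such as (0.9, 0.1) lies outside all
     * three circles and gets the target 0.0.
     */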
public static void main(String[] args) {
//
final Random rnd = new Random(System.currentTimeMillis());
//
// network parameter.
//
final int input = 2;
final int hidden = 8;
//
final int trainset_size = 5000;
final int testset_size = 100;
//
// training parameter
//
final int epochs = 4000;
final double learningrate = 0.004;
final double momentum = 0.9;
//
// Generate a multilayer perceptron using the
// MLPGenerator class.
//
final MLPGenerator gen = new MLPGenerator();
//
gen.inputLayer(input);
gen.hiddenLayer(hidden, CellType.SIGMOID, true, -1.0);
gen.outputLayer(1, CellType.SIGMOID);
//
final Net mlp = gen.generate();
mlp.initializeWeights(rnd);
//
// Generate samples sets.
//
final SampleSet trainset = generateData(trainset_size + testset_size, 0.5f, rnd);
final SampleSet testset = trainset.split(testset_size, rnd);
//
// Setup back-propagation trainer.
//
GradientDescent trainer = new GradientDescent();
trainer.setEpochs(epochs);
trainer.setLearningRate(learningrate);
trainer.setMomentum(momentum);
trainer.setPermute(true);
trainer.setRnd(rnd);
trainer.setTargetError(10E-5);
trainer.setNet(mlp);
trainer.setTrainingSet(trainset);
trainer.addListener(new NetTrainerListener() {
@Override
public void started(NetTrainer trainer) {
}
@Override
public void finished(NetTrainer trainer) {
}
@Override
public void epoch(NetTrainer trainer) {
//
final int ep = trainer.getEpoch() + 1;
//
if ((ep) % (epochs / 16) != 0 && ep != 1) return;
final BufferedImage img = new BufferedImage(800, 800, BufferedImage.TYPE_INT_RGB);
double[] p = new double[2];
double[] o = new double[2];
for (int y = 0; y < img.getHeight(); y++) {
for (int x = 0; x < img.getWidth(); x++) {
//
p[0] = ((double)x) / ((double)(img.getWidth()));
p[1] = ((double)y) / ((double)(img.getHeight()));
//
mlp.reset();
mlp.input(p, 0);
mlp.compute();
mlp.output(o, 0);
//
int v = ((int)(o[0] * 255));
img.setRGB(x, y, ((v>>1) << 8) | v);
}
}
final Graphics2D g = (Graphics2D)img.getGraphics();
g.setRenderingHint(
RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON
);
for (Sample s : trainset) {
int x = (int)((double)(img.getWidth() - 1) *
s.getInput()[0]);
int y = (int)((double)(img.getHeight() - 1) *
s.getInput()[1]);
if (s.getTarget()[0] > 0.5) {
g.setColor(CLASS_1);
} else {
g.setColor(CLASS_2);
}
g.fillOval(x-2, y-2, 5, 5);
}
JFrame frame = new JFrame("Geometry Learning - Epoch: " + ep);
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
//
JPanel panel = new JPanel() {
private static final long serialVersionUID = -4307908552010057652L;
@Override
protected void paintComponent(Graphics gfx) {
super.paintComponent(gfx);
gfx.drawImage(
img, 0, 0,
img.getWidth(), img.getHeight(), null
);
}
};
panel.setPreferredSize(new Dimension(img.getWidth(), img.getHeight()));
frame.add(panel);
frame.setResizable(false);
frame.pack();
frame.setVisible(true);
}
});
//
// perform training.
//
trainer.train();
//
// We use the ClassificationValidator to measure the recognition
// accuracy on the training set as well as on the test set.
//
System.out.println();
ClassificationValidator val = new ClassificationValidator(mlp);
//
for (Sample s : trainset) {
val.apply(s);
}
System.out.println(
"recognition accuracy on trainset: " +
(val.ratio() * 100.0) + "%"
);
//
val.reset();
//
for (Sample s : testset) {
val.apply(s);
}
System.out.println(
"recognition accuracy on testset: " +
(val.ratio() * 100.0) + "%"
);
}
}
| 9,679 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
StaticParityExample.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/examples/de/jannlab/examples/feedforward/StaticParityExample.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.examples.feedforward;
import java.util.Collections;
import java.util.Random;
import de.jannlab.Net;
import de.jannlab.core.CellType;
import de.jannlab.data.Sample;
import de.jannlab.data.SampleSet;
import de.jannlab.generator.MLPGenerator;
import de.jannlab.training.GradientDescent;
import de.jannlab.tools.ClassificationValidator;
/**
 * This example shows how to learn the parity problem with
 * multilayer perceptrons (MLPs) using gradient descent with a momentum term.
 * It differs from the sequential parity problem, a well known toy
 * problem for recurrent networks, in that the samples are not
 * sequences of variable length but simple equal-sized patterns, which can
 * be learned by MLPs. We therefore refer to it here as the static parity
 * problem.
 * <br></br><br></br>
 * For a given number of bits n, 2^n samples containing every possible bit
 * combination are generated. Initially the samples are shuffled. After that, a
 * small subset of the samples is drawn as test set, while the remaining
 * samples are used as train set.
 * <br></br><br></br>
 * This example also makes it possible to observe the effect of over-fitting
 * when using too large hidden layers.
* <br></br>
* @author Sebastian Otte, Dirk Krechel
*/
public class StaticParityExample {
public static final double ON = 1.0;
public static final double OFF = 0.0;
public static final int CLASS_EVEN = 0;
public static final int CLASS_ODD = 1;
public static double bitAsDouble(final boolean bit) {
return ((bit)?(ON):(OFF));
}
public static SampleSet generateData(final int bits, final Random rnd) {
//
final SampleSet result = new SampleSet();
//
final double[] input = new double[bits];
final double[] output = new double[2];
//
final int samples = 1 << bits;
//
for (int i = 0; i < samples; i++) {
//
            // add 1 to the bit string (binary increment).
//
for (int j = 0; j < bits; j++) {
if (input[j] == OFF) {
//
// 0**** -> 1****
//
input[j] = ON;
break;
} else {
//
// *1**** -> *0****
//
input[j] = OFF;
}
}
//
// generate parity sample
//
boolean parity = true;
//
for (int j = 0; j < bits; j++) {
if (input[j] == ON) {
parity = !parity;
}
}
//
if (parity) {
output[CLASS_EVEN] = ON;
output[CLASS_ODD] = OFF;
} else {
output[CLASS_EVEN] = OFF;
output[CLASS_ODD] = ON;
}
//
result.add(new Sample(input.clone(), output.clone()));
}
//
Collections.shuffle(result, rnd);
//
return result;
}
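    /*
     * Worked example: for bits = 3 the input 1 0 1 contains two ON bits, so
     * its parity is even and the target is (ON, OFF) = (1.0, 0.0) at the
     * indices (CLASS_EVEN, CLASS_ODD); the input 1 1 1 contains three ON bits
     * and therefore gets the target (0.0, 1.0).
     */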
public static void main(String[] args) {
//
final Random rnd = new Random(System.currentTimeMillis());
//
// network parameter:
//
        // o A good generalization can be achieved using an 8,4,4,2 MLP. In many
        // cases (depending on the random initialization) the testset accuracy
        // reaches 100% and is thereby often better than the trainset accuracy.
        //
        // o To force over-fitting, try an 8,32,2 MLP. In some cases the MLP
        // learns the samples and not the problem, which results in a clearly
        // worse testset accuracy.
//
final int input = 8;
final int hidden1 = 4;
final int hidden2 = 4;
//
        // Usually, a significantly smaller test set is drawn from the
        // samples to assess the generalization result.
//
final int testset_size = 96;
//
// training parameter
//
final int epochs = 5000;
final double learningrate = 0.001;
final double momentum = 0.9;
//
// Generate a multilayer perceptron using the
// MLPGenerator class.
//
        // o While tuning the network parameters, we found
        //   that an additional bias of 1.0 for the hidden and output
        //   layers works well for learning the problem.
        //
        // o Note that it is absolutely
        //   necessary to randomly initialize the weights of
        //   the MLP for symmetry-breaking.
//
//
final MLPGenerator gen = new MLPGenerator();
//
gen.inputLayer(input);
gen.hiddenLayer(hidden1, CellType.TANH, true, 1.0);
gen.hiddenLayer(hidden2, CellType.TANH, true, 1.0);
gen.outputLayer(2, CellType.TANH, true, 1.0);
//
final Net mlp = gen.generate();
mlp.initializeWeights(rnd);
//
        // Generate sample sets.
//
final SampleSet trainset = generateData(input, rnd);
final SampleSet testset = trainset.split(testset_size, rnd);
//
// Setup back-propagation trainer.
//
GradientDescent trainer = new GradientDescent();
trainer.setEpochs(epochs);
trainer.setLearningRate(learningrate);
trainer.setMomentum(momentum);
trainer.setPermute(true);
trainer.setRnd(rnd);
trainer.setTargetError(10E-5);
trainer.setNet(mlp);
trainer.setTrainingSet(trainset);
//
// perform training.
//
trainer.train();
//
// We use the ClassificationValidator to measure the recognition
// accuracy on the training set as well as on the test set.
//
System.out.println();
ClassificationValidator val = new ClassificationValidator(mlp);
//
for (Sample s : trainset) {
val.apply(s);
}
System.out.println(
"recognition accuracy on trainset: " +
(val.ratio() * 100.0) + "%"
);
//
val.reset();
//
for (Sample s : testset) {
val.apply(s);
}
System.out.println(
"recognition accuracy on testset: " +
(val.ratio() * 100.0) + "%"
);
}
}
Net.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/Net.java
/*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab;
import java.io.Serializable;
import java.util.Random;
import de.jannlab.core.NetStructure;
import de.jannlab.data.ReadPort;
import de.jannlab.data.WritePort;
/**
 * This interface represents neural networks in this framework. All
 * ANN implementations are subclasses of Net. The provided methods are
 * for reading data from and writing data into the network, for computing
 * activations, and for moving through time (for recurrent networks).
* <br></br>
* @author Sebastian Otte
*/
public interface Net extends Serializable {
//
/**
* Resets the network, which means that all inner
* states (activations, derivations) are set to 0.0.
     * The method also resets the time index, which is 0
     * after the reset.
*/
public void reset();
/**
* Initialize weights randomly.
*/
public void initializeWeights();
/**
* Initialize weights randomly with a given Random
* instance.
* <br></br>
* @param rnd Instance of Random.
*/
public void initializeWeights(Random rnd);
/**
* Copies data to the input layer of the network. This method
     * depends on the current time step.
* <br></br>
* @param data Source data buffer
* @param offset Offset in the source data buffer.
*/
public void input(final double[] data, final int offset);
/**
* Copies data to the input layer of the network only for a given selection.
     * This method depends on the current time step.
* <br></br>
* @param data Source data buffer
* @param offset Offset in the source data buffer.
* @param selection A selection given as array of indices.
*/
public void input(final double[] data, final int offset, final int[] selection);
/**
     * Copies data from the output layer of the network into a given data buffer.
     * This method depends on the current time step.
* @param data Destination data buffer.
* @param offset Offset in the destination data buffer.
*/
public void output(final double[] data, final int offset);
/**
     * Copies data from the output layer of the network into a given data buffer
     * only for a given selection. This method depends on the current time step.
* @param data Destination data buffer.
* @param offset Offset in the destination data buffer.
* @param selection A selection given as array of indices.
*/
public void output(final double[] data, final int offset, final int[] selection);
/**
* Copies data to the output layer of the network (the target output). This method
     * depends on the current time step.
* <br></br>
* @param data Source data buffer
* @param offset Offset in the source data buffer.
*/
public void target(final double[] data, final int offset);
/**
* Copies data to the output layer of the network (the target output) for a given
     * selection. This method depends on the current time step.
* <br></br>
* @param data Source data buffer.
* @param offset Offset in the source data buffer.
* @param selection A selection given as array of indices.
*/
public void target(final double[] data, final int offset, final int[] selection);
//
/**
     * Computes the error of the output layer, based on the current output layer
     * activation and the injected target output.
     * This method depends on the current time step.
     * <br></br>
     * @return The current output error.
*/
public double error();
/**
     * Returns a WritePort for writing into the input layer.
* <br></br>
* @return Instance of WritePort.
*/
public WritePort inputPort();
/**
     * Returns a ReadPort for reading data from the output layer.
* @return Instance of ReadPort.
*/
public ReadPort outputPort();
/**
     * Returns a WritePort for writing data into the output layer (the target output).
* @return Instance of WritePort.
*/
public WritePort targetPort();
/**
* Copies the entire network instance. This performs a deep copy.
* @return Deep copy of this instance.
*/
public Net copy();
/**
     * Performs a shared copy of the network instance. Some data parts such as links and
     * weights are shared. The rest (data buffers) is private. A shared copy allows
     * parallel computation with the "same" network.
* @return Shared copy of this instance.
*/
public Net sharedCopy();
/**
* Computes the activation of the network. The method strongly depends on the underlying
* network implementation (online vs. offline).
*/
public void compute();
/**
* Computes the gradient (error flow) of the network. The method strongly depends on
* the underlying network implementation (online vs. offline).
*/
public void computeGradient();
/**
* Increases the current frame index (time index).
*/
public void incrFrameIdx();
/**
* Sets the current frame index (time index) to the given value.
* <br></br>
* @param idx The new time index.
*/
public void setFrameIdx(final int idx);
/**
* Get the frame width of the network.
* @return Frame width of this instance.
*/
public int getFrameWidth();
/**
* Return the current frame index (time index).
* @return Current frame index (time index).
*/
public int getFrameIdx();
/**
* Decreases the current frame index (time index).
*/
public void decrFrameIdx();
/**
* Gives the internal output buffer for a given frame index (time index).
* <br></br>
* @param frameidx The requested frame idx (time index).
* @return Output data buffer for frame idx (time index).
*/
public double[] getOutputBuffer(final int frameidx);
/**
* Gives the internal gradient output buffer for a given frame index (time index).
* <br></br>
* @param frameidx The requested frame idx (time index).
* @return Gradient output data buffer for frame idx (time index).
*/
public double[] getGradOutputBuffer(final int frameidx);
/**
* Gives the internal gradient input buffer for a given frame index (time index).
* <br></br>
* @param frameidx The requested frame idx (time index).
* @return Gradient input buffer for frame idx (time index).
*/
public double[] getGradInputBuffer(final int frameidx);
/**
* Returns true if the network has recurrent connections, false otherwise.
*/
public boolean isRecurrent();
/**
* Returns true if the network computes online, false otherwise.
*/
public boolean isOnline();
/**
     * Returns true if the network is bidirectional, false otherwise.
*/
public boolean isBidirectional();
/**
* Returns true if the network computes offline, false otherwise.
*/
public boolean isOffline();
/**
* Returns the number of input cells.
*/
public int getInputCells();
/**
* Returns the number of output cells.
*/
public int getOutputCells();
/**
     * Returns the number of value cells.
*/
public int getValueCells();
/**
* Returns the number of computing cells.
*/
public int getComputingCells();
/**
     * Returns the current weight vector. Note that the first
     * weight of the vector is always 1.0.
*/
public double[] getWeights();
/**
     * Returns the number of weights. Note that the first
     * weight of the vector is always 1.0. This weight is not counted
     * by this method.
*/
public int getWeightsNum();
/**
* Writes the given values into the internal weight buffer.
* @param data Reference to the source double array.
* @param offset Gives an offset of the weights within the data array.
*/
public void writeWeights(final double[] data, final int offset);
/**
* Copies the weights from the internal weight buffer into the given array.
* @param data Reference to the target double array.
* @param offset Gives an offset for the target array.
*/
public void readWeights(final double[] data, final int offset);
/**
* Returns the forward links. The links are optimized for
* the forward pass.
*/
public int[] getLinks();
/**
* Returns the backward links. The links are optimized for the
* backward pass.
*/
public int[] getLinksRev();
/**
* Returns the number of links, which can be bigger than the number
* of weights.
*/
public int getLinksNum();
/**
* Rebuffers (reallocates) the internal data buffers.
* @param frames The number of time steps provided by the network.
*/
public void rebuffer(final int frames);
/**
     * Performs a numerical check for debugging the framework. Returns false
     * if at least one value is NaN or Infinity.
*/
public boolean numericalCheck();
/**
* Returns the NetStructure instance of the current network.
* <br></br>
     * Warning: The variables of the instance are all publicly accessible.
* Changing them may affect the inner consistency of the network.
*/
public NetStructure getStructure();
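    //
    // Usage sketch (illustrative only; assumes "net" is a feed-forward Net whose
    // input/output sizes match the plain double arrays "x" and "y"):
    //
    //     net.reset();          // clear activations and the time index
    //     net.input(x, 0);      // write the input pattern at offset 0
    //     net.compute();        // perform the forward pass
    //     net.output(y, 0);     // read the output activation at offset 0
    //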
}
ReadPort.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/data/ReadPort.java
/*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.data;
import java.io.Serializable;
/**
 * A read port provides a defined interface for accessing data for reading.
* <br></br>
* @author Sebastian Otte
*/
public interface ReadPort extends Serializable {
/**
* Reads data from the data source and stores the result in buffer.
* <br></br>
* @param buffer Target data buffer.
* @param offset Offset in the buffer.
*/
public void read(final double[] buffer, final int offset);
/**
* Reads data from the data source and stores the result in buffer
* but only for a special selection.
* <br></br>
* @param buffer Target data buffer.
* @param offset Offset in the buffer.
* @param selection Selection as array of indices.
*/
public void read(
final double[] buffer, final int offset, final int[] selection
);
}
WritePort.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/data/WritePort.java
/*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.data;
import java.io.Serializable;
/**
 * A write port provides a defined interface for accessing data for writing.
* <br></br>
* @author Sebastian Otte
*/
public interface WritePort extends Serializable {
/**
* Write the data given by buffer into the WritePort.
* <br></br>
* @param buffer Source data buffer.
* @param offset Source data offset.
*/
public void write(final double[] buffer, final int offset);
/**
* Write the data given by buffer into the WritePort only for a given selection.
* <br></br>
* @param buffer Source data buffer.
* @param offset Source data offset.
* @param selection A selection as an array of indices.
*/
public void write(final double[] buffer, final int offset, final int[] selection);
}
Sample.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/data/Sample.java
/*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.data;
import java.io.Serializable;
import de.jannlab.misc.DoubleTools;
/**
* This class represents samples. A sample has
* an input and a target sequence. These sequences
* consist of vectors. All vectors of the sequences
 * are mapped onto a flat buffer, one vector after another.
* <br></br>
* @author Sebastian Otte
*/
public final class Sample implements Serializable {
private static final long serialVersionUID = 4129994911525616651L;
/**
* The specific sample tag.
*/
private String tag;
/**
* The size of the input vectors.
*/
private int inputsize;
/**
* The number of input vectors.
*/
private int inputlength;
/**
* The size of the target vectors.
*/
private int targetsize;
/**
* The number of target vectors.
*/
private int targetlength;
/**
* The input data buffer.
*/
private double[] input;
/**
* The output data buffer.
*/
private double[] target;
/**
     * Maps the input into a write port.
* @param input Destination write port.
*/
final public void mapInput(
final WritePort input
) {
input.write(this.input, 0);
}
/**
     * Maps the input into a write port only for a given selection.
     * @param input Destination write port.
     * @param selection A selection given as array of indices.
*/
final public void mapInput(
final WritePort input,
final int[] selection
) {
input.write(this.input, 0, selection);
}
/**
* Maps the target into a write port.
* <br><br>
* @param target Destination write port.
*/
final public void mapTarget(
final WritePort target
) {
target.write(this.target, 0);
}
/**
* Maps the target into a write port only for a given selection.
* @param target Destination write port.
     * @param selection A selection given as array of indices.
*/
final public void mapTarget(
final WritePort target,
final int[] selection
) {
target.write(this.target, 0, selection);
}
/**
* Maps input and target into two write ports.
* <br></br>
* @param input Input destination write port.
* @param target Target destination write port.
*/
final public void map(
final WritePort input,
final WritePort target
) {
input.write(this.input, 0);
target.write(this.target, 0);
}
/**
* Maps the input into a write port for a specific seq. index.
* <br></br>
* @param input Destination write port.
* @param seqidx Sequence index.
*/
final public void mapInput(
final WritePort input, final int seqidx
) {
input.write(this.input, this.inputsize * seqidx);
}
/**
     * Maps the target into a write port for a specific seq. index.
* <br></br>
* @param target Target write port.
* @param seqidx Sequence index.
*/
final public void mapTarget(
final WritePort target, final int seqidx
) {
target.write(this.target, this.targetsize * seqidx);
}
/**
* Maps the input into a write port only for a given selection
* and a specific seq. index.
* <br></br>
* @param input Destination write port.
* @param seqidx Sequence index.
* @param selection Selection as array of indices.
*/
final public void mapInput(
final WritePort input, final int seqidx, final int[] selection
) {
input.write(this.input, this.inputsize * seqidx, selection);
}
/**
* Create an instance of Sample.
*/
public Sample(
final int inputsize,
final int inputlength,
final int targetsize,
final int targetlength
) {
this(
null,
inputsize,
inputlength,
targetsize,
targetlength
);
//
}
/**
* Create an instance of Sample.
*/
public Sample(
final String tag,
final int inputsize,
final int inputlength,
final int targetsize,
final int targetlength
) {
this(
new double[inputsize * inputlength],
new double[targetsize * targetlength],
inputsize,
inputlength,
targetsize,
targetlength
);
//
}
/**
* Create an instance of Sample.
*/
public Sample(
final double[] input,
final double[] target
) {
this(
null,
input,
target
);
}
/**
* Create an instance of Sample.
*/
public Sample(
final String tag,
final double[] input,
final double[] target
) {
this(
tag,
input,
target,
input.length,
1,
target.length,
1
);
}
/**
* Create an instance of Sample.
*/
public Sample(
final double[] input,
final double[] target,
final int inputsize,
final int targetsize
) {
this(
null,
input, target,
inputsize,
targetsize
);
}
/**
* Create an instance of Sample.
*/
public Sample(
final String tag,
final double[] input,
final double[] target,
final int inputsize,
final int targetsize
) {
this(
tag,
input, target,
inputsize, input.length / inputsize,
targetsize, target.length / targetsize
);
}
/**
* Create an instance of Sample.
*/
public Sample(
final double[] input,
final double[] target,
final int inputsize,
final int inputlength,
final int targetsize,
final int targetlength
) {
this(
null, input, target,
inputsize, inputlength,
targetsize, targetlength
);
}
/**
* Create an instance of Sample.
*/
public Sample(
final String tag,
final double[] input,
final double[] target,
final int inputsize,
final int inputlength,
final int targetsize,
final int targetlength
) {
this.tag = tag;
this.input = input;
this.target = target;
//
this.inputsize = inputsize;
this.inputlength = inputlength;
this.targetsize = targetsize;
this.targetlength = targetlength;
}
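    //
    // Construction sketch (illustrative only; the values are arbitrary): a sample
    // with an input sequence of two 3-dimensional vectors and a target sequence of
    // one 2-dimensional vector is given by flat buffers of length 3*2 and 2*1:
    //
    //     Sample s = new Sample(
    //         new double[]{0.1, 0.2, 0.3,  0.4, 0.5, 0.6},   // input: 2 frames x 3 values
    //         new double[]{1.0, 0.0},                        // target: 1 frame x 2 values
    //         3, 2,                                          // inputsize, inputlength
    //         2, 1                                           // targetsize, targetlength
    //     );
    //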
/**
* Returns the tag of the sample.
*/
final public String getTag() { return this.tag; }
/**
* Returns the input size.
*/
final public int getInputSize() { return this.inputsize; }
/**
* Returns the target size.
*/
final public int getTargetSize() { return this.targetsize; }
/**
* Returns the input length.
*/
final public int getInputLength() { return this.inputlength; }
/**
* Returns the target length.
*/
final public int getTargetLength() { return this.targetlength; }
/**
* Returns the input buffer.
*/
final public double[] getInput() { return this.input; }
/**
* Returns the target buffer.
*/
final public double[] getTarget() { return this.target; }
/**
* Maps the target into a write port only for a given selection and
* specific seq. index.
* <br></br>
* @param target Destination write port.
* @param seqidx Sequence index.
* @param selection Selection as array of indices.
*/
final public void mapTarget(
final WritePort target, final int seqidx, final int[] selection
) {
target.write(this.target, this.targetsize * seqidx, selection);
}
/**
* Maps input and target into two write ports.
* @param input Input destination write port.
* @param target Target destination write port.
* @param seqidx Sequence index.
*/
final public void map(
final WritePort input,
final WritePort target,
final int seqidx
) {
input.write(this.input, this.inputsize * seqidx);
target.write(this.target, this.targetsize * seqidx);
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
StringBuilder w = new StringBuilder();
//
w.append("[\n");
//
int ioff = 0;
int toff = 0;
//
final int num = Math.max(this.inputlength, this.targetlength);
//
for (int i = 0; i < num; i++) {
//
String sinput = "";
String starget = "";
//
if (i < this.inputlength) {
sinput = DoubleTools.asString(
this.input, ioff, this.inputsize, 5
);
}
if (i < this.targetlength) {
starget = DoubleTools.asString(
this.target, toff, this.targetsize, 5
);
}
//
w.append(
"\t[" + sinput + "] -> " +
"[" + starget + "]\n"
);
//
ioff += this.inputsize;
toff += this.targetsize;
}
w.append("]\n");
return w.toString();
}
}
SampleSet.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/data/SampleSet.java
/*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.data;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Random;
/**
 * This class represents collections of samples. It is implemented as
* a simple ArrayList<Sample>.
* <br></br>
* @author Sebastian Otte
*
*/
public final class SampleSet extends ArrayList<Sample> {
//
private static final long serialVersionUID = 1346099801457631110L;
//
/**
     * Computes the maximum length of all sequences in this collection.
* @return The maximum sequence length.
*/
public int maxSequenceLength() {
int max = 0;
for (Sample s : this) {
final int length = Math.max(
s.getInputLength(), s.getTargetLength()
);
if (length > max) {
max = length;
}
}
return max;
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
return super.toString();
}
/**
     * Splits off n randomly selected samples from the sample set. The
     * selected samples are removed from this set.
     *
     * @return A set of n randomly selected samples.
*/
public SampleSet split(final int n, final Random rnd) {
SampleSet result = new SampleSet();
//
// select n samples.
//
final int size = Math.min(n, this.size());
for (int i = 0; i < size; i++) {
final int idx = rnd.nextInt(this.size());
result.add(this.get(idx));
}
//
// remove samples from set.
//
this.removeAll(result);
return result;
}
/**
     * This method shuffles the samples using the method "Collections.shuffle".
* <br></br>
* @param rnd An instance of Random.
*/
public void shuffle(final Random rnd) {
Collections.shuffle(this, rnd);
}
/**
     * This static helper method joins the given sample sets into one
     * sample set.
* <br></br>
* @param sets A various number of SampleSet instances.
* @return An instance of SampleSet containing all samples of the given sets.
*/
public static SampleSet join(final SampleSet ...sets) {
SampleSet set = new SampleSet();
//
for (SampleSet s : sets) {
set.addAll(s);
}
//
return set;
}
}
DataPort.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/data/DataPort.java
/*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.data;
import java.io.Serializable;
/**
* Combines the ReadPort and the WritePort as a shorthand.
* <br></br>
* @author Sebastian Otte
*/
public interface DataPort extends ReadPort, WritePort, Serializable {
//
}
SampleTools.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/data/SampleTools.java
/*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.data;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
/**
* This class provides some useful methods for handling Samples
 * and SampleSets. These concern normalizing features using mean
 * and standard deviation as well as reading and writing
 * comma-separated-value (CSV) files.
* <br></br>
* @see Sample
* @see SampleSet
* @author Sebastian Otte
*/
public class SampleTools {
/**
* This regular expression determines the delimiters, which
* separate values of a vector in a csv file.
*/
public static final String DEFAULT_VALUEDELIMITER = "[\\s]";
/**
* This regular expression determines the delimiters, which
* separate vectors of sequence in a csv file.
*/
public static final String DEFAULT_VECTORDELIMITER = "[,;]";
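    //
    // File layout sketch (as consumed by readCSV below): samples are stored as pairs
    // of non-comment lines, the first holding the input sequence and the second the
    // target sequence. Vectors are separated by ',' or ';', values within a vector
    // by whitespace, and lines starting with '#' are ignored:
    //
    //     # 1 sample
    //     0.1 0.2 0.3, 0.4 0.5 0.6
    //     1.0 0.0
    //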
/**
     * Computes the mean of all features for a given set
     * of Samples. This method requires all Sample instances
* to have the same input size. The result is a double array
* with length of the input size containing the means for all
* features.
* <br></br>
* @param set A set of Samples with the same input size.
* @return The mean values for all features.
*/
public static double[] mean(final SampleSet set) {
if (set.size() == 0) return null;
//
final int inputsize = set.get(0).getInputSize();
final double[] result = new double[inputsize];
long ctr = 0;
//
// add all features values.
//
for (Sample sample : set) {
//
final double[] input = sample.getInput();
int offset = 0;
for (int s = 0; s < sample.getInputLength(); s++) {
for (int i = 0; i < inputsize; i++) {
result[i] += input[offset++];
}
ctr++;
}
}
//
// divide all values by the number of samples.
//
final double inv = (1.0 / (double)ctr);
//
for (int i = 0; i < result.length; i++) {
result[i] = result[i] * inv;
}
return result;
}
/**
* Computes the standard deviation of all features for a given set of
* Samples based on an additionally given array of means. This method
* requires all Sample instances to have the same input size, which must
* also fit the length of the array of means. The result is a double array
     * with length of the input size containing the standard deviations for all
* features.
* <br></br>
* @param set A set of Samples with the same input size.
* @param mean A previously computed array of means with length of
* input size.
     * @return The standard deviations for all features.
*/
public static double[] stdDeviation(final SampleSet set, final double[] mean) {
if (set.size() == 0) return null;
//
final int inputsize = set.get(0).getInputSize();
final double[] result = new double[inputsize];
long ctr = 0;
//
// compute the squared difference to the mean vector.
//
for (Sample sample : set) {
//
final double[] input = sample.getInput();
int offset = 0;
for (int s = 0; s < sample.getInputLength(); s++) {
for (int i = 0; i < inputsize; i++) {
final double d = mean[i] - input[offset++];
result[i] += (d * d);
}
ctr++;
}
}
//
// finally compute the standard deviation.
//
final double inv = (1.0 / (double)(ctr - 1));
//
for (int i = 0; i < result.length; i++) {
result[i] = Math.sqrt(result[i] * inv);
}
return result;
}
/**
     * Normalizes all features determined by a list of indices of a set
* of samples. The method computes the mean and the standard deviation
* first, which are both used for the normalization. It requires the
* samples to have the same input size.
* <br></br>
* @param set The set of samples with the same input size.
     * @param idxs A given list of indices determining the features
     * which are to be normalized.
*/
public static void normalize(
final SampleSet set,
final int ...idxs
) {
if (set.size() == 0) return;
//
// compute the means and standard deviations first.
//
final double[] mean = mean(set);
final double[] stddev = stdDeviation(set, mean);
//
normalize(set, mean, stddev, idxs);
}
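    //
    // Usage sketch (illustrative only; "data.csv" is a placeholder path and all
    // samples are assumed to share the same input size):
    //
    //     SampleSet set = SampleTools.readCSV("data.csv");
    //     SampleTools.normalize(set, 0, 1, 2);   // z-normalize features 0, 1 and 2
    //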
/**
* Normalizes all features determined by a list of indices of a set
     * of samples. The normalization is based on the given means and standard
* deviations. The method requires the samples to have the same input size
* which must also fit the length of the array of means and the array of
* standard deviations.
* <br></br>
* @param set The set of samples with the same input size.
     * @param idxs A given list of indices determining the features
* which are to be normalized.
*/
public static void normalize(
final SampleSet set,
final double[] mean,
final double[] stddev,
final int ...idxs
) {
if (set.size() == 0) return;
//
final int inputsize = set.get(0).getInputSize();
//
for (Sample sample : set) {
//
final double[] input = sample.getInput();
//
int offset = 0;
for (int s = 0; s < sample.getInputLength(); s++) {
//
for (int i = 0; i < idxs.length; i++) {
final int idx = idxs[i];
final double x = input[offset + idx];
input[offset + idx] = ((x - mean[idx]) / stddev[idx]);
}
//
offset += inputsize;
}
}
}
private static int maxSize(final double[][] data) {
int max = 0;
for (double[] s : data) {
final int size = s.length;
if (size > max) max = size;
}
return max;
}
private static double[][] transform(final String[] vector) {
double[][] data = new double[vector.length][];
for (int i = 0; i < vector.length; i++) {
String v = vector[i].trim();
String[] vs = v.split(DEFAULT_VALUEDELIMITER);
data[i] = new double[vs.length];
for (int j = 0; j < data[i].length; j++) {
//int k = j;
data[i][j] = Double.parseDouble(vs[j]);
}
}
return data;
}
private static void map(final double[][] source, final double[] dest, final int size) {
int idx = 0;
for (int i = 0; i < source.length; i++) {
for (int j = 0; j < size; j++) {
if (j < source[i].length) {
dest[idx++] = source[i][j];
} else {
idx++;
}
}
}
}
public static SampleSet readCSV(final String filename) throws IOException {
SampleSet set = new SampleSet();
readCSV(filename, set);
return set;
}
public static void readCSV(final String filename, final SampleSet set) throws IOException {
//
File file = new File(filename);
BufferedReader reader = new BufferedReader(new FileReader(file));
//
while (reader.ready()) {
//
String line1 = null;
String line2 = null;
boolean grab1 = true;
boolean grab2 = true;
//
// grab first line:
//
while (grab1 && reader.ready()) {
line1 = reader.readLine().trim();
if (line1.startsWith("#") || line1.length() == 0) continue;
grab1 = false;
}
while (grab2 && reader.ready()) {
line2 = reader.readLine().trim();
if (line2.startsWith("#") || line2.length() == 0) continue;
grab2 = false;
}
if ((line1 == null) || (line2 == null)) {
break;
}
//
String[] inputs = line1.split(DEFAULT_VECTORDELIMITER);
String[] targets = line2.split(DEFAULT_VECTORDELIMITER);
//
final int inputlength = inputs.length;
final int targetlength = targets.length;
double[][] data1 = transform(inputs);
double[][] data2 = transform(targets);
final int inputsize = maxSize(data1);
final int targetsize = maxSize(data2);
final double[] input = new double[inputlength * inputsize];
final double[] target = new double[targetlength * targetsize];
//
map(data1, input, inputsize);
map(data2, target, targetsize);
//
Sample sample = new Sample(
input, target,
inputsize, inputlength,
targetsize, targetlength
);
set.add(sample);
}
reader.close();
//
}
private static String asSeqString(
final double[] data,
final int size,
final int length
) {
StringBuilder out = new StringBuilder();
//
int off = 0;
//
for (int i = 0; i < length; i++) {
            // Note: the delimiter constants above are regular expressions used for
            // parsing; when writing we emit plain literal separators instead.
            if (i > 0) out.append(",");
            //
            for (int j = 0; j < size; j++) {
                if (j > 0) out.append(" ");
final double value = data[off++];
out.append(value);
}
}
//
return out.toString();
}
public static void writeCSV(final SampleSet set, final String filename) throws IOException {
//
File file = new File(filename);
BufferedWriter out = new BufferedWriter(new FileWriter(file));
//
final int size = set.size();
//
out.write("# " + size + " samples\n");
//
for (int i = 0; i < size; i++) {
if (i > 0) {
out.append("\n");
}
final Sample s = set.get(i);
//
final String input = asSeqString(
s.getInput(), s.getInputSize(), s.getInputLength()
);
final String target = asSeqString(
s.getTarget(), s.getTargetSize(), s.getTargetLength()
);
out.append(input);
out.append("\n");
out.append(target);
}
out.flush();
out.close();
}
}
Serializer.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/io/Serializer.java
/*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.io;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
/**
* This class provides methods for persisting data objects. The
* objects are stored serialized into GZip archives.
* <br></br>
* @author Sebastian Otte
*/
public class Serializer {
/**
* Write a given object into a file. The instance obj
* must implement Serializable.
* <br></br>
* @param obj The object which is to be stored.
* @param filename Destination filename.
* @throws IOException
*/
public static <T> void write(final T obj, final String filename) throws IOException {
//
File file = new File(filename);
//
ObjectOutputStream out = new ObjectOutputStream(
new GZIPOutputStream(new FileOutputStream(file))
);
out.writeObject(obj);
out.flush();
out.close();
}
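    //
    // Round-trip sketch (illustrative only; assumes a Serializable Net instance
    // "net", and "mlp.net.gz" is a placeholder path; any Serializable object such
    // as a SampleSet works the same way):
    //
    //     Serializer.write(net, "mlp.net.gz");
    //     Net restored = Serializer.read("mlp.net.gz");
    //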
/**
     * Reads an object from a file. The instance is cast automatically.
* <br></br>
* @param filename Source filename.
* @return Instance of the persisted object.
* @throws IOException
* @throws ClassNotFoundException
*/
public static <T> T read(final String filename) throws IOException, ClassNotFoundException {
//
File file = new File(filename);
//
ObjectInputStream in = new ObjectInputStream(
new GZIPInputStream(new FileInputStream(file))
);
//
@SuppressWarnings("unchecked")
T obj = (T)in.readObject();
in.close();
return obj;
}
}
DataTools.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/tools/DataTools.java
/*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.tools;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import de.jannlab.data.SampleSet;
import de.jannlab.data.SampleTools;
import de.jannlab.exception.JANNLabException;
import de.jannlab.io.Serializer;
/**
* This class provides some helper methods for data handling.
* <br></br>
* @author Sebastian Otte
*/
public final class DataTools {
public static final String FEATURE_SEPERATOR = "[;,]";
public static final String FILE_SEPARATOR = "[;,]";
public static final String EXTENSION_CSV = ".csv";
public static final String EXTENSION_GZ = ".gz";
public static int[] features(final String features) {
if (features.length() == 0) return null;
List<Integer> buffer = new ArrayList<Integer>();
String[] elements = features.split(FEATURE_SEPERATOR);
for (String e : elements) {
String f = e.trim();
if (f.length() > 0) {
buffer.add(Integer.parseInt(f));
}
}
int[] result = new int[buffer.size()];
for (int i = 0; i < result.length; i++) {
result[i] = buffer.get(i);
}
return result;
}
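    //
    // Example (illustrative): features("0, 2, 5") yields the index array {0, 2, 5},
    // which can be used as a feature selection, e.g. for
    // ClassificationValidator.apply(sample, features).
    //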
private static String extension(final String filename) {
final int dotPos = filename.lastIndexOf(".");
return filename.substring(dotPos);
}
public static void loadToSampleSet(final SampleSet set, final String filename)
throws IOException, ClassNotFoundException {
//
final File file = new File(filename);
final String ext = extension(file.getName());
//
//
if (ext.equalsIgnoreCase(EXTENSION_CSV)) {
SampleSet s = SampleTools.readCSV(filename);
set.addAll(s);
} else if (ext.equalsIgnoreCase(EXTENSION_GZ)) {
final SampleSet s = Serializer.read(filename);
set.addAll(s);
} else {
throw new JANNLabException("file type '" + ext + "' not supported.");
}
}
public static SampleSet loadSampleSets(final String filenames)
throws IOException, ClassNotFoundException {
//
final String[] files = filenames.split(FILE_SEPARATOR);
return loadSampleSets(files);
}
public static SampleSet loadSampleSets(final String ...files)
throws IOException, ClassNotFoundException {
//
SampleSet set = new SampleSet();
//
for (String f : files) {
String filename = f.trim();
if (filename.length() > 0) {
loadToSampleSet(set, filename);
}
}
//
return set;
}
}
Debug.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/tools/Debug.java
/*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.tools;
/**
 * This class provides methods for handling Strings containing lines,
 * which can be used for debugging tasks.
* <br></br>
* @author Sebastian Otte
*
*/
public final class Debug {
/**
* A debug flag which can be used as a switch in source code.
*/
public static final boolean DEBUG = true;
/**
     * Merges an array of Strings into one String with
* some "glue".
* <br></br>
* @param s The array of strings.
* @param glue The glue.
* @return The merged string.
*/
public static String combine(String[] s, String glue) {
int k=s.length;
if (k==0) {
return null;
}
//
StringBuilder out = new StringBuilder();
out.append(s[0]);
//
for (int x=1;x<k;++x) {
out.append(glue).append(s[x]);
}
return out.toString();
}
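    //
    // Example (illustrative): combine(new String[]{"a", "b", "c"}, ", ")
    // yields the String "a, b, c".
    //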
/**
* Indents all lines in a given String.
* <br></br>
* @param lines A string containing lines.
* @return All lines indented.
*/
public static String indent(String lines) {
String[] split = lines.split("\n");
for (int i = 0; i < split.length; i++) {
split[i] = "\t" + split[i];
}
return combine(split, "\n");
}
}
RegressionValidator.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/tools/RegressionValidator.java
/*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.tools;
import de.jannlab.Net;
import de.jannlab.data.Sample;
/**
 * This class provides a simple way to evaluate regression tasks. A threshold
 * is given, and a sample for which the reference network produces an
 * error < threshold is counted as recognized. After testing a number of
 * samples the recognition ratio can be extracted.
* <br></br>
* @author Sebastian Otte
*/
public class RegressionValidator {
/**
* The reference network.
*/
private Net net;
/**
* Number of tested samples.
*/
private int ctr = 0;
/**
* Number of well recognized samples.
*/
private int goodctr = 0;
/**
     * The regression threshold.
*/
private double threshold = 0.001;
/**
     * Resets the internal counters.
*/
public void reset() {
this.ctr = 0;
this.goodctr = 0;
}
/**
* Creates an instance of RegressionValidator.
* @param net A given reference network.
* @param threshold The regression threshold.
*/
public RegressionValidator(final Net net, final double threshold) {
this.net = net;
this.threshold = threshold;
}
/**
     * Returns the current recognition rate (1.0 corresponds to 100%).
* @return The current recognition rate.
*/
public double ratio() {
return ((double)this.goodctr) / ((double)this.ctr);
}
/**
* Applies a given sample to the reference network for only given selection.
* <br></br>
* @param sample An instance of Sample.
* @param features A selection as array of indices.
* @return The output error of the network after processing the sample.
*/
public double apply(final Sample sample, final int[] features) {
this.net.reset();
this.ctr++;
//
        // feed net with sample and record the produced error.
//
double error = 0.0;
if (features == null) {
error = NetTools.performForward(this.net, sample);
} else {
error = NetTools.performForward(this.net, sample, features);
}
//
if (error < this.threshold) {
this.goodctr++;
}
//
return error;
}
/**
* Applies a given sample to the reference network.
* <br></br>
* @param sample An instance of Sample.
* @return The output error of the network after processing the sample.
*/
public double apply(final Sample sample) {
return this.apply(sample, null);
}
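    //
    // Usage sketch (illustrative only; assumes a trained Net "net" and a SampleSet
    // "testset" with matching dimensions):
    //
    //     RegressionValidator val = new RegressionValidator(net, 0.001);
    //     for (Sample s : testset) {
    //         val.apply(s);
    //     }
    //     double ratio = val.ratio();   // fraction of samples below the threshold
    //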
}
ClassificationValidator.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/tools/ClassificationValidator.java
/*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.tools;
import de.jannlab.Net;
import de.jannlab.data.Sample;
import de.jannlab.math.MathTools;
import de.jannlab.misc.DoubleTools;
/**
 * This class provides a simple way to evaluate classification tasks. After
 * presenting a sample, the output component with the highest value is taken
 * as class index. If the result fits the target vector, the sample is
 * counted as correctly recognized.
* <br></br>
* @author Sebastian Otte
*/
public class ClassificationValidator {
/**
* On threshold.
*/
public static final double DEFAULT_THRESHOLD = 0.5;
/**
* The reference network.
*/
private Net net;
/**
     * Counts the number of samples.
*/
private int ctr = 0;
/**
* Counts the number of well classified samples.
*/
private int goodctr = 0;
/**
* A threshold which defines a correct classification.
*/
private double threshold = DEFAULT_THRESHOLD;
/**
* Returns the internal counter.
*/
public void reset() {
this.ctr = 0;
this.goodctr = 0;
}
/**
* Creates an instance of ClassificationValidator. Here DEFAULT_THRESHOLD
* is used as classification threshold.
* @param net A given reference network.
*/
public ClassificationValidator(final Net net) {
this.net = net;
}
/**
* Creates an instance of ClassificationValidator.
* @param net A given reference network.
* @param threshold Gives a specific classification threshold.
*/
public ClassificationValidator(final Net net, final double threshold) {
this.net = net;
this.threshold = threshold;
}
/**
* Returns the classification threshold.
* @return Current classification threshold.
*/
public double getThreshold() {
return this.threshold;
}
/**
     * Returns the current recognition rate (1.0 corresponds to 100%).
* @return The current recognition rate.
*/
public double ratio() {
return ((double)this.goodctr) / ((double)this.ctr);
}
/**
* Applies a given sample to the reference network for only given selection.
* <br></br>
* @param sample An instance of Sample.
* @param features A selection as array of indices.
* @return The output vector of the network after classification.
*/
public double[] apply(final Sample sample, final int[] features) {
this.net.reset();
this.ctr++;
//
double[] result = new double[this.net.getOutputCells()];
//
// feed net with sample (discard error).
//
if (features == null) {
NetTools.performForward(this.net, sample);
} else {
NetTools.performForward(this.net, sample, features);
}
//
// read net output.
//
this.net.output(result, 0);
//
// choose class.
//
if (result.length == 1) {
if (Math.abs(sample.getTarget()[0] - result[0]) < this.threshold) {
this.goodctr++;
}
} else {
final int maxidx = MathTools.argmax(result);
//
if ((sample.getTarget()[maxidx] > this.threshold)) {
this.goodctr++;
}
}
//
return result;
}
/**
* Applies a given sample to the reference network.
* <br></br>
* @param sample An instance of Sample.
* @return The output vector of the network after classification.
*/
public double[] apply(final Sample sample) {
return this.apply(sample, null);
}
/**
* Applies a given sample to the reference network. Returns a string
* representation of the output.
* <br></br>
* @param sample An instance of Sample.
* @return The output vector of the network after classification as String.
*/
public String applyAsString(final Sample sample, final boolean printsample) {
//
StringBuilder out = new StringBuilder();
//
// first apply sample.
//
double[] output = this.apply(sample);
//
// print result into string.
//
if (printsample) {
out.append(sample.toString());
} else {
out.append(
"target: [" +
DoubleTools.asString(
sample.getTarget(), 0, sample.getTargetSize(), 5
) + "]\n"
);
}
out.append(
"output: [" +
DoubleTools.asString(
output, 0, output.length, 5
            ) + "]\n"
);
//
return out.toString();
}
}
EvaluationTools.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/tools/EvaluationTools.java
/*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.tools;
import de.jannlab.Net;
import de.jannlab.data.Sample;
import de.jannlab.data.SampleSet;
/**
 * This class provides some helper methods to keep the code
 * corresponding to experiments simple.
* <br></br>
* @author Sebastian Otte
*/
public final class EvaluationTools {
public static final double REGRESSION_THRESHOLD = 0.001;
public static final double performClassification(
final Net net,
final SampleSet samples
) {
ClassificationValidator val = new ClassificationValidator(net);
for (Sample s : samples) {
val.apply(s);
}
return val.ratio();
}
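    //
    // Usage sketch (illustrative only; assumes a trained Net "net" and a SampleSet
    // "testset"):
    //
    //     double accuracy = EvaluationTools.performClassification(net, testset);
    //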
public static final double performRegression(
final Net net,
final SampleSet samples
) {
RegressionValidator val = new RegressionValidator(net, REGRESSION_THRESHOLD);
for (Sample s : samples) {
val.apply(s);
}
return val.ratio();
}
public static final double performClassification(
final Net net,
final SampleSet samples,
final int cls
) {
ClassificationValidator val = new ClassificationValidator(net);
for (Sample s : samples) {
if (s.getTarget()[cls] > ClassificationValidator.DEFAULT_THRESHOLD) {
val.apply(s);
}
}
return val.ratio();
}
}
NetTools.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/tools/NetTools.java
/*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.tools;
import de.jannlab.Net;
import de.jannlab.data.Sample;
import de.jannlab.data.SampleSet;
/**
 * This class contains common methods for handling net instances.
* <br></br>
* @author Sebastian Otte
*/
public final class NetTools {
/**
     * Computes the error for a given set of samples. Note that
     * the resulting error is divided by the number of samples.
     * <br></br>
     * @param net An instance of Net which computes the errors.
     * @param set A set of samples.
     * @return The error produced by the network for the set of samples.
*/
public static double computeError(
final Net net, final SampleSet set
) {
//
double error = 0.0;
//
// perform forward pass for each sample.
//
for (Sample s : set) {
net.reset();
final double err = performForward(net, s);
error += err;
}
//
return error / ((double)set.size());
}
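    //
    // Usage sketch (illustrative only; assumes a trained Net "net" and a Sample
    // "sample" whose sizes match the net's input and output layers):
    //
    //     final double err = NetTools.performForward(net, sample);
    //     final double[] prediction = new double[net.getOutputCells()];
    //     net.output(prediction, 0);   // read the net's output for the current frame
    //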
/**
* Computes the forward pass of a given net for a given sample.
* Note that this method resets the frame index to zero before
* feeding the input into the net.
* <br></br>
* @param net A network instance.
 * @param sample A sample.
 * @return The error for the sample, divided by the target length.
*/
public static double performForward(
final Net net,
final Sample sample
) {
return performForward(net, sample, null);
}
/**
* Computes the forward pass of a given net for a given sample only
* on a selection of features.
* Note that this method resets the frame index to zero before
* feeding the input into the net.
* <br></br>
* @param net A network instance.
 * @param sample A sample.
 * @param features The selected input feature indices, or null to use all features.
 * @return The error for the sample, divided by the target length.
*/
public static double performForward(
final Net net,
final Sample sample,
final int[] features
) {
final int inputlength = sample.getInputLength();
final int targetlength = sample.getTargetLength();
final int last = (sample.getInputLength() - 1);
//
// reset time.
//
net.setFrameIdx(0);
//
double error = 0.0;
//
// choose computation paradigm.
//
if (net.isOnline()) {
//
// on online computation, we must feed the net with
// inputs and perform the activation for each time step.
//
for (int t = 0; t <= last; t++) {
if (features != null) {
sample.mapInput(net.inputPort(), t, features);
} else {
sample.mapInput(net.inputPort(), t);
}
net.compute();
if (t < last) net.incrFrameIdx();
}
//
// compute error.
//
final int first = Math.max(0, inputlength - targetlength);
int soff = targetlength - 1;
//
for (int t = last; t >= first; t--) {
sample.mapTarget(net.targetPort(), soff--);
error += net.error();
if (t > 0) net.decrFrameIdx();
}
net.setFrameIdx(last);
//
} else {
//
// on offline computation, we must feed the net with
            // inputs frame wise and then let the net perform the activation at
            // once. it is important to not reset the frame index after
            // feeding in the data, because the net uses the frame index
// as upper bound for the computation range over time.
//
for (int t = 0; t <= last; t++) {
if (features != null) {
sample.mapInput(net.inputPort(), t, features);
} else {
sample.mapInput(net.inputPort(), t);
}
if (t < last) net.incrFrameIdx();
}
//
net.compute();
//
            // the computation process ensures that we are at the correct
            // frame index to acquire the error.
//
sample.mapTarget(net.targetPort());
error = net.error();
}
return (error / ((double)targetlength));
}
/**
* Computes the backward pass of a given network.
* Note that this method resets the frame index to zero before
* feeding the input into the net.
* <br></br>
* @param net A network instance.
*/
public static void performBackward(
final Net net
) {
//
// we assume the error has already been fed in the network.
// so we just need to compute the gradients back in time.
//
if (net.isOnline()) {
//
            // on online computation, we must compute gradients for
// each step.
//
final int T = net.getFrameIdx();
for (int t = T; t >= 0; t--) {
net.computeGradient();
//
// step back in time.
//
if (net.getFrameIdx() > 0) net.decrFrameIdx();
}
} else {
//
// on offline computation, the network will compute
// the gradients for all time steps at once.
//
net.computeGradient();
}
}
}
| 6,339 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
DefaultNetTrainerListener.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/tools/DefaultNetTrainerListener.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.tools;
import de.jannlab.training.NetTrainer;
import de.jannlab.training.NetTrainerListener;
/**
* This class implements a standard NetTrainerListener,
* which just prints out the validation error over time.
* <br></br>
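 * <p>
 * Usage sketch (how a listener is registered is defined by
 * {@link NetTrainer}; the method name below is only an assumption):
 * <pre>
 * trainer.addListener(new DefaultNetTrainerListener());
 * </pre>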
* @author Sebastian Otte
*/
public final class DefaultNetTrainerListener implements NetTrainerListener {
private double minerror = Double.MAX_VALUE;
/**
* {@inheritDoc}
*/
@Override
public void started(final NetTrainer trainer) {
//
}
/**
* {@inheritDoc}
*/
@Override
public void epoch(final NetTrainer trainer) {
//
final int epoch = trainer.getEpoch();
final double error = trainer.getValidationError();
String state = "-";
//
if (error < this.minerror){
this.minerror = error;
state = "+";
}
//
System.out.println(
"epoch: " + epoch + "\t" + state +
"\terror: " + error +
"\tminerror: " + this.minerror
);
}
/**
* {@inheritDoc}
*/
@Override
public void finished(final NetTrainer trainer) {
System.out.println("final validation error: " + trainer.getValidationError());
minerror = Double.MAX_VALUE;
}
}
| 2,233 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
LSTMGenerator.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/generator/LSTMGenerator.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.generator;
import de.jannlab.Net;
import de.jannlab.core.CellType;
import de.jannlab.generator.exception.NoInputLayerDefined;
import de.jannlab.generator.exception.NoModificationAllowed;
/**
 * This class provides macro methods to easily set up LSTM networks.
 * The class uses the NetCoreGenerator internally.
* <br></br>
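 * <p>
 * A minimal usage sketch (the layer sizes and cell types below are
 * illustrative assumptions, not values prescribed by the framework):
 * <pre>
 * LSTMGenerator gen = new LSTMGenerator();
 * gen.inputLayer(4);                          // 4 input neurons
 * gen.hiddenLayer(8, CellType.SIGMOID,        // 8 blocks, sigmoid gates,
 *     CellType.TANH, CellType.TANH, true);    // tanh cell in/out, peepholes
 * gen.outputLayer(2);                         // 2 sigmoid output neurons
 * Net net = gen.generate();
 * </pre>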
* @see NetCoreGenerator
* @author Sebastian Otte
*/
public class LSTMGenerator implements NetGenerator {
/**
* The core generator.
*/
private NetCoreGenerator gen = new NetCoreGenerator();
//
private boolean sealed = false;
private int lastlayer = -1;
/**
* Returns the internal core generator.
* @return Instance of CoreGenerator.
*/
public NetCoreGenerator getCoreGenerator() {
return this.gen;
}
/**
* {@inheritDoc}
*/
@Override
public void clear() {
this.gen.clear();
this.sealed = false;
this.lastlayer = -1;
}
/**
* Creates an instance of LSTMGenerator.
*/
public LSTMGenerator() {
this.clear();
}
/**
 * Adds an input layer to the current model.
* @param num The number of input neurons.
* @return Layer index.
*/
public int inputLayer(final int num) {
final int layer = MLPGenerator.inputLayer(this.gen, num);
this.updateLastLayer(layer);
return layer;
}
/**
* Links last with current layer.
* <br></br>
* @param layer Current layer index.
*/
private void linkWithLastLayer(final int layer) {
this.gen.weightedLinkLayer(this.lastlayer, layer);
}
/**
* Links current with current layer (recurrent).
* <br></br>
* @param layer Current layer index.
*/
private void linkWithCurrentLayer(final int layer) {
this.gen.weightedLinkLayer(layer, layer);
}
/**
* Check if hidden layer is well defined.
*/
private void hiddenLayerCheck() {
if (this.lastlayer < 0) {
throw new NoInputLayerDefined();
}
if (this.sealed) {
throw new NoModificationAllowed();
}
}
/**
* Updates the last layer index to the current layer index.
* @param layer Layer index.
*/
private void updateLastLayer(final int layer) {
this.lastlayer = layer;
}
/**
 * Checks if the output layer is well defined.
*/
private void outputLayerCheck() {
this.hiddenLayerCheck();
}
/**
* Adds a hidden layer to the current model with n LSTM blocks.
* <br></br>
* @param blocksnum Number of LSTM blocks
* @param gates CellType of the gates.
* @param netin CellType of cell-input.
 * @param states CellType of the cell-output.
* @param peepholes Use peepholes?
* @return Layer index.
*/
public int hiddenLayer(
final int blocksnum,
final CellType gates,
final CellType netin,
final CellType states,
final boolean peepholes
) {
this.hiddenLayerCheck();
final int layer = hiddenLayer(
gen, blocksnum, gates, netin, states,
peepholes
);
//
// create feed forward links.
//
this.linkWithLastLayer(layer);
//
// create recurrent links.
//
this.linkWithCurrentLayer(layer);
this.updateLastLayer(layer);
return layer;
}
/**
 * Adds an output layer (with sigmoid cells) to the current model.
 * @param num Number of output neurons.
* @return Layer index.
*/
public int outputLayer(final int num) {
return this.outputLayer(num, CellType.SIGMOID);
}
/**
 * Adds an output layer to the current model of the given cell type.
 * @param num Number of output neurons.
* @param type Instance of CellType.
* @return Layer index.
*/
public int outputLayer(final int num, final CellType type) {
return this.outputLayer(num, type, false, 0.0);
}
/**
 * Adds an output layer to the current model of the given cell type with bias support.
 * @param num Number of output neurons.
 * @param type Instance of CellType.
 * @param usebias True if a bias should be used.
* @param bias Value of the bias.
* @return Layer index.
*/
public int outputLayer(
final int num, final CellType type,
final boolean usebias, final double bias
) {
this.outputLayerCheck();
final int layer = MLPGenerator.outputLayer(
this.gen, num, type, usebias, bias
);
this.linkWithLastLayer(layer);
this.updateLastLayer(layer);
this.sealed = true;
return layer;
}
/**
* {@inheritDoc}
*/
@Override
public Net generate() {
//
Net result = this.gen.generate();
return result;
}
//-------------------------------------------------------------------------
// STATIC MACRO METHODS.
//-------------------------------------------------------------------------
public static int inputLayer(final NetCoreGenerator gen, final int num) {
return MLPGenerator.inputLayer(gen, num);
}
/**
* Creates a hidden layer with n LSTM blocks.
* <br></br>
* @param gen Instance of NetCoreGenerator.
* @param blocksnum Number of LSTM blocks
* @param gates CellType of the gates.
* @param netin CellType of cell-input.
* @param netout CellType of cell-output.
* @param peepholes Use peepholes?
* @return Layer index.
*/
public static int hiddenLayer(
final NetCoreGenerator gen,
final int blocksnum,
final CellType gates,
final CellType netin,
final CellType netout,
final boolean peepholes
) {
return hiddenLayer(
gen, blocksnum, gates, netin, netout, peepholes,
false, 0.0, false, 0.0, false, 0.0
);
}
/**
* Creates a hidden layer with n LSTM blocks.
* <br></br>
* @param gen Instance of NetCoreGenerator.
* @param blocksnum Number of LSTM blocks
* @param gates CellType of the gates.
* @param netin CellType of cell-input.
* @param netout CellType of cell-output.
* @param peepholes Use peepholes?
* @param usegatesbias Add bias to the gates?
* @param gatesbias The bias value for the gates.
* @param useinputbias Add bias to input?
 * @param inputbias The bias value for the input.
 * @param useoutputbias Add bias to the output squashing cells?
 * @param outputbias The bias value for the output.
 * @return Layer index.
*/
public static int hiddenLayer(
final NetCoreGenerator gen,
final int blocksnum,
final CellType gates,
final CellType netin,
final CellType netout,
final boolean peepholes,
final boolean usegatesbias,
final double gatesbias,
final boolean useinputbias,
final double inputbias,
final boolean useoutputbias,
final double outputbias
) {
//
// create lstmlayer.
//
final int layer = gen.beginLayer();
//
// input gates, forget gates and input cells.
//
gen.inputConnectors();
final int input_gates = gen.cells(blocksnum, gates);
final int forget_gates = gen.cells(blocksnum, gates);
final int input_cells = gen.cells(blocksnum, netin);
gen.shiftComputationIndex();
//
// mul1, mul2 and dmul11, dmul12, dmul21, dmul22.
//
gen.nonConnectors();
final int dmul11 = gen.cells(blocksnum, CellType.DMULTIPLICATIVE);
final int dmul12 = gen.cells(blocksnum, CellType.DMULTIPLICATIVE);
final int dmul21 = gen.cells(blocksnum, CellType.DMULTIPLICATIVE);
final int dmul22 = gen.cells(blocksnum, CellType.DMULTIPLICATIVE);
gen.shiftComputationIndex();
final int mul1 = gen.cells(blocksnum, CellType.MULTIPLICATIVE);
final int mul2 = gen.cells(blocksnum, CellType.MULTIPLICATIVE);
gen.shiftComputationIndex();
//
// state.
//
final int state_cells = gen.cells(blocksnum, CellType.LINEAR);
gen.shiftComputationIndex();
//
// state-squash and output gates.
//
final int output_squash = gen.cells(blocksnum, netout);
gen.inputConnectors();
final int output_gates = gen.cells(blocksnum, gates);
gen.shiftComputationIndex();
//
// mul3 and dmul31, dmul32
//
gen.nonConnectors();
final int dmul31 = gen.cells(blocksnum, CellType.DMULTIPLICATIVE);
final int dmul32 = gen.cells(blocksnum, CellType.DMULTIPLICATIVE);
gen.shiftComputationIndex();
//
gen.outputConnectors();
final int mul3 = gen.cells(blocksnum, CellType.MULTIPLICATIVE);
//
// define links.
//
gen.link(forget_gates, dmul11, blocksnum);
gen.link(input_gates, dmul21, blocksnum);
gen.link(input_cells, dmul22, blocksnum);
//
gen.link(dmul11, mul1, blocksnum);
gen.link(dmul12, mul1, blocksnum);
gen.link(mul1, state_cells, blocksnum);
//
gen.link(state_cells, dmul12, blocksnum);
//
gen.link(dmul21, mul2, blocksnum);
gen.link(dmul22, mul2, blocksnum);
gen.link(mul2, state_cells, blocksnum);
//
gen.link(state_cells, output_squash, blocksnum);
gen.link(output_squash, dmul31, blocksnum);
gen.link(output_gates, dmul32, blocksnum);
gen.link(dmul31, mul3, blocksnum);
gen.link(dmul32, mul3, blocksnum);
//
// use weighted peepholes?
//
if (peepholes) {
gen.weightedLink(state_cells, forget_gates, blocksnum);
gen.weightedLink(state_cells, input_gates, blocksnum);
gen.weightedLink(state_cells, output_gates, blocksnum);
}
//
// add biases if requested.
//
if (usegatesbias) {
//
final int bias = gen.valueCell();
gen.assign(bias, gatesbias);
//
// link bias to the gates.
//
gen.weightedLink(bias, 1, forget_gates, blocksnum);
gen.weightedLink(bias, 1, input_gates, blocksnum);
gen.weightedLink(bias, 1, output_gates, blocksnum);
}
//
if (useinputbias) {
//
final int bias = gen.valueCell();
gen.assign(bias, inputbias);
//
// link bias to the input.
//
gen.weightedLink(bias, 1, input_cells, blocksnum);
}
//
if (useoutputbias) {
//
final int bias = gen.valueCell();
gen.assign(bias, outputbias);
//
// link bias to output.
//
gen.weightedLink(bias, 1, output_squash, blocksnum);
}
//
gen.endLayer();
//
return layer;
}
/**
 * Creates an output layer with the given NetCoreGenerator.
* @param gen Instance of NetCoreGenerator.
* @param num Number of input neurons.
* @param type Instance of CellType.
* @return Layer index.
*/
public static int outputLayer(
final NetCoreGenerator gen,
final int num,
final CellType type
) {
return MLPGenerator.outputLayer(gen, num, type);
}
/**
 * Creates an output layer with the given NetCoreGenerator with bias support.
* @param gen Instance of NetCoreGenerator.
* @param num Number of input neurons.
* @param type Instance of CellType.
* @param usebias Enable a bias neuron for this layer.
* @param bias Gives the value for the bias neuron.
* @return Layer index.
*/
public static int outputLayer(
final NetCoreGenerator gen,
final int num,
final CellType type,
final boolean usebias,
final double bias
) {
return MLPGenerator.outputLayer(gen, num, type, usebias, bias);
}
}
| 13,158 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
NetCoreGenerator.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/generator/NetCoreGenerator.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.generator;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import de.jannlab.Net;
import de.jannlab.core.BidirectionalNetBase;
import de.jannlab.core.CellArray;
import de.jannlab.core.CellType;
import de.jannlab.core.FeedForwardNetBase;
import de.jannlab.core.Layer;
import de.jannlab.core.LayerTag;
import de.jannlab.core.Link;
import de.jannlab.core.NetData;
import de.jannlab.core.NetStructure;
import de.jannlab.core.OfflineRecurrentNetBase;
import de.jannlab.core.OnlineRecurrentNetBase;
import de.jannlab.core.RecurrentNetBase;
import de.jannlab.generator.exception.LinkArrayCorrupt;
/**
* This is the core class of the generator concept. The
* methods of this class define the atomic building blocks
* which can be used to model ANNs.
* <br></br>
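 * <p>
 * A minimal usage sketch of the building blocks (the cell counts and the
 * single weighted layer link are illustrative assumptions):
 * <pre>
 * NetCoreGenerator gen = new NetCoreGenerator();
 * int in = gen.beginLayer();           // input layer of value cells
 * gen.valueCells(3);
 * gen.endLayer();
 * gen.defineInputLayer(in);
 * int out = gen.beginLayer();          // output layer of sigmoid cells
 * gen.sigmoidCells(1);
 * gen.endLayer();
 * gen.defineOutputLayer(out);
 * gen.weightedLinkLayer(in, out);      // fully connected, weighted
 * Net net = gen.generate();
 * </pre>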
* @author Sebastian Otte
*/
public class NetCoreGenerator implements NetGenerator {
/**
* Constant value for "weight is needed later".
*/
private static final int WEIGHT_NEEDED = -2;
/**
* Constant value for "has no weight".
*/
private static final int NOT_ASSIGNED = -1;
/**
* Default frame width.
*/
private static final int DEFAULT_FRAMES = 1;
/**
* A data buffer for links.
*/
private List<int[]> links = new ArrayList<int[]>();
/**
* A data buffer for cell arrays.
*/
private List<CellArray> arrays = new ArrayList<CellArray>();
/**
* A data buffer for layers.
*/
private List<Layer> layers = new ArrayList<Layer>();
/**
* Contains the net structure after modeling.
*/
private NetStructure structure = null;
/**
* Contains the net data buffer.
*/
private NetData data = null;
/**
* Maps cells to layers.
*/
private int[] layermap = null;
/**
* Maps cells to cell arrays.
*/
private int[] arraymap = null;
/**
* Contains the indeg for all cells.
*/
private int[] indeg = null;
/**
* Contains the outdeg for all cells.
*/
private int[] outdeg = null;
/**
* The current computation index counter.
*/
private int compctr = 0;
/**
* The current computation index.
*/
private int compidx = 0;
/**
* The pre-configured number of time steps.
*/
private int framewidth = 0;
/**
 * The map of assignments. The content will be transformed
 * into arrays later.
*/
private Map<Integer, Double> assigns = new HashMap<Integer, Double>();
/**
* Defines the inputlayer.
*/
private int inputlayer = NOT_ASSIGNED;
/**
* Defines the outputlayer.
*/
private int outputlayer = NOT_ASSIGNED;
/**
* Gives the current number of cells (or the cell offset).
*/
private int cellsoffset = 0;
/**
* Gives the current number of computing cells.
*/
private int comcellsnum = 0;
/**
 * Gives the current number of value cells.
*/
private int valcellsnum = 0;
/**
* Gives the current number of layer (or the layer offset).
*/
private int layeroffset = 0;
/**
* Gives the current connection mode.
*/
private int ilctag = CellArray.ILC_BOTH;
/**
* The current layer.
*/
private Layer layer = null;
/**
* Are we in a layer?
*/
private boolean inlayer = false;
/**
* Computes the network offline?
*/
private boolean offline = false;
/**
* Create an instance of NetCoreGenerator.
*/
public NetCoreGenerator() {
this.clear();
}
/**
* {@inheritDoc}
*/
@Override
public void clear() {
this.links.clear();
this.arrays.clear();
this.layers.clear();
//
this.inputlayer = NOT_ASSIGNED;
this.outputlayer = NOT_ASSIGNED;
//
this.cellsoffset = 0;
this.layeroffset = 0;
this.inlayer = false;
//
this.comcellsnum = 0;
this.valcellsnum = 0;
//
this.structure = null;
this.data = null;
//
this.layermap = null;
this.arraymap = null;
//
this.indeg = null;
this.outdeg = null;
//
this.compctr = 0;
this.compidx = 0;
//
this.framewidth = DEFAULT_FRAMES;
//
this.ilctag = CellArray.ILC_BOTH;
//
this.assigns.clear();
}
//-------------------------------------------------------------------------
// GENERATING METHODS.
//-------------------------------------------------------------------------
/**
* Sorting links, redundant link elimination, rev. links generation.
*/
private void setupLinks() {
//
// now acquire links.
//
final int rawlinksnum = this.links.size();
int[] rawlinks = new int[rawlinksnum * Link.LINK_SIZE];
int idx = 0;
//
for (int i = 0; i < rawlinksnum; i++) {
final int[] link = this.links.get(i);
//
final int src = link[0];
final int dst = link[1];
final int widx = link[2];
//
rawlinks[idx] = src; idx++;
rawlinks[idx] = dst; idx++;
rawlinks[idx] = widx; idx++;
}
//
// sort raw links and eliminate redundant links.
//
Link.sortDstMaj(rawlinks, Link.ORDER_ASC);
int[] links = Link.eliminateRedundantLinks(rawlinks);
//
if (links.length % Link.LINK_SIZE != 0) {
throw new LinkArrayCorrupt();
}
//
this.structure.links = links;
final int linksnum = links.length / Link.LINK_SIZE;
this.structure.linksnum = linksnum;
//
// count weights and assign weight indices. the
        // first weight is always 1.0.
//
int woff = 1;
idx = 0;
for (int i = 0; i < linksnum; i++) {
final int src = links[idx + Link.IDX_SRC];
final int dst = links[idx + Link.IDX_DST];
final int widx = links[idx + Link.IDX_WEIGHT];
//
this.indeg[dst]++;
this.outdeg[src]++;
//
// recurrence check.
//
final int lsrc = this.layermap[src];
final int ldst = this.layermap[dst];
if (lsrc >= ldst) {
this.structure.recurrent = true;
}
//
if (widx == WEIGHT_NEEDED) {
links[idx + Link.IDX_WEIGHT] = woff; woff++;
} else if (widx == Link.NOWEIGHT) {
links[idx + Link.IDX_WEIGHT] = 0;
}
//
idx += Link.LINK_SIZE;
}
//
// dont count the first weight, which is constant 1.
//
this.structure.weightsnum = (woff - 1);
//
// create reverted links. the weight indices are
// now in linksrev too. the reverted are sorted in
// source major order.
//
int[] linksrev = links.clone();
Link.sortSrcMaj(linksrev, Link.ORDER_ASC);
//
// swap src and dst in linksrev.
//
idx = 0;
for (int i = 0; i < linksnum; i++) {
final int src = linksrev[idx + Link.IDX_SRC];
final int dst = linksrev[idx + Link.IDX_DST];
//
linksrev[idx + Link.IDX_DST] = src;
linksrev[idx + Link.IDX_SRC] = dst;
//
idx += Link.LINK_SIZE;
}
this.structure.linksrev = linksrev;
}
/**
* Configure cell arrays. Map predecessor and successor on each cell array.
*/
private void setupArrays() {
//
// prepare arrays and sum indeg and outdeg.
//
CellArray[] arrays = new CellArray[this.arrays.size()];
for (int i = 0; i < arrays.length; i++) {
final CellArray a = this.arrays.get(i);
//
for (int j = a.cellslbd; j <= a.cellsubd; j++) {
a.indeg += this.indeg[j];
a.outdeg += this.outdeg[j];
}
//
arrays[i] = a;
}
//
// forward:
// map arrays (pred) to links and count weights
//
int link = 0;
//
for (int i = 0; i < arrays.length; i++) {
//
final CellArray a = arrays[i];
if ((a.cellsnum <= 0) || (a.indeg == 0)) continue;
final int albd = a.cellslbd;
final int aubd = a.cellsubd;
//
int lbd = -1;
int ctr = 0;
//
// find links range begin by dst idx.
//
while (
((link + Link.IDX_DST) < this.structure.links.length) &&
(this.structure.links[link + Link.IDX_DST] < albd)
) {
link += Link.LINK_SIZE;
}
lbd = link;
//
// find links range end by dst idx.
//
while (
((link + Link.IDX_DST) < this.structure.links.length) &&
(this.structure.links[link + Link.IDX_DST] <= aubd)
) {
link += Link.LINK_SIZE;
ctr++;
}
//
a.predslbd = lbd;
a.predsnum = ctr;
//
// changed: 01.02.2012, Sebastian Otte
// a.predsubd = (a.predslbd + a.predsnum) - 1;
//
a.predsubd = link - Link.LINK_SIZE;
}
//
// backward:
// map arrays (succ) to links and count weights
//
link = 0;
//
for (int i = 0; i < arrays.length; i++) {
//
final CellArray a = arrays[i];
//
if ((a.cellsnum <= 0) || (a.outdeg == 0)) continue;
final int albd = a.cellslbd;
final int aubd = a.cellsubd;
//
int lbd = -1;
int ctr = 0;
//
// find links range begin by src idx (swapped src and dst)
//
while (
((link + Link.IDX_DST) < this.structure.linksrev.length) &&
(this.structure.linksrev[link + Link.IDX_DST] < albd)
//((link + Link.IDX_SRC) < this.structure.linksrev.length) &&
//(this.structure.linksrev[link + Link.IDX_SRC] < albd)
) {
link += Link.LINK_SIZE;
}
lbd = link;
//
// find links range end by src idx (swapped src and dst).
//
while (
((link + Link.IDX_DST) < this.structure.linksrev.length) &&
(this.structure.linksrev[link + Link.IDX_DST] <= aubd)
//((link + Link.IDX_SRC) < this.structure.linksrev.length) &&
//(this.structure.linksrev[link + Link.IDX_SRC] <= aubd)
) {
link += Link.LINK_SIZE;
ctr++;
}
a.succslbd = lbd;
a.succsnum = ctr;
//
// changed: 01.02.2012, Sebastian Otte
// a.succsubd = (a.succslbd + a.succsnum) - 1;
//
a.succsubd = link - Link.LINK_SIZE;
}
this.structure.arrays = arrays;
this.structure.arraysnum = arrays.length;
}
/**
 * Sets up the cell-to-layer and cell-to-array maps.
*/
private void setupMaps() {
//
this.layermap = new int[this.cellsoffset];
this.arraymap = new int[this.cellsoffset];
this.indeg = new int[this.cellsoffset];
this.outdeg = new int[this.cellsoffset];
//
//
for (int i = 0; i < this.cellsoffset; i++) {
this.layermap[i] = NOT_ASSIGNED;
this.arraymap[i] = NOT_ASSIGNED;
}
//
// determine layer assignment.
//
for (int i = 0; i < this.layers.size(); i++) {
final Layer layer = this.layers.get(i);
//
for (int j = layer.cellslbd; j <= layer.cellsubd; j++) {
this.layermap[j] = i;
}
}
//
// determine array assignment.
//
for (int i = 0; i < this.arrays.size(); i++) {
final CellArray array = this.arrays.get(i);
//
for (int j = array.cellslbd; j <= array.cellsubd; j++) {
this.arraymap[j] = i;
}
}
}
/**
* Setup layers. This is mainly the computation order.
*/
private void setupLayers() {
//
// prepare layers and sum indeg and outdeg.
//
Layer[] layers = new Layer[this.layers.size()];
for (int i = 0; i < layers.length; i++) {
final Layer layer = this.layers.get(i);
if (layer.tag == LayerTag.REVERSED) {
this.structure.offline = true;
this.structure.bidirectional = true;
}
//
for (int j = layer.cellslbd; j <= layer.cellsubd; j++) {
layer.indeg += this.indeg[j];
layer.outdeg += this.outdeg[j];
}
//
// clean up compwidth (value arrays has compidx -1).
//
int maxidx = -1;
for (int a = layer.arrayslbd; a <= layer.arraysubd; a++) {
final CellArray array = this.structure.arrays[a];
if (array.compidx > maxidx) {
maxidx = array.compidx;
}
}
layer.compwidth = maxidx + 1;
//
// determine computation order lower and upper bounds.
//
layer.complbds = new int[layer.compwidth];
layer.compubds = new int[layer.compwidth];
int idx = -1;
int cidx = -1;
//
for (int a = layer.arrayslbd; a <= layer.arraysubd; a++) {
final CellArray array = this.structure.arrays[a];
if (array.compidx > cidx) {
idx++;
layer.complbds[idx] = a;
layer.compubds[idx] = a;
cidx = array.compidx;
} else {
if (idx >= 0) layer.compubds[idx]++;
}
}
//
layers[i] = layer;
}
//
// if net is not bidirectional force all layers to be regular.
//
if (!this.structure.recurrent || !this.structure.bidirectional) {
for (int i = 0; i < layers.length; i++) {
layers[i].tag = LayerTag.REGULAR;
}
}
//
this.structure.layers = layers;
this.structure.layersnum = layers.length;
//
final Layer in = layers[this.inputlayer];
final Layer out = layers[this.outputlayer];
//
this.structure.inputlayer = this.inputlayer;
this.structure.outputlayer = this.outputlayer;
this.structure.incellslbd = in.cellslbd;
this.structure.incellsubd = in.cellsubd;
this.structure.incellsnum = in.cellsnum;
this.structure.outcellslbd = out.cellslbd;
this.structure.outcellsubd = out.cellsubd;
this.structure.outcellsnum = out.cellsnum;
}
/**
* Builds a feedforward network.
*/
private FeedForwardNetBase buildFeedForwardNet() {
return new FeedForwardNetBase(this.structure, this.data);
}
/**
* Builds a unidirectional recurrent offline network.
*/
private OfflineRecurrentNetBase buildOfflineRecurrentNet() {
return new OfflineRecurrentNetBase(this.structure, this.data);
}
/**
* Builds a bidirectional network.
*/
private BidirectionalNetBase buildBidirectionalNet() {
return new BidirectionalNetBase(this.structure, this.data);
}
/**
* Builds a unidirectional recurrent network.
*/
private OnlineRecurrentNetBase buildOnlineRecurrentNet() {
return new OnlineRecurrentNetBase(this.structure, this.data);
}
/**
 * Builds a recurrent network.
*/
private RecurrentNetBase buildRecurrentNet() {
//
if (this.structure.offline) {
if (this.structure.bidirectional) {
return this.buildBidirectionalNet();
} else {
return this.buildOfflineRecurrentNet();
}
} else {
return this.buildOnlineRecurrentNet();
}
}
/**
* Builds a network.
*/
private Net buildNet() {
if (this.structure.recurrent) {
return this.buildRecurrentNet();
} else {
return this.buildFeedForwardNet();
}
}
/**
* Setup the data buffer. Important here are
* the assignments.
*/
private void setupData() {
final int cells = this.structure.cellsnum;
int fn = this.framewidth;
if (fn < 0) fn = 1;
if (!this.structure.recurrent) fn = 1;
//
this.data.input = new double[fn][cells];
this.data.output = new double[fn][cells];
this.data.gradinput = new double[fn][cells];
this.data.gradoutput = new double[fn][cells];
//
// build assignments arrays.
//
final Set<Integer> keys = this.assigns.keySet();
final int asgnsnum = keys.size();
//
this.data.asgns = new int[asgnsnum];
this.data.asgnsv = new double[asgnsnum];
//
int idx = 0;
for (int key : keys) {
final double value = this.assigns.get(key);
this.data.asgns[idx] = key;
this.data.asgnsv[idx] = value;
idx++;
}
//
this.data.weightsnum = this.structure.weightsnum;
this.data.weights = new double[this.data.weightsnum + 1];
this.data.framewidth = fn;
//
// always set first weight to 1.0.
//
this.data.weights[0] = 1.0;
}
/**
* {@inheritDoc}
*/
@Override
public Net generate() {
//
this.structure = new NetStructure();
this.structure.cellsnum = this.cellsoffset;
this.structure.offline = this.offline;
this.structure.valcellsnum = this.valcellsnum;
this.structure.comcellsnum = this.comcellsnum;
//
// setup maps:
// - cell to layer,
// - cell to array.
//
this.setupMaps();
//
// setup links:
// - sort and eliminate redundant links,
// - assign weights indices and determine number of weights,
// - create reverted links.
// - determine if net is feed forward or recurrent.
//
// ignore offline computation for feedforward networks.
//
if (this.structure.recurrent) {
this.structure.offline = false;
}
this.setupLinks();
//
// setup arrays:
// - determine indeg and outdeg.
// - determine predecessors ranges,
// - determine successors ranges,
        // - determine weighted? input/output links.
//
this.setupArrays();
//
// setup layer.
//
this.setupLayers();
//
/*
if (DEBUG) {
System.out.println("NetStructure : {");
System.out.println(indent(this.structure.toString()));
System.out.println("}");
//
System.out.println("");
//
System.out.println("Assignments : {");
for (Integer i : this.assigns.keySet()) {
System.out.println("\t" + i + " : " + this.assigns.get(i));
}
System.out.println("}");
System.out.println("");
}
*/
//
// setup data.
//
this.data = new NetData();
this.setupData();
//
final Net net = this.buildNet();
//
// resetting performs assignments.
//
net.reset();
return net;
}
//-------------------------------------------------------------------------
// GENERAL NETWORK METHODS.
//-------------------------------------------------------------------------
/**
* Forces the generated net to compute online. This will be ignored if
* the net is bidirectional. A bidirectional net cannot compute online.
 * <br></br>
* A non bidirectional recurrent network computes online by default.
*/
public void computeOnline() {
this.offline = false;
}
/**
* Forces the generated net to compute offline. This will be ignored
* in the case of a feedforward network. A feedforward network has no
 * time context. We define that it computes online.
 * <br></br>
* A non bidirectional recurrent network computes online by default.
*/
public void computeOffline() {
this.offline = true;
}
//-------------------------------------------------------------------------
// CELL METHODS
//-------------------------------------------------------------------------
/**
* Assigns a constant value to a given cell (cell should be a value cell).
* <br></br>
* @param cell Cell index.
 * @param value Assignment value.
*/
public void assign(final int cell, final double value) {
this.assigns.put(cell, value);
}
/**
* Compute the new cells offset after num cells.
* <br></br>
* @param num Number of cells.
 * @return New cells offset.
*/
private int cellsOffset(final int num) {
final int value = this.cellsoffset;
this.cellsoffset += num;
return value;
}
/**
* Creates a single value cell.
* <br></br>
* @return Index of the cell.
*/
public int valueCell() {
return this.valueCells(1);
}
/**
* Creates an array of num value cells.
* <br></br>
* @param num The number of cells.
* @return The index of the first cell of the array.
*/
public int valueCells(final int num) {
return this.cells(num, CellType.VALUE);
}
/**
* Creates a single cell of a given type.
* <br></br>
* @param type The specific cell type.
* @return Index of the cell.
*/
public int cell(final CellType type) {
return this.cells(1, type);
}
/**
* Creates an array of num cells of a given type.
* <br></br>
* @param num The number of cells.
* @param type The specific cell type.
* @return The index of the first cell of the array.
*/
public int cells(final int num, final CellType type) {
final int value = this.cellsOffset(num);
//
// setup up array.
//
CellArray array = new CellArray();
//
array.cellslbd = value;
array.cellsubd = (value + num) - 1;
array.cellsnum = num;
//
array.celltype = type;
array.ilctag = this.ilctag;
//
array.layer = (this.inlayer)?(this.layeroffset):(NOT_ASSIGNED);
array.compidx = this.compidx;
this.arrays.add(array);
//
if (type == CellType.VALUE) {
this.valcellsnum += num;
array.compidx = -1;
} else {
this.comcellsnum += num;
}
//
// update computation counter (per computation idx).
//
if (this.inlayer) {
this.compctr++;
}
//
return value;
}
/**
* Creates an array of num sigmoid cells.
* <br></br>
* @param num The number of cells.
* @return The index of the first cell of the array.
*/
public int sigmoidCells(final int num) {
return this.cells(num, CellType.SIGMOID);
}
/**
* Creates an array of num tanh cells.
* <br></br>
* @param num The number of cells.
* @return The index of the first cell of the array.
*/
public int tanhCells(final int num) {
return this.cells(num, CellType.TANH);
}
/**
* Creates an array of num linear cells.
* <br></br>
* @param num The number of cells.
* @return The index of the first cell of the array.
*/
public int linearCells(final int num) {
return this.cells(num, CellType.LINEAR);
}
/**
* Creates an array of num multiplicative cells.
* <br></br>
* @param num The number of cells.
* @return The index of the first cell of the array.
*/
public int multiplicativeCells(final int num) {
return this.cells(num, CellType.MULTIPLICATIVE);
}
/**
* Creates an array of num dmultiplicative cells
* (multiplicative delta correction).
* <br></br>
* @param num The number of cells.
* @return The index of the first cell of the array.
*/
public int dmultiplicativeCells(final int num) {
return this.cells(num, CellType.DMULTIPLICATIVE);
}
//-------------------------------------------------------------------------
// LINK METHODS
//-------------------------------------------------------------------------
/**
* Links cell i with cell j (non weighted).
* <br></br>
* @param i Source cell.
* @param j Destination cell.
*/
public void link(final int i, final int j) {
this.links.add(Link.link(i, j));
}
/**
* Links cell i with cell j (weighted).
* <br></br>
* @param i Source cell.
* @param j Destination cell.
*/
public void weightedLink(final int i, final int j) {
this.links.add(Link.link(i, j, WEIGHT_NEEDED));
}
/**
* Fully links cell range with another cell range (non weighted).
* <br></br>
* @param i Source cells.
* @param numi Number of source cells.
* @param j Destination cells.
* @param numj Number of destination cells.
*/
public void link(final int i, final int numi, final int j, final int numj) {
//
final int jubd = (j + numj) - 1;
final int iubd = (i + numi) - 1;
//
for (int jj = j; jj <= jubd; jj++) {
for (int ii = i; ii <= iubd; ii++) {
this.link(ii, jj);
}
}
}
/**
* Fully links cell range with another cell range (weighted).
* <br></br>
* @param i Source cells.
* @param numi Number of source cells.
* @param j Destination cells.
* @param numj Number of destination cells.
*/
public void weightedLink(final int i, final int numi, final int j, final int numj) {
//
final int jubd = (j + numj) - 1;
final int iubd = (i + numi) - 1;
//
for (int jj = j; jj <= jubd; jj++) {
for (int ii = i; ii <= iubd; ii++) {
this.weightedLink(ii, jj);
}
}
}
/**
* Symmetrically links cell range with another cell range (non weighted).
* <br></br>
* @param i Source cells.
* @param j Destination cells.
* @param num Number of cells.
*/
public void link(final int i, final int j, final int num) {
//
int ii = i;
int jj = j;
//
for (int k = 0; k < num; k++) {
this.link(ii, jj);
ii++;
jj++;
}
}
/**
* Symmetrically links cell range with another cell range (weighted).
* <br></br>
* @param i Source cells.
* @param j Destination cells.
* @param num Number of cells.
*/
public void weightedLink(final int i, final int j, final int num) {
//
int ii = i;
int jj = j;
//
for (int k = 0; k < num; k++) {
this.weightedLink(ii, jj);
ii++;
jj++;
}
}
/**
* Fully links output connector cells of the first layer with the
* input connector cells of the second layer (non weighted).
* <br></br>
* @param l1 First layer index.
* @param l2 Second layer index.
*/
public void linkLayer(final int l1, final int l2) {
//
final Layer layer1 = this.layers.get(l1);
final Layer layer2 = this.layers.get(l2);
//
for (int i = layer1.arrayslbd; i <= layer1.arraysubd; i++) {
final CellArray a1 = this.arrays.get(i);
if ((a1.ilctag & CellArray.ILC_OUT) != CellArray.ILC_OUT) continue;
//
for (int j = layer2.arrayslbd; j <= layer2.arraysubd; j++) {
final CellArray a2 = this.arrays.get(j);
if ((a2.ilctag & CellArray.ILC_IN) != CellArray.ILC_IN) continue;
//
this.link(a1.cellslbd,a1.cellsnum, a2.cellslbd, a2.cellsnum);
}
}
}
/**
* Fully links output connector cells of the first layer with the
* input connector cells of the second layer (weighted).
* <br></br>
* @param l1 First layer index.
* @param l2 Second layer index.
*/
public void weightedLinkLayer(final int l1, final int l2) {
//
final Layer layer1 = this.layers.get(l1);
final Layer layer2 = this.layers.get(l2);
//
for (int i = layer1.arrayslbd; i <= layer1.arraysubd; i++) {
final CellArray a1 = this.arrays.get(i);
if ((a1.ilctag & CellArray.ILC_OUT) != CellArray.ILC_OUT) continue;
//
for (int j = layer2.arrayslbd; j <= layer2.arraysubd; j++) {
final CellArray a2 = this.arrays.get(j);
if ((a2.ilctag & CellArray.ILC_IN) != CellArray.ILC_IN) continue;
//
this.weightedLink(a1.cellslbd,a1.cellsnum, a2.cellslbd, a2.cellsnum);
}
}
}
//-------------------------------------------------------------------------
// LAYER METHODS
//-------------------------------------------------------------------------
/**
* Marks all following cells in this layer as input connectors.
*/
public void inputConnectors() {
this.ilctag = CellArray.ILC_IN;
}
/**
* Marks all following cells in this layer as non connectors.
*/
public void nonConnectors() {
this.ilctag = CellArray.ILC_NONE;
}
/**
* Marks all following cells in this layer as output connectors.
*/
public void outputConnectors() {
this.ilctag = CellArray.ILC_OUT;
}
/**
* Marks all following cells in this layer as input connectors as well
* as output connectors.
*/
public void inputOutputConnectors() {
this.ilctag = CellArray.ILC_BOTH;
}
/**
* Increment computation index. This models a time dependency
* in the computation process.
*/
public void shiftComputationIndex() {
if (this.inlayer && (this.compctr > 0)) {
this.compidx++;
this.compctr = 0;
}
}
/**
* Start a new layer. Returns the index of the new layer.
* <br></br>
* @return Layer index.
*/
public int beginLayer() {
if (this.inlayer) return this.layeroffset;
this.layer = new Layer();
this.layers.add(this.layer);
//
this.layer.arrayslbd = this.arrays.size();
this.layer.cellslbd = this.cellsoffset;
this.layer.tag = LayerTag.REGULAR;
this.ilctag = CellArray.ILC_BOTH;
//
this.inlayer = true;
return this.layeroffset;
}
/**
 * Closes the current layer. Returns the index of the closed layer.
* <br></br>
* @return Layer index.
*/
public int endLayer() {
if (!this.inlayer) return this.layeroffset;
//
this.layer.arraysnum = this.arrays.size() - this.layer.arrayslbd;
this.layer.arraysubd = (this.layer.arrayslbd + this.layer.arraysnum) - 1;
this.layer.cellsnum = this.cellsoffset - this.layer.cellslbd;
this.layer.cellsubd = (this.layer.cellslbd + this.layer.cellsnum) - 1;
this.layer.compwidth = (this.compidx + 1);
//
this.layer = null;
//
this.inlayer = false;
this.compidx = 0;
this.compctr = 0;
return this.layeroffset++;
}
/**
* Get the currently defined input layer index.
* <br></br>
* @return Layer index.
*/
public int getInputLayer() {
return this.inputlayer;
}
/**
* Get the currently defined output layer index.
* <br></br>
* @return Layer index.
*/
public int getOutputLayer() {
return this.outputlayer;
}
/**
* Defines the given layer as regular computing (default).
* <br></br>
* @param idx Layer index.
*/
public void defineLayerAsRegular(int idx) {
this.layers.get(idx).tag = LayerTag.REGULAR;
}
/**
* Defines the given layer as reversed computing. The resulting
 * net is then bidirectional.
* <br></br>
* @param idx Layer index.
*/
public void defineLayerAsReversed(int idx) {
this.layers.get(idx).tag = LayerTag.REVERSED;
}
/**
* Defines the given layer as input layer.
* <br></br>
* @param idx Layer index.
*/
public void defineInputLayer(int idx) {
this.inputlayer = idx;
}
/**
* Defines the given layer as output layer.
* <br></br>
* @param idx Layer index.
*/
public void defineOutputLayer(int idx) {
this.outputlayer = idx;
}
/**
 * Get the first cell index of the cells contained in the
* given layer.
* <br></br>
* @param layer Layer index.
* @return Cell index.
*/
public int getLayerCells(final int layer) {
return this.layers.get(layer).cellslbd;
}
}
| 35,371 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
MLPGenerator.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/generator/MLPGenerator.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.generator;
import de.jannlab.Net;
import de.jannlab.core.CellType;
import de.jannlab.generator.exception.GeneratingFailed;
import de.jannlab.generator.exception.NoInputLayerDefined;
import de.jannlab.generator.exception.NoModificationAllowed;
import de.jannlab.generator.exception.OnlyPerceptronsAllowed;
/**
 * This class provides macro methods to easily set up MLPs.
 * The class uses the NetCoreGenerator internally.
* <br></br>
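 * <p>
 * A minimal usage sketch (layer sizes and the bias value are illustrative
 * assumptions):
 * <pre>
 * MLPGenerator gen = new MLPGenerator();
 * gen.inputLayer(3);                               // 3 input neurons
 * gen.hiddenLayer(5, CellType.SIGMOID, true, 1.0); // 5 sigmoid cells with bias
 * gen.outputLayer(1);                              // 1 sigmoid output neuron
 * Net net = gen.generate();
 * </pre>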
* @see NetCoreGenerator
* @author Sebastian Otte
*/
public class MLPGenerator implements NetGenerator {
/**
* The core generator.
*/
private NetCoreGenerator gen = new NetCoreGenerator();
private boolean sealed = false;
private int lastlayer = -1;
/**
* Returns the internal core generator.
* @return Instance of CoreGenerator.
*/
public NetCoreGenerator getCoreGenerator() {
return this.gen;
}
/**
* {@inheritDoc}
*/
@Override
public void clear() {
this.gen.clear();
this.sealed = false;
this.lastlayer = -1;
}
/**
* Creates an instance of MLPGenerator.
*/
public MLPGenerator() {
this.clear();
}
/**
 * Adds an input layer to the current model.
* @param num The number of input neurons.
* @return Layer index.
*/
public int inputLayer(final int num) {
final int layer = inputLayer(this.gen, num);
this.updateLastLayer(layer);
return layer;
}
/**
* Links last with current layer.
* <br></br>
* @param layer Current layer index.
*/
private void linkWithLastLayer(final int layer) {
this.gen.weightedLinkLayer(this.lastlayer, layer);
}
/**
* Check if hidden layer is well defined.
*/
private void hiddenLayerCheck() {
if (this.lastlayer < 0) {
throw new NoInputLayerDefined();
}
if (this.sealed) {
throw new NoModificationAllowed();
}
}
/**
* Updates the last layer index to the current layer index.
* @param layer Layer index.
*/
private void updateLastLayer(final int layer) {
this.lastlayer = layer;
}
/**
 * Checks if the output layer is well defined.
*/
private void outputLayerCheck() {
this.hiddenLayerCheck();
}
/**
* Adds a new hidden layer (with sigmoid cells) to the current model.
* @param num Number of hidden neurons.
* @return Layer index.
*/
public int hiddenLayer(final int num) {
return this.hiddenLayer(num, CellType.SIGMOID);
}
/**
 * Adds a new hidden layer to the current model of the given cell type.
 * @param num Number of hidden neurons.
* @param type Instance of CellType.
* @return Layer index.
*/
public int hiddenLayer(final int num, final CellType type) {
return this.hiddenLayer(num, type, false, 0.0);
}
/**
 * Adds a new hidden layer to the current model of the given cell type with bias support.
 * @param num Number of hidden neurons.
 * @param type Instance of CellType.
 * @param usebias True if a bias should be used.
* @param bias Value of the bias.
* @return Layer index.
*/
public int hiddenLayer(
final int num, final CellType type,
final boolean usebias, final double bias
) {
this.hiddenLayerCheck();
final int layer = hiddenLayer(
this.gen, num, type, usebias, bias
);
this.linkWithLastLayer(layer);
this.updateLastLayer(layer);
return layer;
}
/**
 * Adds an output layer (with sigmoid cells) to the current model.
 * @param num Number of output neurons.
* @return Layer index.
*/
public int outputLayer(final int num) {
return this.outputLayer(num, CellType.SIGMOID);
}
/**
 * Adds an output layer to the current model of the given cell type.
 * @param num Number of output neurons.
* @param type Instance of CellType.
* @return Layer index.
*/
public int outputLayer(final int num, final CellType type) {
return this.outputLayer(num, type, false, 0.0);
}
/**
 * Adds an output layer to the current model of the given cell type with bias support.
 * @param num Number of output neurons.
 * @param type Instance of CellType.
 * @param usebias True if a bias should be used.
* @param bias Value of the bias.
* @return Layer index.
*/
public int outputLayer(
final int num, final CellType type,
final boolean usebias, final double bias
) {
this.outputLayerCheck();
final int layer = outputLayer(
this.gen, num, type, usebias, bias
);
this.linkWithLastLayer(layer);
this.updateLastLayer(layer);
this.sealed = true;
return layer;
}
/**
* {@inheritDoc}
*/
@Override
public Net generate() {
//
Net result = this.gen.generate();
if (result.isRecurrent()) {
throw new GeneratingFailed();
}
//
return result;
}
//-------------------------------------------------------------------------
// STATIC MACRO METHODS.
//-------------------------------------------------------------------------
/**
* Checks the perceptron-constraint for a given cell type.
* @param type Instance of CellType.
*/
private static void perceptronCheck(final CellType type) {
if (!type.perceptron) {
throw new OnlyPerceptronsAllowed();
}
}
/**
* Macro for building a bias neuron.
* @param gen Instance of NetCoreGenerator.
* @param value Value of the bias neuron.
* @return Cell index.
*/
public static int bias(final NetCoreGenerator gen, final double value) {
final int cell = gen.valueCell();
gen.assign(cell, value);
return cell;
}
/**
* Creates an input layer with the given NetCoreGenerator.
* @param gen Instance of NetCoreGenerator.
* @param num Number of input neurons.
* @return Layer index.
*/
public static int inputLayer(final NetCoreGenerator gen, final int num) {
final int layer = gen.beginLayer();
gen.valueCells(num);
gen.endLayer();
gen.defineInputLayer(layer);
return layer;
}
/**
 * Creates a non-input layer with the given NetCoreGenerator with bias support.
* @param gen Instance of NetCoreGenerator.
* @param num Number of input neurons.
* @param type Instance of CellType.
* @param usebias Enable a bias neuron for this layer.
* @param bias Gives the value for the bias neuron.
* @return Layer index.
*/
private static int nonInputLayer(
final NetCoreGenerator gen,
final int num,
final CellType type,
final boolean usebias,
final double bias
) {
perceptronCheck(type);
final int layer = gen.beginLayer();
gen.cells(num, type);
gen.endLayer();
//
if (usebias) {
//
// set bias.
//
final int bcell = bias(gen, bias);
//
// connect to layer.
//
gen.weightedLink(bcell, 1, gen.getLayerCells(layer), num);
}
return layer;
}
/**
* Creates a hidden layer with the given NetCoreGenerator.
* @param gen Instance of NetCoreGenerator.
* @param num Number of input neurons.
* @param type Instance of CellType.
* @return Layer index.
*/
public static int hiddenLayer(
final NetCoreGenerator gen,
final int num,
final CellType type
) {
return nonInputLayer(gen, num, type, false, 0.0);
}
/**
* Creates a hidden layer with the given NetCoreGenerator with bias support.
* @param gen Instance of NetCoreGenerator.
* @param num Number of input neurons.
* @param type Instance of CellType.
* @param usebias Enable a bias neuron for this layer.
* @param bias Gives the value for the bias neuron.
* @return Layer index.
*/
public static int hiddenLayer(
final NetCoreGenerator gen,
final int num,
final CellType type,
final boolean usebias,
final double bias
) {
return nonInputLayer(gen, num, type, usebias, bias);
}
/**
* Creates an output layer with the given NetCoreGenerator.
* @param gen Instance of NetCoreGenerator.
* @param num Number of input neurons.
* @param type Instance of CellType.
* @return Layer index.
*/
public static int outputLayer(
final NetCoreGenerator gen,
final int num,
final CellType type
) {
final int layer = nonInputLayer(gen, num, type, false, 0.0);
gen.defineOutputLayer(layer);
return layer;
}
/**
 * Creates an output layer with the given NetCoreGenerator with bias support.
* @param gen Instance of NetCoreGenerator.
* @param num Number of input neurons.
* @param type Instance of CellType.
* @param usebias Enable a bias neuron for this layer.
* @param bias Gives the value for the bias neuron.
* @return Layer index.
*/
public static int outputLayer(
final NetCoreGenerator gen,
final int num,
final CellType type,
final boolean usebias,
final double bias
) {
final int layer = nonInputLayer(gen, num, type, usebias, bias);
gen.defineOutputLayer(layer);
return layer;
}
}
| 10,743 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
NetGenerator.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/generator/NetGenerator.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.generator;
import de.jannlab.Net;
/**
 * This interface defines what a NetGenerator must be able to do: on the one
 * hand, clear the inner model, and on the other hand, generate a network
 * from that model.
 * <br></br>
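 * <p>
 * Usage sketch (MLPGenerator is one concrete implementation from this
 * package; the layer sizes are illustrative assumptions):
 * <pre>
 * MLPGenerator mlp = new MLPGenerator();
 * mlp.inputLayer(2);
 * mlp.outputLayer(1);
 * NetGenerator gen = mlp;      // used through the interface
 * Net net = gen.generate();
 * gen.clear();                 // reset the inner model for reuse
 * </pre>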
* @author Sebastian Otte
*
*/
public interface NetGenerator {
/**
 * Generates an instance of Net from the specific inner model.
*/
public Net generate();
/**
* Resets the inner model.
*/
public void clear();
}
| 1,399 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
RNNGenerator.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/generator/RNNGenerator.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.generator;
import de.jannlab.Net;
import de.jannlab.core.CellType;
import de.jannlab.generator.exception.NoInputLayerDefined;
import de.jannlab.generator.exception.NoModificationAllowed;
/**
 * This class provides macro methods to easily set up RNNs.
 * The class uses the NetCoreGenerator internally.
* <br></br>
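 * <p>
 * A minimal usage sketch (the layer sizes are illustrative assumptions):
 * <pre>
 * RNNGenerator gen = new RNNGenerator();
 * gen.inputLayer(2);           // 2 input neurons
 * gen.hiddenLayer(6);          // 6 recurrent sigmoid cells
 * gen.outputLayer(1);          // 1 sigmoid output neuron
 * Net net = gen.generate();
 * </pre>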
* @see NetCoreGenerator
* @author Sebastian Otte
*/
public class RNNGenerator implements NetGenerator {
/**
* The core generator.
*/
private NetCoreGenerator gen = new NetCoreGenerator();
//
private boolean sealed = false;
private int lastlayer = -1;
/**
* Returns the internal core generator.
* @return Instance of CoreGenerator.
*/
public NetCoreGenerator getCoreGenerator() {
return this.gen;
}
/**
* {@inheritDoc}
*/
@Override
public void clear() {
this.gen.clear();
this.sealed = false;
this.lastlayer = -1;
}
/**
 * Creates an instance of RNNGenerator.
*/
public RNNGenerator() {
this.clear();
}
/**
 * Adds an input layer to the current model.
* @param num The number of input neurons.
* @return Layer index.
*/
public int inputLayer(final int num) {
final int layer = MLPGenerator.inputLayer(this.gen, num);
this.updateLastLayer(layer);
return layer;
}
/**
* Links last with current layer.
* <br></br>
* @param layer Current layer index.
*/
private void linkWithLastLayer(final int layer) {
this.gen.weightedLinkLayer(this.lastlayer, layer);
}
/**
* Links current with current layer (recurrent).
* <br></br>
* @param layer Current layer index.
*/
private void linkWithCurrentLayer(final int layer) {
this.gen.weightedLinkLayer(layer, layer);
}
/**
* Check if hidden layer is well defined.
*/
private void hiddenLayerCheck() {
if (this.lastlayer < 0) {
throw new NoInputLayerDefined();
}
if (this.sealed) {
throw new NoModificationAllowed();
}
}
/**
* Updates the last layer index to the current layer index.
* @param layer Layer index.
*/
private void updateLastLayer(final int layer) {
this.lastlayer = layer;
}
/**
* Checks if output layer if well defined.
*/
private void outputLayerCheck() {
this.hiddenLayerCheck();
}
/**
* Adds a new hidden layer (with sigmoid cells) to the current model.
* @param num Number of hidden neurons.
* @return Layer index.
*/
public int hiddenLayer(final int num) {
return this.hiddenLayer(num, CellType.SIGMOID);
}
/**
 * Adds a new recurrent hidden layer to the current model of the given cell type.
 * @param num Number of hidden neurons.
* @param type Instance of CellType.
* @return Layer index.
*/
public int hiddenLayer(final int num, final CellType type) {
return this.hiddenLayer(num, type, false, 0.0);
}
/**
 * Adds a new recurrent hidden layer of the given cell type, with bias support, to the current model.
 * @param num Number of hidden neurons.
 * @param type Instance of CellType.
 * @param usebias True if a bias should be used.
* @param bias Value of the bias.
* @return Layer index.
*/
public int hiddenLayer(
final int num, final CellType type,
final boolean usebias, final double bias
) {
this.hiddenLayerCheck();
final int layer = MLPGenerator.hiddenLayer(
this.gen, num, type, usebias, bias
);
//
// create feed forward links.
//
this.linkWithLastLayer(layer);
//
// create recurrent links (the layer is linked to itself, which gives the net its recurrence).
//
this.linkWithCurrentLayer(layer);
this.updateLastLayer(layer);
return layer;
}
/**
 * Adds an output layer (with sigmoid cells) to the current model.
 * @param num Number of output neurons.
* @return Layer index.
*/
public int outputLayer(final int num) {
return this.outputLayer(num, CellType.SIGMOID);
}
/**
 * Adds an output layer of the given cell type to the current model.
 * @param num Number of output neurons.
* @param type Instance of CellType.
* @return Layer index.
*/
public int outputLayer(final int num, final CellType type) {
return this.outputLayer(num, type, false, 0.0);
}
/**
 * Adds an output layer of the given cell type, with bias support, to the current model.
 * @param num Number of output neurons.
 * @param type Instance of CellType.
 * @param usebias True if a bias should be used.
* @param bias Value of the bias.
* @return Layer index.
*/
public int outputLayer(
final int num, final CellType type,
final boolean usebias, final double bias
) {
this.outputLayerCheck();
final int layer = MLPGenerator.outputLayer(
this.gen, num, type, usebias, bias
);
this.linkWithLastLayer(layer);
this.updateLastLayer(layer);
this.sealed = true;
return layer;
}
/**
* {@inheritDoc}
*/
@Override
public Net generate() {
//
Net result = this.gen.generate();
return result;
}
//-------------------------------------------------------------------------
// STATIC MACRO METHODS.
//-------------------------------------------------------------------------
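    //
    // As the instance methods above suggest, these macros only create layers
    // on the given NetCoreGenerator; links, including the recurrent self-link,
    // are added separately. A minimal sketch (layer sizes are illustrative
    // only, not taken from this file):
    //
    //   NetCoreGenerator core = new NetCoreGenerator();
    //   int in  = RNNGenerator.inputLayer(core, 3);
    //   int hid = RNNGenerator.hiddenLayer(core, 8, CellType.SIGMOID);
    //   int out = RNNGenerator.outputLayer(core, 1, CellType.SIGMOID);
    //   core.weightedLinkLayer(in, hid);  // feed forward.
    //   core.weightedLinkLayer(hid, hid); // recurrence.
    //   core.weightedLinkLayer(hid, out); // feed forward.
    //   Net net = core.generate();
    //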
/**
* Macro for building a bias neuron.
* @param gen Instance of NetCoreGenerator.
* @param value Value of the bias neuron.
* @return Cell index.
*/
public static int bias(final NetCoreGenerator gen, final double value) {
return MLPGenerator.bias(gen, value);
}
/**
* Creates an input layer with the given NetCoreGenerator.
* @param gen Instance of NetCoreGenerator.
* @param num Number of input neurons.
* @return Layer index.
*/
public static int inputLayer(final NetCoreGenerator gen, final int num) {
return MLPGenerator.inputLayer(gen, num);
}
/**
* Creates a hidden layer with the given NetCoreGenerator.
* @param gen Instance of NetCoreGenerator.
 * @param num Number of hidden neurons.
* @param type Instance of CellType.
* @return Layer index.
*/
public static int hiddenLayer(
final NetCoreGenerator gen,
final int num,
final CellType type
) {
return MLPGenerator.hiddenLayer(gen, num, type);
}
/**
* Creates a hidden layer with the given NetCoreGenerator with bias support.
* @param gen Instance of NetCoreGenerator.
 * @param num Number of hidden neurons.
* @param type Instance of CellType.
* @param usebias Enable a bias neuron for this layer.
* @param bias Gives the value for the bias neuron.
* @return Layer index.
*/
public static int hiddenLayer(
final NetCoreGenerator gen,
final int num,
final CellType type,
final boolean usebias,
final double bias
) {
return MLPGenerator.hiddenLayer(gen, num, type, usebias, bias);
}
/**
* Creates an output layer with the given NetCoreGenerator.
* @param gen Instance of NetCoreGenerator.
 * @param num Number of output neurons.
* @param type Instance of CellType.
* @return Layer index.
*/
public static int outputLayer(
final NetCoreGenerator gen,
final int num,
final CellType type
) {
return MLPGenerator.outputLayer(gen, num, type);
}
/**
 * Creates an output layer with the given NetCoreGenerator with bias support.
 * @param gen Instance of NetCoreGenerator.
 * @param num Number of output neurons.
* @param type Instance of CellType.
* @param usebias Enable a bias neuron for this layer.
* @param bias Gives the value for the bias neuron.
* @return Layer index.
*/
public static int outputLayer(
final NetCoreGenerator gen,
final int num,
final CellType type,
final boolean usebias,
final double bias
) {
return MLPGenerator.outputLayer(gen, num, type, usebias, bias);
}
}
| 9,405 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
NoModificationAllowed.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/generator/exception/NoModificationAllowed.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.generator.exception;
/**
 * Thrown when the model is sealed and no further modification is allowed.
* <br></br>
* @author Sebastian Otte
*/
public class NoModificationAllowed extends NetGeneratorException {
private static final long serialVersionUID = -678831812096197825L;
public NoModificationAllowed() { super("no modification allowed."); }
}
| 1,252 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
NetGeneratorException.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/generator/exception/NetGeneratorException.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.generator.exception;
import de.jannlab.exception.JANNLabException;
/**
* This is a special Exception thrown by NetGenerators.
* <br></br>
* @author Sebastian Otte
*
*/
public class NetGeneratorException extends JANNLabException {
private static final long serialVersionUID = -2617133140994250533L;
public NetGeneratorException(
final String msg
) {
super(msg);
}
    public NetGeneratorException() { }
}
| 1,368 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |
LinkArrayCorrupt.java | /FileExtraction/Java_unseen/JANNLab_JANNLab/src/main/java/de/jannlab/generator/exception/LinkArrayCorrupt.java | /*******************************************************************************
* JANNLab Neural Network Framework for Java
* Copyright (C) 2012-2013 Sebastian Otte
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package de.jannlab.generator.exception;
/**
 * This exception is thrown when one of the link arrays
 * contains invalid values.
* <br></br>
* @author Sebastian Otte
*/
public class LinkArrayCorrupt extends NetGeneratorException {
private static final long serialVersionUID = -678831812096197825L;
public LinkArrayCorrupt() { super("link array corrupt."); }
}
| 1,276 | Java | .java | JANNLab/JANNLab | 31 | 14 | 3 | 2013-06-26T21:04:08Z | 2015-08-01T21:01:44Z |