conflict_resolution
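Each row below is one dataset example: one or more merge-conflict hunks (a "<<<<<<<" marker opens the local side, "=======" switches to the incoming side, ">>>>>>>" closes the hunk), followed by the resolution that was committed. Rows are separated by "---". As a minimal sketch, a row can be split into (ours, theirs, resolution) triples as below, assuming the markers always start a line exactly as shown here; ConflictRowParser is a hypothetical helper written for illustration, not part of any published dataset tooling.

import java.util.ArrayList;
import java.util.List;

// Hypothetical sketch: split one dataset row into conflict hunks plus the
// resolution text recorded after each hunk's closing marker.
public class ConflictRowParser {

    /** One conflict hunk and the resolution lines that follow its ">>>>>>>" marker. */
    public static class Hunk {
        public final List<String> ours = new ArrayList<>();
        public final List<String> theirs = new ArrayList<>();
        public final List<String> resolution = new ArrayList<>();
    }

    public static List<Hunk> parse(String row) {
        List<Hunk> hunks = new ArrayList<>();
        Hunk current = null;
        int section = -1; // 0 = ours, 1 = theirs, 2 = resolution
        for (String line : row.split("\n", -1)) {
            String trimmed = line.trim();
            if (trimmed.startsWith("<<<<<<<")) {
                current = new Hunk();   // a new hunk begins with the local side
                hunks.add(current);
                section = 0;
            } else if (trimmed.startsWith("=======") && current != null) {
                section = 1;            // switch to the incoming side
            } else if (trimmed.startsWith(">>>>>>>") && current != null) {
                section = 2;            // lines until the next hunk are the resolution
            } else if (current != null) {
                switch (section) {
                    case 0: current.ours.add(line); break;
                    case 1: current.theirs.add(line); break;
                    default: current.resolution.add(line); break;
                }
            }
        }
        return hunks;
    }
}

Applied to the first row below, this sketch would yield two hunks: the import-list conflict and the logger/sessionKey field conflict, each paired with the resolution lines after its closing marker.
---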
<<<<<<<
import java.io.IOException;
import java.security.Key;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.EnumSet;
import java.util.concurrent.Future;
import java.util.Arrays;
import javax.crypto.spec.SecretKeySpec;
import javax.security.auth.Subject;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSCredential;
import org.ietf.jgss.GSSException;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import org.ietf.jgss.Oid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.hierynomus.mserref.NtStatus;
import com.hierynomus.mssmb2.SMB2Header;
import com.hierynomus.mssmb2.messages.SMB2SessionSetup;
import com.hierynomus.protocol.commons.ByteArrayUtils;
import com.hierynomus.protocol.commons.concurrent.Futures;
import com.hierynomus.smbj.common.MessageSigning;
import com.hierynomus.smbj.connection.Connection;
import com.hierynomus.smbj.session.Session;
import com.hierynomus.smbj.transport.TransportException;
import com.hierynomus.spnego.SpnegoException;
import com.sun.security.jgss.ExtendedGSSContext;
import com.sun.security.jgss.InquireType;
=======
import java.io.IOException;
import com.hierynomus.smbj.session.Session;
>>>>>>>
import java.io.IOException;
import java.security.Key;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import javax.security.auth.Subject;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSException;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import org.ietf.jgss.Oid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.hierynomus.mssmb2.SMB2Header;
import com.hierynomus.protocol.commons.ByteArrayUtils;
import com.hierynomus.smbj.connection.Connection;
import com.hierynomus.smbj.session.Session;
import com.hierynomus.smbj.transport.TransportException;
import com.sun.security.jgss.ExtendedGSSContext;
import com.sun.security.jgss.InquireType;
<<<<<<<
private static final Logger logger = LoggerFactory.getLogger(SpnegoAuthenticator.class);
private byte[] sessionKey;
=======
>>>>>>>
private static final Logger logger = LoggerFactory.getLogger(SpnegoAuthenticator.class);
---
<<<<<<<
menuBar.getMenus().addAll(menuFile, menuGeneration, menuHelp);
=======
menuBar.getMenus().addAll(menuFile, menuCapture, menuHelp);
>>>>>>>
menuBar.getMenus().addAll(menuFile, menuCapture, menuGeneration, menuHelp);
---
<<<<<<<
public Media media;
=======
public Keyboard keyboard;
>>>>>>>
public Keyboard keyboard;
<<<<<<<
media = new Media();
=======
keyboard = new Keyboard(webEngineRuntime);
>>>>>>>
<<<<<<<
media.load();
=======
keyboard.load();
>>>>>>>
media.load();
keyboard.load();
<<<<<<<
media.unload();
=======
keyboard.unload();
>>>>>>>
media.unload();
keyboard.unload();
---
<<<<<<<
import org.fossasia.pslab.experimentsetup.AstableMultivibratorExperiment;
=======
import org.fossasia.pslab.experimentsetup.ACGeneratorExperiment;
>>>>>>>
import org.fossasia.pslab.experimentsetup.AstableMultivibratorExperiment;
import org.fossasia.pslab.experimentsetup.ACGeneratorExperiment;
<<<<<<<
if (experimentTitle.equals(context.getString(R.string.astable_multivibrator)))
return ExperimentDocFragment.newInstance("J_Astable.md");
=======
if (experimentTitle.equals(context.getString(R.string.light_dependent_resistor)))
return ExperimentDocFragment.newInstance("K_LDR.md");
>>>>>>>
if (experimentTitle.equals(context.getString(R.string.light_dependent_resistor)))
return ExperimentDocFragment.newInstance("K_LDR.md");
if (experimentTitle.equals(context.getString(R.string.astable_multivibrator)))
return ExperimentDocFragment.newInstance("J_Astable.md");
<<<<<<<
if (experimentTitle.equals(context.getString(R.string.astable_multivibrator)))
return AstableMultivibratorExperiment.newInstance();
=======
if (experimentTitle.equals(context.getString(R.string.light_dependent_resistor)))
return LightDependentResistorExperiment.newInstance();
>>>>>>>
if (experimentTitle.equals(context.getString(R.string.light_dependent_resistor)))
return LightDependentResistorExperiment.newInstance();
if (experimentTitle.equals(context.getString(R.string.astable_multivibrator)))
return AstableMultivibratorExperiment.newInstance();
---
<<<<<<<
DiodeClippingExperiment.OnFragmentInteractionListener,
AstableMultivibratorFragment.OnFragmentInteractionListener,
=======
DiodeClippingClampingExperiment.OnFragmentInteractionListener,
>>>>>>>
DiodeClippingClampingExperiment.OnFragmentInteractionListener,
AstableMultivibratorFragment.OnFragmentInteractionListener,
<<<<<<<
public boolean isDiodeClippingExperiment;
public boolean isAstableMultivibratorExperiment;
=======
public boolean isDiodeClippingClampingExperiment;
>>>>>>>
public boolean isDiodeClippingClampingExperiment;
public boolean isAstableMultivibratorExperiment;
<<<<<<<
Fragment diodeClippingFragment;
Fragment astableMultivibratorFragment;
=======
Fragment diodeClippingClampingFragment;
>>>>>>>
Fragment astableMultivibratorFragment;
Fragment diodeClippingClampingFragment;
<<<<<<<
diodeClippingFragment = new DiodeClippingExperiment();
astableMultivibratorFragment = new AstableMultivibratorFragment();
=======
diodeClippingClampingFragment = new DiodeClippingClampingExperiment();
>>>>>>>
diodeClippingClampingFragment = new DiodeClippingClampingExperiment();
astableMultivibratorFragment = new AstableMultivibratorFragment();
<<<<<<<
} else if (isDiodeClippingExperiment) {
addFragment(R.id.layout_dock_os2, diodeClippingFragment, "DiodeClippingFragment");
} else if (isAstableMultivibratorExperiment) {
addFragment(R.id.layout_dock_os2, astableMultivibratorFragment, "DiodeClippingFragment");
=======
} else if (isDiodeClippingClampingExperiment) {
addFragment(R.id.layout_dock_os2, diodeClippingClampingFragment, "DiodeClippingClampingFragment");
>>>>>>>
} else if (isAstableMultivibratorExperiment) {
addFragment(R.id.layout_dock_os2, astableMultivibratorFragment, "DiodeClippingFragment");
} else if (isDiodeClippingClampingExperiment) {
addFragment(R.id.layout_dock_os2, diodeClippingClampingFragment, "DiodeClippingClampingFragment");
<<<<<<<
if (isHalfWaveRectifierExperiment || isDiodeClippingExperiment || isAstableMultivibratorExperiment) {
=======
if (isHalfWaveRectifierExperiment || isDiodeClippingClampingExperiment) {
>>>>>>>
if (isHalfWaveRectifierExperiment || isDiodeClippingClampingExperiment || isAstableMultivibratorExperiment) {
<<<<<<<
if (isHalfWaveRectifierExperiment || isFullWaveRectifierExperiment || isDiodeClippingExperiment) {
=======
if (isHalfWaveRectifierExperiment || isFullWaveRectifierExperiment || isDiodeClippingClampingExperiment) {
>>>>>>>
if (isHalfWaveRectifierExperiment || isFullWaveRectifierExperiment || isDiodeClippingClampingExperiment) {
---
<<<<<<<
import me.xiaoapn.easy.imageloader.execute.DisplayTask;
=======
import me.xiaoapn.easy.imageloader.execute.DisplayBitmapTask;
>>>>>>>
import me.xiaoapn.easy.imageloader.execute.DisplayBitmapTask;
<<<<<<<
* @param imageUrl the image download URL
* @param cacheFile the cache file
* @param imageView the view that displays the image
* @param options the loading options
*/
public void display(String imageUrl, File cacheFile, ImageView imageView, Options options){
if(options == null){
options = getConfiguration().getDefaultOptions();
}
if(GeneralUtils.isEmpty(imageUrl) && cacheFile == null){
exceptionHandle(imageView, options);
if(getConfiguration().isDebugMode()){
Log.e(getConfiguration().getLogTag(), "imageUrl and cacheFile must not both be null");
}
return;
}
if(imageView == null){
exceptionHandle(imageView, options);
if(getConfiguration().isDebugMode()){
Log.e(getConfiguration().getLogTag(), "imageView must not be null");
}
return;
}
UrlRequest urlRequest = new UrlRequest(GeneralUtils.encodeUrl(imageUrl), imageUrl, imageUrl, cacheFile, imageView, options);
if(!tryShow(urlRequest)){
load(urlRequest);
}
}
/**
* Display an image
* @param imageUrl the image download URL
* @param cacheFile the cache file
* @param imageView the view that displays the image
*/
public void display(String imageUrl, File cacheFile, ImageView imageView){
display(imageUrl, cacheFile, imageView, null);
}
/**
* Display an image
=======
>>>>>>>
---
<<<<<<<
@RequestParam(required = false) Long endTime) throws ThingsboardException {
=======
@RequestParam(required = false) Long endTime,
@RequestParam(required = false, defaultValue = "false") boolean ascOrder,
@RequestParam(required = false) String offset,
@RequestParam(name = "actionTypes", required = false) String actionTypesStr) throws ThingsboardException {
>>>>>>>
@RequestParam(required = false) Long endTime,
@RequestParam(name = "actionTypes", required = false) String actionTypesStr) throws ThingsboardException {
<<<<<<<
TimePageLink pageLink = createTimePageLink(pageSize, page, textSearch, sortProperty, sortOrder, startTime, endTime);
return checkNotNull(auditLogService.findAuditLogsByTenantIdAndCustomerId(tenantId, new CustomerId(UUID.fromString(strCustomerId)), pageLink));
=======
TimePageLink pageLink = createPageLink(limit, startTime, endTime, ascOrder, offset);
List<ActionType> actionTypes = parseActionTypesStr(actionTypesStr);
return checkNotNull(auditLogService.findAuditLogsByTenantIdAndCustomerId(tenantId, new CustomerId(UUID.fromString(strCustomerId)), actionTypes, pageLink));
>>>>>>>
TimePageLink pageLink = createTimePageLink(pageSize, page, textSearch, sortProperty, sortOrder, startTime, endTime);
List<ActionType> actionTypes = parseActionTypesStr(actionTypesStr);
return checkNotNull(auditLogService.findAuditLogsByTenantIdAndCustomerId(tenantId, new CustomerId(UUID.fromString(strCustomerId)), actionTypes, pageLink));
<<<<<<<
@RequestParam(required = false) Long endTime) throws ThingsboardException {
=======
@RequestParam(required = false) Long endTime,
@RequestParam(required = false, defaultValue = "false") boolean ascOrder,
@RequestParam(required = false) String offset,
@RequestParam(name = "actionTypes", required = false) String actionTypesStr) throws ThingsboardException {
>>>>>>>
@RequestParam(required = false) Long endTime,
@RequestParam(name = "actionTypes", required = false) String actionTypesStr) throws ThingsboardException {
<<<<<<<
TimePageLink pageLink = createTimePageLink(pageSize, page, textSearch, sortProperty, sortOrder, startTime, endTime); return checkNotNull(auditLogService.findAuditLogsByTenantIdAndUserId(tenantId, new UserId(UUID.fromString(strUserId)), pageLink));
=======
TimePageLink pageLink = createPageLink(limit, startTime, endTime, ascOrder, offset);
List<ActionType> actionTypes = parseActionTypesStr(actionTypesStr);
return checkNotNull(auditLogService.findAuditLogsByTenantIdAndUserId(tenantId, new UserId(UUID.fromString(strUserId)), actionTypes, pageLink));
>>>>>>>
TimePageLink pageLink = createTimePageLink(pageSize, page, textSearch, sortProperty, sortOrder, startTime, endTime);
List<ActionType> actionTypes = parseActionTypesStr(actionTypesStr);
return checkNotNull(auditLogService.findAuditLogsByTenantIdAndUserId(tenantId, new UserId(UUID.fromString(strUserId)), actionTypes, pageLink));
<<<<<<<
@RequestParam(required = false) Long endTime) throws ThingsboardException {
=======
@RequestParam(required = false) Long endTime,
@RequestParam(required = false, defaultValue = "false") boolean ascOrder,
@RequestParam(required = false) String offset,
@RequestParam(name = "actionTypes", required = false) String actionTypesStr) throws ThingsboardException {
>>>>>>>
@RequestParam(required = false) Long endTime,
@RequestParam(name = "actionTypes", required = false) String actionTypesStr) throws ThingsboardException {
<<<<<<<
TimePageLink pageLink = createTimePageLink(pageSize, page, textSearch, sortProperty, sortOrder, startTime, endTime); return checkNotNull(auditLogService.findAuditLogsByTenantIdAndEntityId(tenantId, EntityIdFactory.getByTypeAndId(strEntityType, strEntityId), pageLink));
=======
TimePageLink pageLink = createPageLink(limit, startTime, endTime, ascOrder, offset);
List<ActionType> actionTypes = parseActionTypesStr(actionTypesStr);
return checkNotNull(auditLogService.findAuditLogsByTenantIdAndEntityId(tenantId, EntityIdFactory.getByTypeAndId(strEntityType, strEntityId), actionTypes, pageLink));
>>>>>>>
TimePageLink pageLink = createTimePageLink(pageSize, page, textSearch, sortProperty, sortOrder, startTime, endTime);
List<ActionType> actionTypes = parseActionTypesStr(actionTypesStr);
return checkNotNull(auditLogService.findAuditLogsByTenantIdAndEntityId(tenantId, EntityIdFactory.getByTypeAndId(strEntityType, strEntityId), actionTypes, pageLink));
<<<<<<<
@RequestParam(required = false) Long endTime) throws ThingsboardException {
=======
@RequestParam(required = false) Long endTime,
@RequestParam(required = false, defaultValue = "false") boolean ascOrder,
@RequestParam(required = false) String offset,
@RequestParam(name = "actionTypes", required = false) String actionTypesStr) throws ThingsboardException {
>>>>>>>
@RequestParam(required = false) Long endTime,
@RequestParam(name = "actionTypes", required = false) String actionTypesStr) throws ThingsboardException {
<<<<<<<
TimePageLink pageLink = createTimePageLink(pageSize, page, textSearch, sortProperty, sortOrder, startTime, endTime);
return checkNotNull(auditLogService.findAuditLogsByTenantId(tenantId, pageLink));
=======
TimePageLink pageLink = createPageLink(limit, startTime, endTime, ascOrder, offset);
List<ActionType> actionTypes = parseActionTypesStr(actionTypesStr);
return checkNotNull(auditLogService.findAuditLogsByTenantId(tenantId, actionTypes, pageLink));
>>>>>>>
List<ActionType> actionTypes = parseActionTypesStr(actionTypesStr);
TimePageLink pageLink = createTimePageLink(pageSize, page, textSearch, sortProperty, sortOrder, startTime, endTime);
return checkNotNull(auditLogService.findAuditLogsByTenantId(tenantId, actionTypes, pageLink));
---
<<<<<<<
projectExistsForOwner(newProjectName, user)) {
=======
projectExists(newProjectName)) {
>>>>>>>
projectExists(newProjectName)) {
<<<<<<<
String resultsPath = projectPath + File.separator
+ Constants.DIR_RESULTS;
String cuneiformPath = projectPath + File.separator
+ Constants.DIR_CUNEIFORM;
String samplesPath = projectPath + File.separator
+ Constants.DIR_SAMPLES;
String dataSetPath = projectPath + File.separator
+ Constants.DIR_DATASET;
fileOps.mkDir(projectPath, -1);
fileOps.mkDir(resultsPath, -1);
fileOps.mkDir(cuneiformPath, -1);
fileOps.mkDir(samplesPath, -1);
fileOps.mkDir(dataSetPath, -1);
=======
fileOps.mkDir(projectPath);
>>>>>>>
fileOps.mkDir(projectPath, -1);
---
<<<<<<<
=======
public String getOwnerRole() {
return owner;
}
public void setOwnerRole(String owner) {
this.owner = owner;
}
public String getNewTeamRole() {
return newTeamRole;
}
public void setNewTeamRole(String newTeamRole) {
this.newTeamRole = newTeamRole;
}
public String getChangedRole() {
return newChangedRole;
}
public void setChangedRole(String newChangedRole) {
this.newChangedRole = newChangedRole;
}
public String getNewRole() {
return newRole;
}
public void setNewRole(String newRole) {
this.newRole = newRole;
}
>>>>>>>
<<<<<<<
=======
>>>>>>>
<<<<<<<
=======
public DatasetStudy getDatasetStudy() {
if (dsStudy == null) {
dsStudy = new DatasetStudy();
}
return dsStudy;
}
public void setDatasetStudy(DatasetStudy dsStudy) {
this.dsStudy = dsStudy;
}
public Dataset getDataset() {
if (dataset == null) {
dataset = new Dataset();
}
return dataset;
}
public void setDataset(Dataset dataset) {
this.dataset = dataset;
}
>>>>>>>
<<<<<<<
=======
public String getStudyTeamRole(String email) {
this.setTeamRole = studyTeamController.findByPrimaryKey(studyName, email).getTeamRole();
return setTeamRole;
}
public void setStudyTeamRole(String email, String role) {
this.setTeamRole = role;
}
>>>>>>>
<<<<<<<
=======
public List<StudyRoleTypes> getTeamForResearchList() {
List<StudyRoleTypes> reOrder = new ArrayList<>();
reOrder.add(StudyRoleTypes.RESEARCHER);
reOrder.add(StudyRoleTypes.MASTER);
reOrder.add(StudyRoleTypes.AUDITOR);
return reOrder;
}
public List<StudyRoleTypes> getTeamForGuestList() {
List<StudyRoleTypes> reOrder = new ArrayList<>();
reOrder.add(StudyRoleTypes.AUDITOR);
reOrder.add(StudyRoleTypes.MASTER);
reOrder.add(StudyRoleTypes.RESEARCHER);
return reOrder;
}
>>>>>>>
<<<<<<<
=======
>>>>>>>
<<<<<<<
//Set the study owner as study master in StudyTeam table
=======
>>>>>>>
//Set the study owner as study master in StudyTeam table
<<<<<<<
boolean rec = sampleFilesController.checkForExistingSampleFiles(getSampleID(), fileName);
=======
boolean rec = sampleFilesController.checkForExistingSampleFiles(getSampleID(), fileName);
>>>>>>>
boolean rec = sampleFilesController.checkForExistingSampleFiles(getSampleID(), fileName);
<<<<<<<
public void mkDIRS(String sampleID, String fileType, String fileName) throws IOException {
=======
public void mkDIRS(String fileType, String fileName, String sampleID) throws IOException, URISyntaxException {
>>>>>>>
public void mkDIRS(String sampleID, String fileType, String fileName) throws IOException {
<<<<<<<
fs.mkdirs(path.suffix(File.separator + sampleID + File.separator + fileType.toUpperCase().trim()), null);
copyFromLocal(fileType, fileName, sampleID);
} catch (URISyntaxException uri) {
logger.log(Level.SEVERE, "Directories were not created in HDFS...{0}", uri.getMessage());
=======
fs.mkdirs(path.suffix(File.separator + sampleID + File.separator + fileType.toUpperCase().trim()), null);
copyFromLocal(fileType, fileName, sampleID);
} catch (IOException ioe) {
System.err.println("IOException during operation" + ioe.getMessage());
>>>>>>>
fs.mkdirs(path.suffix(File.separator + sampleID + File.separator + fileType.toUpperCase().trim()), null);
copyFromLocal(fileType, fileName, sampleID);
} catch (URISyntaxException uri) {
logger.log(Level.SEVERE, "Directories were not created in HDFS...{0}", uri.getMessage());
<<<<<<<
logger.log(Level.INFO, "Sample file status updated in TreeTable, ID: {0}, Name: {1}", new Object[]{id,filename});
=======
for (FileStructureListener l : fileListeners) {
l.updateStatus(id, filename.substring(filename.lastIndexOf('.') + 1), filename, "available");
}
>>>>>>>
for (FileStructureListener l : fileListeners) {
l.updateStatus(id, filename.substring(filename.lastIndexOf('.') + 1), filename, "available");
}
<<<<<<<
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId);
if (fs.exists(build)) {
fs.delete(build, true);
logger.log(Level.INFO, "{0} - Sample was deleted from {1} in HDFS", new Object[]{sampleId, studyName});
} else {
logger.log(Level.SEVERE, "Sample id {0} does not exist", sampleId);
}
//remove the sample from SampleIds
deleteSamples(sampleId);
=======
/*
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId);
if (fs.exists(build)) {
fs.delete(build, true);
} else {
System.out.println("Sample ID does not exist");
}
*/
//remove the sample from SampleIds
deleteSamples(sampleId);
>>>>>>>
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId);
if (fs.exists(build)) {
fs.delete(build, true);
logger.log(Level.INFO, "{0} - Sample was deleted from {1} in HDFS", new Object[]{sampleId, studyName});
} else {
logger.log(Level.SEVERE, "Sample id {0} does not exist", sampleId);
}
//remove the sample from SampleIds
deleteSamples(sampleId);
<<<<<<<
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim());
if (fs.exists(build)) {
fs.delete(build, true);
logger.log(Level.INFO, "{0} - File type folder was deleted from {1} in HDFS", new Object[]{fileType.toUpperCase(), studyName});
} else {
logger.log(Level.SEVERE, "{0} - File type folder does not exist", fileType.toUpperCase());
}
//remove file type records
deleteFileTypes(sampleId, fileType);
=======
/*
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim());
if (fs.exists(build)) {
fs.delete(build, true);
} else {
System.out.println("File Type folder does not exist");
}
*/
//remove file type records
deleteFileTypes(sampleId, fileType);
>>>>>>>
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim());
if (fs.exists(build)) {
fs.delete(build, true);
logger.log(Level.INFO, "{0} - File type folder was deleted from {1} in HDFS", new Object[]{fileType.toUpperCase(), studyName});
} else {
logger.log(Level.SEVERE, "{0} - File type folder does not exist", fileType.toUpperCase());
}
//remove file type records
deleteFileTypes(sampleId, fileType);
<<<<<<<
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim() + File.separator + filename);
//check the file count inside a directory, if it is only one then recursive delete
if (fs.exists(build)) {
//if(fs.getFileStatus(build))
fs.delete(build, false);
} else {
System.out.println("File does not exist");
}
=======
/*
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim() + File.separator + filename);
if (fs.exists(build)) {
fs.delete(build, false);
} else {
System.out.println("File does not exist");
}
*/
>>>>>>>
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path build = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim() + File.separator + filename);
//check the file count inside a directory, if it is only one then recursive delete
if (fs.exists(build)) {
//if(fs.getFileStatus(build))
fs.delete(build, false);
} else {
System.out.println("File does not exist");
}
<<<<<<<
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path outputPath = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim() + File.separator + fileName.trim());
try {
if (!fs.exists(outputPath)) {
System.out.println("Error: File does not exist for downloading" + fileName);
return;
}
InputStream inStream = fs.open(outputPath, 1048576);
file = new DefaultStreamedContent(inStream, "fastq/fasta/bam/sam/vcf", fileName);
} finally {
//inStream.close();
}
=======
/*
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path outputPath = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim() + File.separator + fileName.trim());
//System.out.println("download path "+outputPath.toString());
try {
if (!fs.exists(outputPath)) {
System.out.println("Error: File does not exist for downloading" + fileName);
return;
}
InputStream inStream = fs.open(outputPath, 1048576);
file = new DefaultStreamedContent(inStream, "fastq/fasta/bam/sam/vcf", fileName);
} finally {
//inStream.close();
}*/
System.out.println("Called download");
>>>>>>>
Configuration conf = new Configuration();
conf.set("fs.defaultFS", this.nameNodeURI);
FileSystem fs = FileSystem.get(conf);
String rootDir = "Projects";
String buildPath = File.separator + rootDir + File.separator + studyName;
Path outputPath = new Path(buildPath + File.separator + sampleId + File.separator + fileType.toUpperCase().trim() + File.separator + fileName.trim());
try {
if (!fs.exists(outputPath)) {
System.out.println("Error: File does not exist for downloading" + fileName);
return;
}
InputStream inStream = fs.open(outputPath, 1048576);
file = new DefaultStreamedContent(inStream, "fastq/fasta/bam/sam/vcf", fileName);
} finally {
//inStream.close();
}
<<<<<<<
=======
public String onComplete() {
return "indexPage";
}
public void save(ActionEvent actionEvent) {
createStudy();
}
public String onFlowProcess(FlowEvent event) {
logger.info(event.getOldStep());
logger.info(event.getNewStep());
return event.getNewStep();
}
public void showNewStudyDialog() {
RequestContext.getCurrentInstance().update("formNewStudy");
RequestContext.getCurrentInstance().reset("formNewStudy");
RequestContext.getCurrentInstance().execute("dlgNewStudy.show()");
}
public void showNewStudyMemberDialog() {
RequestContext.getCurrentInstance().update("formNewStudyMember");
RequestContext.getCurrentInstance().reset("formNewStudyMember");
RequestContext.getCurrentInstance().execute("dlgNewStudyMember.show()");
}
>>>>>>>
<<<<<<<
/*public void deleteDataDlg() {
Map<String, Object> options = new HashMap<>();
options.put("modal", true);
options.put("draggable", false);
options.put("resizable", false);
options.put("width", 320);
options.put("contentWidth", 300);
options.put("height", 100);
RequestContext.getCurrentInstance().openDialog("confirmDelete", options, null);
}
public void closeConfirmDelete() {
RequestContext.getCurrentInstance().closeDialog(null);
}
public void onDeleteDlgDone(SelectEvent event) {
FacesMessage mess = (FacesMessage) (event.getObject());
FacesContext.getCurrentInstance().addMessage("remove", mess);
}*/
=======
public void registerFileListener(FileStructureListener listener) {
if (!fileListeners.contains(listener)) {
fileListeners.add(listener);
}
}
>>>>>>>
public void registerFileListener(FileStructureListener listener) {
if (!fileListeners.contains(listener)) {
fileListeners.add(listener);
}
}
---
<<<<<<<
=======
import java.util.ArrayList;
import java.util.Collection;
>>>>>>>
import java.util.ArrayList;
import java.util.Collection;
<<<<<<<
import org.apache.zeppelin.user.AuthenticationInfo;
=======
import org.apache.zeppelin.scheduler.JobListener;
import org.apache.zeppelin.search.SearchService;
>>>>>>>
import org.apache.zeppelin.user.AuthenticationInfo;
import org.apache.zeppelin.scheduler.JobListener;
import org.apache.zeppelin.search.SearchService;
<<<<<<<
JobListenerFactory, AngularObjectRegistryListener,
RemoteInterpreterProcessListener {
=======
JobListenerFactory, AngularObjectRegistryListener, SearchService {
>>>>>>>
SearchService,
JobListenerFactory, AngularObjectRegistryListener, RemoteInterpreterProcessListener{
<<<<<<<
private void sendAllConfigurations(Session conn,
Notebook notebook) throws IOException {
ZeppelinConfiguration conf = notebook.getConf();
Map<String, String> configurations = conf.dumpConfigurations(conf,
new ZeppelinConfiguration.ConfigurationKeyPredicate() {
@Override
public boolean apply(String key) {
return !key.contains("password") && !key.equals(
ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_AZURE_CONNECTION_STRING.
getVarName());
}
});
conn.getBasicRemote().sendText(serializeMessage(new Message(
OP.CONFIGURATIONS_INFO)
.put("configurations", configurations)));
}
private void checkpointNotebook(Session conn, Notebook notebook,
Message fromMessage) throws IOException {
String noteId = (String) fromMessage.get("noteId");
String commitMessage = (String) fromMessage.get("commitMessage");
notebook.checkpointNote(noteId, commitMessage);
}
=======
@Override
public List<Map<String, String>> query(String string) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void updateIndexDoc(Note note) throws IOException {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void addIndexDocs(Collection<Note> clctn) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void addIndexDoc(Note note) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void deleteIndexDocs(Note note) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void deleteIndexDoc(Note note, Paragraph prgrph) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void close() {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
>>>>>>>
private void sendAllConfigurations(Session conn,
Notebook notebook) throws IOException {
ZeppelinConfiguration conf = notebook.getConf();
Map<String, String> configurations = conf.dumpConfigurations(conf,
new ZeppelinConfiguration.ConfigurationKeyPredicate() {
@Override
public boolean apply(String key) {
return !key.contains("password") && !key.equals(
ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_AZURE_CONNECTION_STRING.
getVarName());
}
});
conn.getBasicRemote().sendText(serializeMessage(new Message(
OP.CONFIGURATIONS_INFO)
.put("configurations", configurations)));
}
private void checkpointNotebook(Session conn, Notebook notebook,
Message fromMessage) throws IOException {
String noteId = (String) fromMessage.get("noteId");
String commitMessage = (String) fromMessage.get("commitMessage");
notebook.checkpointNote(noteId, commitMessage);
}
@Override
public List<Map<String, String>> query(String string) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void updateIndexDoc(Note note) throws IOException {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void addIndexDocs(Collection<Note> clctn) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void addIndexDoc(Note note) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void deleteIndexDocs(Note note) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void deleteIndexDoc(Note note, Paragraph prgrph) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void close() {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
---
<<<<<<<
if (StringUtils.isEmpty(device.getType())) {
throw new DataValidationException("Device type should be specified!");
}
if (StringUtils.isEmpty(device.getName()) || device.getName().trim().length() == 0) {
=======
if (StringUtils.isEmpty(device.getName())) {
>>>>>>>
if (StringUtils.isEmpty(device.getName()) || device.getName().trim().length() == 0) {
---
<<<<<<<
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
=======
import java.util.Collections;
import java.util.Comparator;
>>>>>>>
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collections;
import java.util.Comparator;
<<<<<<<
logger.log(Level.SEVERE,
ResponseMessages.PROJECT_SERVICE_NOT_FOUND, iex);
=======
LOGGER.log(Level.SEVERE,
ResponseMessages.PROJECT_SERVICE_NOT_FOUND, iex);
>>>>>>>
LOGGER.log(Level.SEVERE,
ResponseMessages.PROJECT_SERVICE_NOT_FOUND, iex);
<<<<<<<
} catch (Exception ex) {
logger.log(Level.SEVERE, null, ex);
=======
}catch (Exception ex) {
LOGGER.log(Level.SEVERE, null, ex);
>>>>>>>
} catch (Exception ex) {
LOGGER.log(Level.SEVERE, null, ex);
<<<<<<<
|| !noExistingGroup(project.getName())
|| !verifyQuota(project.getName()) || !verifyLogs(dfso, project.
getName())
|| !noExistingCertificates(project.getName())) {
logger.log(Level.WARNING,
"some elements of project {0} already exist in the system "
+ "Possible inconsistency!",
project.getName());
=======
|| !noExistingGroup(project.getName())
|| !verifyQuota(project.getName()) || !verifyLogs(dfso, project.getName())
|| !noExistingCertificates(project.getName())) {
LOGGER.log(Level.WARNING,
"some elements of project {0} already exist in the system "
+ "Possible inconsistency!",
project.getName());
>>>>>>>
|| !noExistingGroup(project.getName())
|| !verifyQuota(project.getName()) || !verifyLogs(dfso, project.
getName())
|| !noExistingCertificates(project.getName())) {
LOGGER.log(Level.WARNING,
"some elements of project {0} already exist in the system "
+ "Possible inconsistency!",
project.getName());
<<<<<<<
logger.log(Level.INFO, "Project with name {0} already exists!",
projectName);
=======
LOGGER.log(Level.INFO, "Project with name {0} already exists!",
projectName);
>>>>>>>
LOGGER.log(Level.INFO, "Project with name {0} already exists!",
projectName);
<<<<<<<
if (projectInode != null) {
=======
if(projectInode!=null){
LOGGER.log(Level.WARNING, "project folder existing for project {0}", project.getName());
>>>>>>>
if (projectInode != null) {
LOGGER.log(Level.WARNING, "project folder existing for project {0}",
project.getName());
<<<<<<<
logger.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
"something went wrong when adding the example jar to the project");
=======
LOGGER.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"something went wrong when adding the example jar to the project");
>>>>>>>
LOGGER.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
"something went wrong when adding the example jar to the project");
<<<<<<<
logger.log(Level.WARNING,
"More than one spark-examples*.jar found in {0}.", dir.
=======
LOGGER.log(Level.WARNING,
"More than one spark-examples*.jar found in {0}.", dir.
>>>>>>>
LOGGER.log(Level.WARNING,
"More than one spark-examples*.jar found in {0}.", dir.
<<<<<<<
logger.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
"something went wrong when adding the example jar to the project");
=======
LOGGER.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"something went wrong when adding the example jar to the project");
>>>>>>>
LOGGER.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
"something went wrong when adding the example jar to the project");
<<<<<<<
logger.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
"something went wrong when adding the example jar to the project");
=======
LOGGER.log(Level.SEVERE, null, ex);
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(),
"something went wrong when adding the example jar to the project");
>>>>>>>
throw new AppException(Response.Status.INTERNAL_SERVER_ERROR.
getStatusCode(),
"something went wrong when adding the example jar to the project"); |
<<<<<<<
public boolean mkDir(String path) throws IOException {
if (!path.startsWith("/")) {
path = "/" + path;
}
String[] pathParts = path.substring(1).split("/");
for (String s : pathParts) {
try {
FolderNameValidator.isValidName(s);
} catch (ValidationException e) {
throw new IllegalArgumentException("Illegal folder name: " + s
+ ". Reason: " + e.getLocalizedMessage(), e);
}
}
=======
public boolean mkDirs(String path) throws IOException {
>>>>>>>
public boolean mkDir(String path) throws IOException {
if (!path.startsWith("/")) {
path = "/" + path;
}
String[] pathParts = path.substring(1).split("/");
for (String s : pathParts) {
try {
FolderNameValidator.isValidName(s);
} catch (ValidationException e) {
throw new IllegalArgumentException("Illegal folder name: " + s
+ ". Reason: " + e.getLocalizedMessage(), e);
}
}
<<<<<<<
mkDir(destDir);
=======
mkDirs("hdfs://" + destDir);
>>>>>>>
mkDir(destDir);
<<<<<<<
/**
* Get a list of the names of the child files (so no directories) of the given
* path.
* <p>
* @param path
* @return A list of filenames, empty if the given path does not have
* children.
*/
public List<String> getChildNames(String path) {
Inode inode = inodes.getInodeAtPath(path);
if (inode.isDir()) {
List<Inode> inodekids = inodes.getChildren(inode);
ArrayList<String> retList = new ArrayList<>(inodekids.size());
for (Inode i : inodekids) {
if (!i.isDir()) {
retList.add(i.getInodePK().getName());
}
}
return retList;
} else {
return Collections.EMPTY_LIST;
}
}
=======
/**
* Marks a file/folder in location as metadata enabled
* <p>
* @param location
* @throws IOException
*/
public void setMetaEnabled(String location) throws IOException {
Path path = new Path(location);
this.fsOps.setMetaEnabled(path);
}
/**
* Create the folder on the given path only if the parent folders exist.
* <p>
* @param path
* @return
* @throws IOException
*/
public boolean mkDir(String path) throws IOException {
Path location = new Path(path);
return fsOps.mkdir(location);
}
>>>>>>>
/**
* Get a list of the names of the child files (so no directories) of the given
* path.
* <p>
* @param path
* @return A list of filenames, empty if the given path does not have
* children.
*/
public List<String> getChildNames(String path) {
Inode inode = inodes.getInodeAtPath(path);
if (inode.isDir()) {
List<Inode> inodekids = inodes.getChildren(inode);
ArrayList<String> retList = new ArrayList<>(inodekids.size());
for (Inode i : inodekids) {
if (!i.isDir()) {
retList.add(i.getInodePK().getName());
}
}
return retList;
} else {
return Collections.EMPTY_LIST;
}
}
/**
* Marks a file/folder in location as metadata enabled
* <p>
* @param location
* @throws IOException
*/
public void setMetaEnabled(String location) throws IOException {
Path path = new Path(location);
this.fsOps.setMetaEnabled(path);
}
---
<<<<<<<
@EJB
private ExecutionInputfilesFacade execInputFilesFacade;
final Logger logger = LoggerFactory.getLogger(ExecutionController.class);
=======
>>>>>>>
@EJB
private ExecutionInputfilesFacade execInputFilesFacade;
final Logger logger = LoggerFactory.getLogger(ExecutionController.class);
<<<<<<<
Matcher m = p.matcher(path);
//execInputFilesFacade.create(execId, 8, "myFirstProject");
int mCount = m.groupCount();
if(m.groupCount()>0){
// for (int i = 0; i < m.groupCount(); i++) { // for each filename, resolve Inode from HDFS filename
// int mC;
// //String filename = m.group();
// mC = m.groupCount();
// Inode inode = inodes.getInodeAtPath(path);
// mC = m.groupCount();
// int parentID = inode.getInodePK().getParentId();
// mC = m.groupCount();
// String name = inode.getInodePK().getName();
// mC = m.groupCount();
// execInputFilesFacade.create(execId, inode.getInodePK().getParentId(), inode.getInodePK().getName());
// // insert into inputfiles_executions (inode, execId).
// }
=======
Matcher m = p.matcher(args);
for (int i = 0; i < m.groupCount(); i++) { // for each filename, resolve Inode from HDFS filename
// String filename = m.group(i);
// Inode inode = inodes.getInodeAtPath("hdfs://" + filename);
// insert into inputfiles_executions (inode, execId).
>>>>>>>
Matcher m = p.matcher(path);
//execInputFilesFacade.create(execId, 8, "myFirstProject");
int mCount = m.groupCount();
if(m.groupCount()>0){
// for (int i = 0; i < m.groupCount(); i++) { // for each filename, resolve Inode from HDFS filename
// int mC;
// //String filename = m.group();
// mC = m.groupCount();
// Inode inode = inodes.getInodeAtPath(path);
// mC = m.groupCount();
// int parentID = inode.getInodePK().getParentId();
// mC = m.groupCount();
// String name = inode.getInodePK().getName();
// mC = m.groupCount();
// execInputFilesFacade.create(execId, inode.getInodePK().getParentId(), inode.getInodePK().getName());
// // insert into inputfiles_executions (inode, execId).
// }
for (int i = 0; i < m.groupCount(); i++) { // for each filename, resolve Inode from HDFS filename
// String filename = m.group(i);
// Inode inode = inodes.getInodeAtPath("hdfs://" + filename);
// insert into inputfiles_executions (inode, execId).
}
---
<<<<<<<
import org.thingsboard.server.common.data.id.AssetId;
=======
import org.thingsboard.server.common.data.HasName;
>>>>>>>
import org.thingsboard.server.common.data.id.AssetId;
import org.thingsboard.server.common.data.HasName;
<<<<<<<
@Builder
@AllArgsConstructor
public class Alarm extends BaseData<AlarmId> {
=======
public class Alarm extends BaseData<AlarmId> implements HasName {
>>>>>>>
@Builder
@AllArgsConstructor
public class Alarm extends BaseData<AlarmId> implements HasName {
<<<<<<<
public Alarm() {
super();
}
public Alarm(AlarmId id) {
super(id);
}
=======
@Override
public String getName() {
return type;
}
>>>>>>>
public Alarm() {
super();
}
public Alarm(AlarmId id) {
super(id);
}
@Override
public String getName() {
return type;
}
---
<<<<<<<
=======
public static final String SPARK_CONFIG_FILE = "spark-defaults.conf";
public static final int SPARK_MIN_EXECS = 1;
public static final int SPARK_MAX_EXECS = 8;
public static final int SPARK_INIT_EXECS = 1;
>>>>>>>
public static final String SPARK_CONFIG_FILE = "spark-defaults.conf";
public static final int SPARK_MIN_EXECS = 1;
public static final int SPARK_MAX_EXECS = 8;
public static final int SPARK_INIT_EXECS = 1;
---
<<<<<<<
import java.nio.charset.StandardCharsets;
=======
import javax.annotation.PreDestroy;
>>>>>>>
import javax.annotation.PreDestroy;
import java.nio.charset.StandardCharsets;
<<<<<<<
TbServiceInfoProvider serviceInfoProvider,
TbQueueRemoteJsInvokeSettings jsInvokeSettings) {
=======
TbServiceInfoProvider serviceInfoProvider,
TbPubSubSubscriptionSettings pubSubSubscriptionSettings) {
>>>>>>>
TbServiceInfoProvider serviceInfoProvider,
TbPubSubSubscriptionSettings pubSubSubscriptionSettings,
TbQueueRemoteJsInvokeSettings jsInvokeSettings) {
<<<<<<<
this.jsInvokeSettings = jsInvokeSettings;
=======
this.coreAdmin = new TbPubSubAdmin(pubSubSettings, pubSubSubscriptionSettings.getCoreSettings());
this.ruleEngineAdmin = new TbPubSubAdmin(pubSubSettings, pubSubSubscriptionSettings.getRuleEngineSettings());
this.jsExecutorAdmin = new TbPubSubAdmin(pubSubSettings, pubSubSubscriptionSettings.getJsExecutorSettings());
this.transportApiAdmin = new TbPubSubAdmin(pubSubSettings, pubSubSubscriptionSettings.getTransportApiSettings());
this.notificationAdmin = new TbPubSubAdmin(pubSubSettings, pubSubSubscriptionSettings.getNotificationsSettings());
>>>>>>>
this.coreAdmin = new TbPubSubAdmin(pubSubSettings, pubSubSubscriptionSettings.getCoreSettings());
this.ruleEngineAdmin = new TbPubSubAdmin(pubSubSettings, pubSubSubscriptionSettings.getRuleEngineSettings());
this.jsExecutorAdmin = new TbPubSubAdmin(pubSubSettings, pubSubSubscriptionSettings.getJsExecutorSettings());
this.transportApiAdmin = new TbPubSubAdmin(pubSubSettings, pubSubSubscriptionSettings.getTransportApiSettings());
this.notificationAdmin = new TbPubSubAdmin(pubSubSettings, pubSubSubscriptionSettings.getNotificationsSettings());
this.jsInvokeSettings = jsInvokeSettings;
---
<<<<<<<
import org.apache.oozie.client.OozieClient;
=======
import org.apache.zookeeper.ZooKeeper;
>>>>>>>
import org.apache.oozie.client.OozieClient;
import org.apache.zookeeper.ZooKeeper;
<<<<<<<
private static long INTERVAL_MS_BETWEEN_SERVICE_CHECKS = 30 * 1000l;
=======
private static long INTERVAL_MS_BETWEEN_SERVICE_CHECKS = 10 * 1000l;
public int zkSessionTimeoutMs = 30 * 1000;//30 seconds
>>>>>>>
private static long INTERVAL_MS_BETWEEN_SERVICE_CHECKS = 30 * 1000l;
public int zkSessionTimeoutMs = 30 * 1000;//30 seconds
<<<<<<<
// Check P2P Downloader
// TODO - Call some REST API
// Check Livy
// TODO
// Check Oozie
// TODO
try{
OozieClient oozieClient = new OozieClient("http://" + this.settings.getOozieIp() + ":11000/oozie/");
oozieClient.getSystemMode();
this.oozie = true;
}catch(Exception e){
this.oozie = false;
}
}
=======
try {
//Check Kafka
Set<String> kafkaBrokerEndpoints = kafkaFacade.getBrokerEndpoints();
if (!kafkaBrokerEndpoints.isEmpty()) {
kafka = true;
} else {
kafka = false;
}
} catch (AppException ex) {
kafka = false;
}
try {
ZooKeeper zk = new ZooKeeper(settings.getZkConnectStr(), zkSessionTimeoutMs, null);
zookeeper = true;
} catch (IOException ex) {
zookeeper = false;
}
}
>>>>>>>
// Check P2P Downloader
// TODO - Call some REST API
// Check Livy
// TODO
// Check Oozie
// TODO
try{
OozieClient oozieClient = new OozieClient("http://" + this.settings.getOozieIp() + ":11000/oozie/");
oozieClient.getSystemMode();
this.oozie = true;
}catch(Exception e){
this.oozie = false;
}
}
try {
//Check Kafka
Set<String> kafkaBrokerEndpoints = kafkaFacade.getBrokerEndpoints();
if (!kafkaBrokerEndpoints.isEmpty()) {
kafka = true;
} else {
kafka = false;
}
} catch (AppException ex) {
kafka = false;
}
try {
ZooKeeper zk = new ZooKeeper(settings.getZkConnectStr(), zkSessionTimeoutMs, null);
zookeeper = true;
} catch (IOException ex) {
zookeeper = false;
}
}
---
<<<<<<<
@EJB
private ProjectFacade projectFacade;
@EJB
private ProjectController projectController;
@EJB
private NoCacheResponse noCacheResponse;
@Inject
private ProjectMembers projectMembers;
@Inject
private KafkaService kafka;
@Inject
private DataSetService dataSet;
@Inject
private LocalFsService localFs;
@Inject
private JobService jobs;
@Inject
private BiobankingService biobanking;
@Inject
private CharonService charon;
@EJB
private DatasetFacade datasetFacade;
@EJB
private InodeFacade inodes;
@EJB
private HdfsUsersController hdfsUsersBean;
@EJB
private ActivityFacade activityController;
@EJB
private UsersController usersController;
private final static Logger logger = Logger.getLogger(ProjectService.class.
getName());
@GET
@Produces(MediaType.APPLICATION_JSON)
@AllowedRoles(roles = {AllowedRoles.ALL})
public Response findAllByUser(@Context SecurityContext sc,
@Context HttpServletRequest req) {
// Get the user according to current session and then get all its projects
String email = sc.getUserPrincipal().getName();
List<ProjectTeam> list = projectController.findProjectByUser(email);
GenericEntity<List<ProjectTeam>> projects
= new GenericEntity<List<ProjectTeam>>(list) {
};
return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(
projects).build();
}
@GET
@Path("/getAll")
@Produces(MediaType.APPLICATION_JSON)
@AllowedRoles(roles = {AllowedRoles.ALL})
public Response getAllProjects(@Context SecurityContext sc,
@Context HttpServletRequest req) {
List<Project> list = projectFacade.findAll();
GenericEntity<List<Project>> projects
= new GenericEntity<List<Project>>(list) {
};
return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(
projects).build();
}
@GET
@Path("/getProjectInfo/{projectName}")
@Produces(MediaType.APPLICATION_JSON)
@AllowedRoles(roles = {AllowedRoles.ALL})
public Response getProjectByName(@PathParam("projectName") String projectName,
@Context SecurityContext sc,
@Context HttpServletRequest req) throws AppException {
ProjectDTO proj = projectController.getProjectByName(projectName);
return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(
proj).build();
}
@GET
@Path("getDatasetInfo/{inodeId}")
@Produces(MediaType.APPLICATION_JSON)
@AllowedRoles(roles = {AllowedRoles.ALL})
public Response getDatasetInfo(
@PathParam("inodeId") Integer inodeId,
@Context SecurityContext sc,
@Context HttpServletRequest req) throws AppException {
Inode inode = inodes.findById(inodeId);
if (inode == null) {
throw new AppException(Response.Status.BAD_REQUEST.getStatusCode(),
ResponseMessages.DATASET_NOT_FOUND);
=======
@EJB
private ProjectFacade projectFacade;
@EJB
private ProjectController projectController;
@EJB
private NoCacheResponse noCacheResponse;
@Inject
private ProjectMembers projectMembers;
@Inject
private DataSetService dataSet;
@Inject
private LocalFsService localFs;
@Inject
private JobService jobs;
@Inject
private BiobankingService biobanking;
@Inject
private CharonService charon;
@EJB
private ActivityFacade activityFacade;
@EJB
private DatasetFacade datasetFacade;
@EJB
private DatasetController datasetController;
@EJB
private InodeFacade inodes;
@EJB
private HdfsUsersController hdfsUsersBean;
@EJB
private ActivityFacade activityController;
@EJB
private UsersController usersController;
@EJB
private UserManager userManager;
private final static Logger logger = Logger.getLogger(ProjectService.class.
getName());
@GET
@Produces(MediaType.APPLICATION_JSON)
@AllowedRoles(roles = {AllowedRoles.ALL})
public Response findAllByUser(@Context SecurityContext sc,
@Context HttpServletRequest req) {
// Get the user according to current session and then get all its projects
String email = sc.getUserPrincipal().getName();
List<ProjectTeam> list = projectController.findProjectByUser(email);
GenericEntity<List<ProjectTeam>> projects
= new GenericEntity<List<ProjectTeam>>(list) {
};
return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(
projects).build();
>>>>>>>
@EJB
private ProjectFacade projectFacade;
@EJB
private ProjectController projectController;
@EJB
private NoCacheResponse noCacheResponse;
@Inject
private ProjectMembers projectMembers;
@Inject
private KafkaService kafka;
@Inject
private DataSetService dataSet;
@Inject
private LocalFsService localFs;
@Inject
private JobService jobs;
@Inject
private BiobankingService biobanking;
@Inject
private CharonService charon;
@EJB
private ActivityFacade activityFacade;
@EJB
private DatasetFacade datasetFacade;
@EJB
private DatasetController datasetController;
@EJB
private InodeFacade inodes;
@EJB
private HdfsUsersController hdfsUsersBean;
@EJB
private ActivityFacade activityController;
@EJB
private UsersController usersController;
@EJB
private UserManager userManager;
private final static Logger logger = Logger.getLogger(ProjectService.class.
getName());
@GET
@Produces(MediaType.APPLICATION_JSON)
@AllowedRoles(roles = {AllowedRoles.ALL})
public Response findAllByUser(@Context SecurityContext sc,
@Context HttpServletRequest req) {
// Get the user according to current session and then get all its projects
String email = sc.getUserPrincipal().getName();
List<ProjectTeam> list = projectController.findProjectByUser(email);
GenericEntity<List<ProjectTeam>> projects
= new GenericEntity<List<ProjectTeam>>(list) {
};
return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(
projects).build();
<<<<<<<
// Update the retention period if it has been changed
if (project.getRetentionPeriod() == null || !project.getRetentionPeriod().equals(
projectDTO.getRetentionPeriod())) {
projectController.updateProject(project, projectDTO, userEmail);
activityController.persistActivity("Changed retention period to " + projectDTO.getRetentionPeriod(), project, userEmail);
json.setSuccessMessage(ResponseMessages.PROJECT_RETENTON_CHANGED);
updated = true;
}
=======
@GET
@Path("/getProjectInfo/{projectName}")
@Produces(MediaType.APPLICATION_JSON)
@AllowedRoles(roles = {AllowedRoles.ALL})
public Response getProjectByName(@PathParam("projectName") String projectName,
@Context SecurityContext sc,
@Context HttpServletRequest req) throws AppException {
>>>>>>>
@GET
@Path("/getProjectInfo/{projectName}")
@Produces(MediaType.APPLICATION_JSON)
@AllowedRoles(roles = {AllowedRoles.ALL})
public Response getProjectByName(@PathParam("projectName") String projectName,
@Context SecurityContext sc,
@Context HttpServletRequest req) throws AppException {
---
<<<<<<<
/*
* Copyright (C) 2012-2019 52°North Initiative for Geospatial Open Source
=======
/**
* Copyright (C) 2012-2020 52°North Initiative for Geospatial Open Source
>>>>>>>
/*
* Copyright (C) 2012-2020 52°North Initiative for Geospatial Open Source
---
<<<<<<<
//TODO: runnerbuilder.setExtraFiles(config.getExtraFiles());
=======
runnerbuilder.setSessionId(jobconfig.getSessionId());
>>>>>>>
runnerbuilder.setSessionId(jobconfig.getSessionId());
<<<<<<<
=======
if(jobSystemProperties != null && !jobSystemProperties.isEmpty()){
for(Entry<String,String> jobSystemProperty: jobSystemProperties.entrySet()){
runnerbuilder.addSystemProperty(jobSystemProperty.getKey(), jobSystemProperty.getValue());
}
}
>>>>>>>
if(jobSystemProperties != null && !jobSystemProperties.isEmpty()){
for(Entry<String,String> jobSystemProperty: jobSystemProperties.entrySet()){
runnerbuilder.addSystemProperty(jobSystemProperty.getKey(), jobSystemProperty.getValue());
}
}
---
<<<<<<<
import java.util.Iterator;
=======
import java.util.HashSet;
>>>>>>>
import java.util.HashSet;
import java.util.Iterator;
<<<<<<<
import java.util.Optional;
=======
import java.util.Set;
>>>>>>>
import java.util.Optional;
import java.util.Set;
---
<<<<<<<
public List<YarnApplicationstate> findByAppuserAndAppState(String appUser,
String appState) {
TypedQuery<YarnApplicationstate> query = em.createNamedQuery(
"YarnApplicationstate.findByAppuserAndAppsmstate",
YarnApplicationstate.class).setParameter("appuser", appUser).
setParameter("appsmstate", appState);
return query.getResultList();
}
=======
public YarnApplicationstate findByAppId(String appId) {
TypedQuery<YarnApplicationstate> query = em.createNamedQuery("YarnApplicationstate.findByApplicationid",
YarnApplicationstate.class).setParameter(
"applicationid", appId);
return query.getSingleResult();
}
>>>>>>>
public List<YarnApplicationstate> findByAppuserAndAppState(String appUser,
String appState) {
TypedQuery<YarnApplicationstate> query = em.createNamedQuery(
"YarnApplicationstate.findByAppuserAndAppsmstate",
YarnApplicationstate.class).setParameter("appuser", appUser).
setParameter("appsmstate", appState);
return query.getResultList();
}
public YarnApplicationstate findByAppId(String appId) {
TypedQuery<YarnApplicationstate> query = em.createNamedQuery("YarnApplicationstate.findByApplicationid",
YarnApplicationstate.class).setParameter(
"applicationid", appId);
return query.getSingleResult();
}
---
<<<<<<<
// Status of new Yubikey users requests
YUBIKEY_ACCOUNT_INACTIVE(1),
=======
// Status of new Mobile users requests
// Status of new Mobile users requests
NEW_MOBILE_ACCOUNT(1),
>>>>>>>
// Status of new Mobile users requests
NEW_MOBILE_ACCOUNT(1),
<<<<<<<
ACCOUNT_ACTIVATED(4),
=======
ACTIVATED_ACCOUNT(4),
// Users that are no longer granted to access the platform.
// Users with this state, can not login, change password even as guest users
DEACTIVATED_ACCOUNT(5),
>>>>>>>
ACTIVATED_ACCOUNT(4),
// Users that are no longer granted to access the platform.
// Users with this state, can not login, change password even as guest users
DEACTIVATED_ACCOUNT(5),
---
<<<<<<<
import se.kth.bbc.study.fb.Inode;
import se.kth.bbc.study.fb.InodeFacade;
=======
import se.kth.bbc.study.privacy.StudyPrivacyManager;
import se.kth.bbc.study.privacy.model.Consent;
>>>>>>>
import se.kth.bbc.study.fb.Inode;
import se.kth.bbc.study.fb.InodeFacade;
import se.kth.bbc.study.privacy.StudyPrivacyManager;
import se.kth.bbc.study.privacy.model.Consent;
<<<<<<<
@EJB
private InodeFacade inodes;
=======
@EJB
private StudyPrivacyManager privacyManager;
>>>>>>>
@EJB
private InodeFacade inodes;
@EJB
private StudyPrivacyManager privacyManager; |
<<<<<<<
import java.util.HashSet;
=======
import java.io.StringReader;
import java.util.ArrayList;
>>>>>>>
import java.util.HashSet;
import java.io.StringReader;
import java.util.ArrayList;
<<<<<<<
import javax.xml.crypto.Data;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ArrayNode;
import org.codehaus.jackson.node.ObjectNode;
=======
import org.apache.hadoop.security.AccessControlException;
import se.kth.bbc.project.Project;
import se.kth.bbc.project.ProjectFacade;
import se.kth.bbc.project.ProjectTeamFacade;
>>>>>>>
import javax.xml.crypto.Data;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ArrayNode;
import org.codehaus.jackson.node.ObjectNode;
import org.apache.hadoop.security.AccessControlException;
import se.kth.bbc.project.Project;
import se.kth.bbc.project.ProjectFacade;
import se.kth.bbc.project.ProjectTeamFacade; |
<<<<<<<
zeppelinConfFactory.deleteZeppelinConfDir(project.getName());
=======
//projectPaymentsHistoryFacade.remove(projectPaymentsHistory);
yarnProjectsQuotaFacade.remove(yarnProjectsQuota);
>>>>>>>
zeppelinConfFactory.deleteZeppelinConfDir(project.getName());
//projectPaymentsHistoryFacade.remove(projectPaymentsHistory);
yarnProjectsQuotaFacade.remove(yarnProjectsQuota); |
<<<<<<<
@EJB
private ZeppelinConfigFactory zeppelinConfFactory;
=======
@EJB
private HdfsLeDescriptorsFacade hdfsLeDescriptorFacade;
>>>>>>>
@EJB
private ZeppelinConfigFactory zeppelinConfFactory;
@EJB
private HdfsLeDescriptorsFacade hdfsLeDescriptorFacade; |
<<<<<<<
=======
ui = ui.replaceAll(
"<li><a ui-sref=\"submit\"[\\s\\S]+new Job</a></li>", "");
>>>>>>>
ui = ui.replaceAll(
"<li><a ui-sref=\"submit\"[\\s\\S]+new Job</a></li>", ""); |
<<<<<<<
public static final String REMOVED_STUDY = " removed study ";
=======
public static final String REMOVED_SAMPLE = " removed a sample ";
public static final String REMOVED_FILE = " removed a file ";
>>>>>>>
public static final String REMOVED_SAMPLE = " removed a sample ";
public static final String REMOVED_FILE = " removed a file ";
public static final String REMOVED_STUDY = " removed study "; |
<<<<<<<
Users user = new Users(uid);
=======
// add the guest default role so a user can still browse the platform
groups.add(groupBean.findByGroupName(BBCGroup.BBC_GUEST.name()));
Users user = new Users();
>>>>>>>
Users user = new Users();
<<<<<<<
Users user = new Users(uid);
=======
// add the guest default role so a user can still browse the platform
groups.add(groupBean.findByGroupName(BBCGroup.BBC_GUEST.name()));
Users user = new Users();
>>>>>>>
Users user = new Users(); |
<<<<<<<
=======
private String kafkaAddress;
>>>>>>>
private String kafkaAddress; |
<<<<<<<
import se.kth.hopsworks.hdfs.fileoperations.DistributedFsService;
=======
import se.kth.hopsworks.hdfsUsers.controller.HdfsUsersController;
>>>>>>>
import se.kth.hopsworks.hdfs.fileoperations.DistributedFsService;
import se.kth.hopsworks.hdfsUsers.controller.HdfsUsersController;
<<<<<<<
@EJB
private DistributedFsService dfs;
=======
@EJB
private Settings settings;
@EJB
private YarnApplicationstateFacade yarnApplicationstateFacade;
@EJB
private HdfsUsersController hdfsUsersBean;
>>>>>>>
@EJB
private DistributedFsService dfs;
@EJB
private Settings settings;
@EJB
private YarnApplicationstateFacade yarnApplicationstateFacade;
@EJB
private HdfsUsersController hdfsUsersBean;
<<<<<<<
=======
private boolean hasAppAccessRight(String trackingUrl, JobDescription job){
String appId ="";
if(trackingUrl.contains("application_")){
for(String elem: trackingUrl.split("/")){
if(elem.contains("application_")){
appId = elem;
break;
}
}
}else if (trackingUrl.contains("container_")){
appId ="application_";
for(String elem: trackingUrl.split("/")){
if(elem.contains("container_")){
String[] containerIdElem = elem.split("_");
appId = appId + containerIdElem[1] + "_" + containerIdElem[2];
break;
}
}
}
if (appId != "") {
String appUser = yarnApplicationstateFacade.findByAppId(appId).
getAppuser();
if (!job.getProject().getName().equals(hdfsUsersBean.getProjectName(
appUser))) {
return false;
}
}
return true;
}
>>>>>>>
private boolean hasAppAccessRight(String trackingUrl, JobDescription job){
String appId ="";
if(trackingUrl.contains("application_")){
for(String elem: trackingUrl.split("/")){
if(elem.contains("application_")){
appId = elem;
break;
}
}
}else if (trackingUrl.contains("container_")){
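      // Container URLs do not carry the application id directly; rebuild it from the container id segments.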
appId ="application_";
for(String elem: trackingUrl.split("/")){
if(elem.contains("container_")){
String[] containerIdElem = elem.split("_");
appId = appId + containerIdElem[1] + "_" + containerIdElem[2];
break;
}
}
}
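    // Only check project ownership when an application id could be extracted from the tracking URL.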
if (appId != "") {
String appUser = yarnApplicationstateFacade.findByAppId(appId).
getAppuser();
if (!job.getProject().getName().equals(hdfsUsersBean.getProjectName(
appUser))) {
return false;
}
}
return true;
} |
<<<<<<<
import org.thingsboard.server.queue.pubsub.TbPubSubConsumerTemplate;
import org.thingsboard.server.queue.pubsub.TbPubSubProducerTemplate;
=======
import org.thingsboard.server.queue.rabbitmq.TbRabbitMqAdmin;
>>>>>>>
import org.thingsboard.server.queue.rabbitmq.TbRabbitMqAdmin;
<<<<<<<
import java.nio.charset.StandardCharsets;
=======
import javax.annotation.PreDestroy;
>>>>>>>
import javax.annotation.PreDestroy;
import java.nio.charset.StandardCharsets;
<<<<<<<
private final TbQueueRemoteJsInvokeSettings jsInvokeSettings;
private final TbQueueAdmin admin;
=======
private final TbQueueAdmin coreAdmin;
private final TbQueueAdmin ruleEngineAdmin;
private final TbQueueAdmin jsExecutorAdmin;
private final TbQueueAdmin transportApiAdmin;
private final TbQueueAdmin notificationAdmin;
>>>>>>>
private final TbQueueAdmin coreAdmin;
private final TbQueueAdmin ruleEngineAdmin;
private final TbQueueAdmin jsExecutorAdmin;
private final TbQueueAdmin transportApiAdmin;
private final TbQueueAdmin notificationAdmin;
private final TbQueueRemoteJsInvokeSettings jsInvokeSettings;
<<<<<<<
TbQueueRemoteJsInvokeSettings jsInvokeSettings,
TbQueueAdmin admin) {
=======
TbRabbitMqQueueArguments queueArguments) {
>>>>>>>
TbRabbitMqQueueArguments queueArguments,
TbQueueRemoteJsInvokeSettings jsInvokeSettings) {
<<<<<<<
this.jsInvokeSettings = jsInvokeSettings;
this.admin = admin;
=======
this.coreAdmin = new TbRabbitMqAdmin(rabbitMqSettings, queueArguments.getCoreArgs());
this.ruleEngineAdmin = new TbRabbitMqAdmin(rabbitMqSettings, queueArguments.getRuleEngineArgs());
this.jsExecutorAdmin = new TbRabbitMqAdmin(rabbitMqSettings, queueArguments.getJsExecutorArgs());
this.transportApiAdmin = new TbRabbitMqAdmin(rabbitMqSettings, queueArguments.getTransportApiArgs());
this.notificationAdmin = new TbRabbitMqAdmin(rabbitMqSettings, queueArguments.getNotificationsArgs());
>>>>>>>
this.coreAdmin = new TbRabbitMqAdmin(rabbitMqSettings, queueArguments.getCoreArgs());
this.ruleEngineAdmin = new TbRabbitMqAdmin(rabbitMqSettings, queueArguments.getRuleEngineArgs());
this.jsExecutorAdmin = new TbRabbitMqAdmin(rabbitMqSettings, queueArguments.getJsExecutorArgs());
this.transportApiAdmin = new TbRabbitMqAdmin(rabbitMqSettings, queueArguments.getTransportApiArgs());
this.notificationAdmin = new TbRabbitMqAdmin(rabbitMqSettings, queueArguments.getNotificationsArgs());
this.jsInvokeSettings = jsInvokeSettings; |
<<<<<<<
List<Set<CategoryType>> charCategories, List<Integer> charCategoryContinuities,
List<Boolean> canBowList) {
=======
List<EnumSet<CategoryType>> charCategories, List<Integer> charCategoryContinuities) {
>>>>>>>
List<EnumSet<CategoryType>> charCategories, List<Integer> charCategoryContinuities,
List<Boolean> canBowList) { |
<<<<<<<
MessageSyncHttpClient msgSyncHttpClient = new MessageSyncHttpClient(context, syncUrl);
final boolean posted = msgSyncHttpClient
.postSmsToWebService(message, Util.getPhoneNumber(context));
=======
MessageSyncHttpClient client = new MessageSyncHttpClient(
context, syncUrl, message, Util.getPhoneNumber(context)
);
final boolean posted = client.postSmsToWebService();
>>>>>>>
MessageSyncHttpClient client = new MessageSyncHttpClient(
context, syncUrl, message, Util.getPhoneNumber(context)
);
final boolean posted = client.postSmsToWebService();
<<<<<<<
log(context.getString(R.string.sms_sent_to_webserivce, message.getBody(),
syncUrl.getUrl()));
smsServerResponse(msgSyncHttpClient.getServerSuccessResp());
=======
smsServerResponse(client.getServerSuccessResp());
>>>>>>>
log(context.getString(R.string.sms_sent_to_webserivce, message.getBody(),
syncUrl.getUrl()));
smsServerResponse(client.getServerSuccessResp()); |
<<<<<<<
@Test
public void testBoundaryErrorEventSubProcessExceptionMapping() throws Exception {
KieBase kbase = createKnowledgeBase("BPMN2-BoundaryErrorEventSubProcessExceptionMapping.bpmn2");
ksession = createKnowledgeSession(kbase);
ExceptionWorkItemHandler handler = new ExceptionWorkItemHandler();
ksession.getWorkItemManager().registerWorkItemHandler("Human Task", handler);
ProcessInstance processInstance = ksession
.startProcess("com.sample.bpmn.hello");
assertEquals("java.lang.RuntimeException", getProcessVarValue(processInstance, "var1"));
}
=======
@Test
public void testBoundaryErrorEventStructureRef() throws Exception {
KieBase kbase = createKnowledgeBase("BPMN2-BoundaryErrorEventStructureRef.bpmn2");
ksession = createKnowledgeSession(kbase);
ExceptionWorkItemHandler handler = new ExceptionWorkItemHandler();
ksession.getWorkItemManager().registerWorkItemHandler("Human Task", handler);
ProcessInstance processInstance = ksession
.startProcess("com.sample.bpmn.hello");
assertNodeTriggered(processInstance.getId(), "Start", "User Task", "MyBoundaryErrorEvent");
}
>>>>>>>
@Test
public void testBoundaryErrorEventSubProcessExceptionMapping() throws Exception {
KieBase kbase = createKnowledgeBase("BPMN2-BoundaryErrorEventSubProcessExceptionMapping.bpmn2");
ksession = createKnowledgeSession(kbase);
ExceptionWorkItemHandler handler = new ExceptionWorkItemHandler();
ksession.getWorkItemManager().registerWorkItemHandler("Human Task", handler);
ProcessInstance processInstance = ksession
.startProcess("com.sample.bpmn.hello");
assertEquals("java.lang.RuntimeException", getProcessVarValue(processInstance, "var1"));
}
@Test
public void testBoundaryErrorEventStructureRef() throws Exception {
KieBase kbase = createKnowledgeBase("BPMN2-BoundaryErrorEventStructureRef.bpmn2");
ksession = createKnowledgeSession(kbase);
ExceptionWorkItemHandler handler = new ExceptionWorkItemHandler();
ksession.getWorkItemManager().registerWorkItemHandler("Human Task", handler);
ProcessInstance processInstance = ksession
.startProcess("com.sample.bpmn.hello");
assertNodeTriggered(processInstance.getId(), "Start", "User Task", "MyBoundaryErrorEvent");
} |
<<<<<<<
=======
private String domainName;
private ServicesSessionManager sessionManager;
>>>>>>>
private String domainName;
private ServicesSessionManager sessionManager;
<<<<<<<
public void setIdentity(IdentityProvider identity) {
this.identity = identity;
=======
public void setSessionManager(ServicesSessionManager sessionManager) {
this.sessionManager = sessionManager;
>>>>>>>
public void setSessionManager(ServicesSessionManager sessionManager) {
this.sessionManager = sessionManager; |
<<<<<<<
Configuration conf = getConfig();
//build up the initial cluster specification
=======
// build up the initial cluster specification
>>>>>>>
Configuration conf = getConfig();
// build up the initial cluster specification
<<<<<<<
//propagate the filename into the 1.x and 2.x value
String fsDefaultName = conf.get(
CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
clusterSpec.setOptionifUnset(OptionKeys.OPTION_SITE_PREFIX +
CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY,
fsDefaultName);
clusterSpec.setOptionifUnset(OptionKeys.OPTION_SITE_PREFIX +
HoyaKeys.FS_DEFAULT_NAME_CLASSIC,
fsDefaultName);
//patch in the properties related to the principals extracted from
//the running hoya client
propagatePrincipals(clusterSpec, conf);
=======
>>>>>>>
//propagate the filename into the 1.x and 2.x value
String fsDefaultName = conf.get(
CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
clusterSpec.setOptionifUnset(OptionKeys.OPTION_SITE_PREFIX +
CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY,
fsDefaultName);
clusterSpec.setOptionifUnset(OptionKeys.OPTION_SITE_PREFIX +
HoyaKeys.FS_DEFAULT_NAME_CLASSIC,
fsDefaultName);
<<<<<<<
//another sanity check before the cluster dir is created: the config
//dir
FileSystem srcFS = FileSystem.get(appconfdir.toUri(), conf);
=======
// another sanity check before the cluster dir is created: the config
// dir
FileSystem srcFS = FileSystem.get(appconfdir.toUri(), getConfig());
>>>>>>>
// another sanity check before the cluster dir is created: the config
// dir
FileSystem srcFS = FileSystem.get(appconfdir.toUri(), conf);
<<<<<<<
//bulk copy
//first the original from wherever to the DFS
HoyaUtils.copyDirectory(conf, appconfdir, origConfPath);
//then build up the generated path. This d
HoyaUtils.copyDirectory(conf, origConfPath, generatedConfPath);
=======
// bulk copy
// first the original from wherever to the DFS
HoyaUtils.copyDirectory(getConfig(), appconfdir, origConfPath);
// then build up the generated path. This d
HoyaUtils.copyDirectory(getConfig(), origConfPath, generatedConfPath);
>>>>>>>
// bulk copy
// first the original from wherever to the DFS
HoyaUtils.copyDirectory(conf, appconfdir, origConfPath);
// then build up the generated path. This d
HoyaUtils.copyDirectory(conf, origConfPath, generatedConfPath);
<<<<<<<
//build the environment
Map<String, String> env =
HoyaUtils.buildEnvMap(clusterSpec.getOrAddRole(HoyaKeys.ROLE_MASTER));
=======
// build the environment
Map<String, String> env = HoyaUtils.buildEnvMap(clusterSpec.getOrAddRole("master"));
>>>>>>>
// build the environment
Map<String, String> env =
HoyaUtils.buildEnvMap(clusterSpec.getOrAddRole(HoyaKeys.ROLE_MASTER)); |
<<<<<<<
import javax.swing.*;
import java.util.ArrayList;
=======
>>>>>>>
import java.util.ArrayList;
<<<<<<<
private OptionParser parser;
private OptionSet optionSet;
private BootLogger bootLogger;
private String commandName;
public JoptCli(BootLogger bootLogger, OptionParser parser, OptionSet parsed, String commandName) {
this.parser = parser;
this.optionSet = parsed;
this.bootLogger = bootLogger;
this.commandName = commandName;
}
@Override
public String commandName() {
return commandName;
}
@Override
public boolean hasOption(String optionName) {
return optionSet.has(optionName);
}
@Override
public List<String> optionStrings(String name) {
return optionSet.valuesOf(name).stream().map(o -> String.valueOf(o)).collect(toList());
}
@SuppressWarnings("unchecked")
@Override
public List<String> standaloneArguments() {
return (List<String>) optionSet.nonOptionArguments();
}
@Override
public List<OptionSpec<?>> detectedOptions() {
return optionSet != null ? optionSet.specs() : new ArrayList<>();
}
=======
private OptionSet optionSet;
private String commandName;
public JoptCli(OptionSet optionSet, String commandName) {
this.optionSet = optionSet;
this.commandName = commandName;
}
@Override
public String commandName() {
return commandName;
}
@Override
public boolean hasOption(String optionName) {
return optionSet.has(optionName);
}
@Override
public List<String> optionStrings(String name) {
return optionSet.valuesOf(name).stream().map(o -> String.valueOf(o)).collect(toList());
}
@SuppressWarnings("unchecked")
@Override
public List<String> standaloneArguments() {
return (List<String>) optionSet.nonOptionArguments();
}
>>>>>>>
private OptionSet optionSet;
private String commandName;
public JoptCli(OptionSet parsed, String commandName) {
this.optionSet = parsed;
this.commandName = commandName;
}
@Override
public String commandName() {
return commandName;
}
@Override
public boolean hasOption(String optionName) {
return optionSet.has(optionName);
}
@Override
public List<String> optionStrings(String name) {
return optionSet.valuesOf(name).stream().map(o -> String.valueOf(o)).collect(toList());
}
@SuppressWarnings("unchecked")
@Override
public List<String> standaloneArguments() {
return (List<String>) optionSet.nonOptionArguments();
}
@Override
public List<OptionSpec<?>> detectedOptions() {
return optionSet != null ? optionSet.specs() : new ArrayList<>();
} |
<<<<<<<
sp.store(sp.minus(8), Offset.zero().plus(24)); // FP
contextRegisters.ip = ip;
contextRegisters.fp = sp.plus(24);
VM.sysWriteln("fp: ", contextRegisters.fp);
=======
sp.store(sp.minus(8), Offset.zero().plus(24)); // FP
sp.store(THREAD_START_METHOD_ID, Offset.zero().plus(28)); // thread start method id
VM.sysWriteln("sp2: ", sp);
>>>>>>>
sp.store(sp.minus(8), Offset.zero().plus(24)); // FP
sp.store(THREAD_START_METHOD_ID, Offset.zero().plus(28)); // thread start method id
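    // Point the context registers at the entry instruction and the frame pointer slot just written.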
contextRegisters.ip = ip;
contextRegisters.fp = sp.plus(24);
VM.sysWriteln("fp: ", contextRegisters.fp); |
<<<<<<<
import com.akiban.ais.model.TableIndex;
=======
import com.akiban.sql.optimizer.SimplifiedSelectQuery.*;
>>>>>>>
import com.akiban.ais.model.TableIndex;
import com.akiban.sql.optimizer.SimplifiedSelectQuery.*;
<<<<<<<
Set<BinaryOperatorNode> indexConditions = new HashSet<BinaryOperatorNode>();
TableIndex index = null;
if (select.getWhereClause() != null) {
// TODO: Put ColumnReferences on the left of any condition with constant in WHERE,
// changing operand as necessary.
index = pickBestIndex(tables, select.getWhereClause(), indexConditions);
}
=======
// Try to use an index.
IndexUsage index = pickBestIndex(squery);
if (squery.getSortColumns() != null)
throw new UnsupportedSQLException("Unsupported ORDER BY");
Set<ColumnCondition> indexConditions = null;
>>>>>>>
// Try to use an index.
IndexUsage index = pickBestIndex(squery);
if (squery.getSortColumns() != null)
throw new UnsupportedSQLException("Unsupported ORDER BY");
Set<ColumnCondition> indexConditions = null;
<<<<<<<
// TODO: Too much work here dealing with multiple conditions that
// could have been reconciled earlier as part of normalization.
// Not general enough to handle expressions that actually compute, rather
// than fetching a field, constant or parameter.
protected IndexKeyRange getIndexKeyRange(TableIndex index,
Set<BinaryOperatorNode> indexConditions)
throws StandardException {
List<IndexColumn> indexColumns = index.getColumns();
int nkeys = indexColumns.size();
Expression[] keys = new Expression[nkeys];
Expression[] lb = null, ub = null;
boolean lbinc = false, ubinc = false;
for (int i = 0; i < nkeys; i++) {
IndexColumn indexColumn = indexColumns.get(i);
Column column = indexColumn.getColumn();
Expression eqExpr = null, ltExpr = null, gtExpr = null;
Comparison ltOp = null, gtOp = null;
boolean ltConstant = false, gtConstant = false;
for (BinaryOperatorNode condition : indexConditions) {
Expression expr = null;
boolean reverse = false;
boolean[] isConstant = new boolean[1];
if (matchColumnReference(column, condition.getLeftOperand())) {
expr = getIndexComparand(condition.getRightOperand(), isConstant);
}
else if (matchColumnReference(column, condition.getRightOperand())) {
expr = getIndexComparand(condition.getLeftOperand(), isConstant);
reverse = true;
}
if (expr == null)
continue;
Comparison op;
switch (condition.getNodeType()) {
case NodeTypes.BINARY_EQUALS_OPERATOR_NODE:
op = Comparison.EQ;
break;
case NodeTypes.BINARY_GREATER_THAN_OPERATOR_NODE:
op = (reverse) ? Comparison.LT : Comparison.GT;
break;
case NodeTypes.BINARY_GREATER_EQUALS_OPERATOR_NODE:
op = (reverse) ? Comparison.LE : Comparison.GE;
break;
case NodeTypes.BINARY_LESS_THAN_OPERATOR_NODE:
op = (reverse) ? Comparison.GT : Comparison.LT;
break;
case NodeTypes.BINARY_LESS_EQUALS_OPERATOR_NODE:
op = (reverse) ? Comparison.GE : Comparison.LE;
break;
default:
continue;
}
switch (op) {
case EQ:
if (eqExpr == null)
eqExpr = expr;
else if (!eqExpr.equals(expr))
throw new StandardException("Conflicting equality conditions.");
break;
case LT:
case LE:
{
boolean narrower;
if (ltExpr == null)
narrower = true;
else {
if (!(isConstant[0] && ltConstant))
throw new StandardException("Conflicting inequality conditions.");
int comp = ((Comparable)ltExpr.evaluate(null, null))
.compareTo(expr.evaluate(null, null));
narrower = ((comp > 0) ||
// < with same comparand is narrower than <=.
((comp == 0) &&
(op == Comparison.LT) &&
(ltOp == Comparison.LE)));
}
if (narrower) {
ltExpr = expr;
ltOp = op;
ltConstant = isConstant[0];
}
}
break;
case GT:
case GE:
{
boolean narrower;
if (gtExpr == null)
narrower = true;
else {
if (!(isConstant[0] && gtConstant))
throw new StandardException("Conflicting inequality conditions.");
int comp = ((Comparable)gtExpr.evaluate(null, null))
.compareTo(expr.evaluate(null, null));
narrower = ((comp > 0) ||
// > with same comparand is narrower than >=.
((comp == 0) &&
(op == Comparison.GT) &&
                        (gtOp == Comparison.GE)));
}
if (narrower) {
gtExpr = expr;
gtOp = op;
gtConstant = isConstant[0];
}
}
break;
}
}
if (eqExpr != null) {
keys[i] = eqExpr;
=======
public FlattenState flatten(BaseJoinNode join) {
if (join.isTable()) {
UserTable table = ((TableJoinNode)join).getTable();
Map<UserTable,Integer> fieldOffsets = new HashMap<UserTable,Integer>();
fieldOffsets.put(table, 0);
return new FlattenState(userTableRowType(table),
fieldOffsets,
table.getColumns().size());
>>>>>>>
public FlattenState flatten(BaseJoinNode join) {
if (join.isTable()) {
UserTable table = ((TableJoinNode)join).getTable();
Map<UserTable,Integer> fieldOffsets = new HashMap<UserTable,Integer>();
fieldOffsets.put(table, 0);
return new FlattenState(userTableRowType(table),
fieldOffsets,
table.getColumns().size()); |
<<<<<<<
import com.akiban.qp.operator.API;
=======
import com.akiban.qp.operator.QueryContext;
>>>>>>>
import com.akiban.qp.operator.API;
import com.akiban.qp.operator.QueryContext; |
<<<<<<<
assert keyRange.spatial();
this.keyRange = keyRange;
this.multiCursor = new MultiCursor(openEarly);
this.iterationHelper = iterationHelper;
this.index = keyRange.indexRowType().index();
assert index.isSpatial() : index;
this.loExpressions = keyRange.lo().boundExpressions(context, bindings);
this.hiExpressions = keyRange.hi().boundExpressions(context, bindings);
this.space = index.space();
=======
Index spatialIndex = keyRange.indexRowType().index();
assert spatialIndex != null : keyRange.indexRowType().index();
assert spatialIndex.isSpatial() : spatialIndex;
this.space = spatialIndex.space();
if (keyRange.spatialCoordsIndex()) {
this.firstSpatialColumn = spatialIndex.firstSpatialArgument();
this.lastSpatialColumn = this.firstSpatialColumn + 1;
} else if (keyRange.spatialObjectIndex()) {
this.firstSpatialColumn = spatialIndex.firstSpatialArgument();
this.lastSpatialColumn = this.firstSpatialColumn;
} else {
this.firstSpatialColumn = -1;
this.lastSpatialColumn = -1;
assert false;
}
this.multiCursor = new MultiCursor(openAll);
this.iterationHelper = iterationHelper;
>>>>>>>
this.keyRange = keyRange;
this.index = keyRange.indexRowType().index();
assert keyRange.spatial();
assert index.isSpatial() : index;
        this.space = index.space();
this.loExpressions = keyRange.lo().boundExpressions(context, bindings);
this.hiExpressions = keyRange.hi().boundExpressions(context, bindings);
this.iterationHelper = iterationHelper;
<<<<<<<
this.indexColumnSelector = new IndexRowPrefixSelector(this.index.firstSpatialArgument() + 1);
GeophileIndex<IndexRow> geophileIndex = new GeophileIndex<>(adapter, keyRange.indexRowType(), openEarly);
GeophileCursor<IndexRow> geophileCursor = new GeophileCursor<>(geophileIndex, openEarly);
for (Map.Entry<Long, IndexKeyRange> entry : zKeyRanges(keyRange).entrySet()) {
long z = entry.getKey();
IndexKeyRange zKeyRange = entry.getValue();
=======
this.indexColumnSelector = new IndexRowPrefixSelector(firstSpatialColumn + 1);
for (IndexKeyRange zKeyRange : zKeyRanges(context, keyRange)) {
>>>>>>>
this.indexColumnSelector = new IndexRowPrefixSelector(this.index.firstSpatialArgument() + 1);
GeophileIndex<IndexRow> geophileIndex = new GeophileIndex<>(adapter, keyRange.indexRowType(), openEarly);
GeophileCursor<IndexRow> geophileCursor = new GeophileCursor<>(geophileIndex, openEarly);
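        // For each z-value derived from the query's spatial object, scan the corresponding index key range.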
for (Map.Entry<Long, IndexKeyRange> entry : zKeyRanges(keyRange).entrySet()) {
long z = entry.getKey();
IndexKeyRange zKeyRange = entry.getValue();
<<<<<<<
Map<Long, IndexKeyRange> zKeyRanges = new HashMap<>();
SpatialObject spatialObject = spatialObject();
long[] zValues = new long[MAX_Z];
space.decompose(spatialObject, zValues);
int zColumn = index.firstSpatialArgument();
Value hiValue = new Value(InternalIndexTypes.LONG.instance(false));
hiValue.putInt64(Long.MAX_VALUE);
for (int i = 0; i < zValues.length && zValues[i] != SpaceImpl.Z_NULL; i++) {
IndexRowType physicalRowType = keyRange.indexRowType().physicalRowType();
int indexRowFields = physicalRowType.nFields();
SpatialIndexValueRecord zLoRow = new SpatialIndexValueRecord(indexRowFields);
SpatialIndexValueRecord zHiRow = new SpatialIndexValueRecord(indexRowFields);
IndexBound zLo = new IndexBound(zLoRow, indexColumnSelector);
IndexBound zHi = new IndexBound(zHiRow, indexColumnSelector);
// Take care of any equality restrictions before the spatial fields
for (int f = 0; f < zColumn; f++) {
ValueSource eqValueSource = loExpressions.value(f);
zLoRow.value(f, eqValueSource);
zHiRow.value(f, eqValueSource);
=======
List<IndexKeyRange> zKeyRanges = new ArrayList<>();
IndexBound loBound = keyRange.lo();
IndexBound hiBound = keyRange.hi();
ValueRecord loExpressions = loBound.boundExpressions(context, bindings);
ValueRecord hiExpressions = hiBound == null ? null : hiBound.boundExpressions(context, bindings);
SpatialObject spatialObject = null;
if (keyRange.spatialCoordsIndex()) {
spatialObject = spatialObjectFromCoords(loExpressions, hiExpressions, keyRange);
} else if (keyRange.spatialObjectIndex()) {
spatialObject = spatialObject(loExpressions, keyRange);
} else {
assert false;
}
long[] zValues = new long[spatialObject.maxZ()];
space.decompose(spatialObject, zValues);
for (int i = 0; i < zValues.length; i++) {
long z = zValues[i];
if (z != -1L) {
IndexRowType physicalRowType = keyRange.indexRowType().physicalRowType();
int indexRowFields = physicalRowType.nFields();
SpatialIndexValueRecord zLoRow = new SpatialIndexValueRecord(indexRowFields);
SpatialIndexValueRecord zHiRow = new SpatialIndexValueRecord(indexRowFields);
IndexBound zLo = new IndexBound(zLoRow, indexColumnSelector);
IndexBound zHi = new IndexBound(zHiRow, indexColumnSelector);
// Take care of any equality restrictions before the spatial fields
for (int f = 0; f < firstSpatialColumn; f++) {
ValueSource eqValueSource = loExpressions.value(f);
zLoRow.value(f, eqValueSource);
zHiRow.value(f, eqValueSource);
}
// lo and hi bounds
Value loValue = new Value(InternalIndexTypes.LONG.instance(false));
Value hiValue = new Value(InternalIndexTypes.LONG.instance(false));
loValue.putInt64(Space.zLo(z));
hiValue.putInt64(Space.zHi(z));
zLoRow.value(firstSpatialColumn, loValue);
zHiRow.value(firstSpatialColumn, hiValue);
IndexKeyRange zKeyRange = IndexKeyRange.bounded(physicalRowType, zLo, true, zHi, true);
zKeyRanges.add(zKeyRange);
>>>>>>>
Map<Long, IndexKeyRange> zKeyRanges = new HashMap<>();
SpatialObject spatialObject = spatialObject();
long[] zValues = new long[MAX_Z];
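        // Decompose the query object into z-order cells; unused trailing slots stay SpaceImpl.Z_NULL.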
space.decompose(spatialObject, zValues);
int zColumn = index.firstSpatialArgument();
Value hiValue = new Value(InternalIndexTypes.LONG.instance(false));
hiValue.putInt64(Long.MAX_VALUE);
for (int i = 0; i < zValues.length && zValues[i] != SpaceImpl.Z_NULL; i++) {
IndexRowType physicalRowType = keyRange.indexRowType().physicalRowType();
int indexRowFields = physicalRowType.nFields();
SpatialIndexValueRecord zLoRow = new SpatialIndexValueRecord(indexRowFields);
SpatialIndexValueRecord zHiRow = new SpatialIndexValueRecord(indexRowFields);
IndexBound zLo = new IndexBound(zLoRow, indexColumnSelector);
IndexBound zHi = new IndexBound(zHiRow, indexColumnSelector);
// Take care of any equality restrictions before the spatial fields
for (int f = 0; f < zColumn; f++) {
ValueSource eqValueSource = loExpressions.value(f);
zLoRow.value(f, eqValueSource);
zHiRow.value(f, eqValueSource);
<<<<<<<
private SpatialObject spatialObject()
{
SpatialObject spatialObject;
int nSpatialColumns = index.lastSpatialArgument() - index.firstSpatialArgument() + 1;
if (nSpatialColumns == 1) {
// Spatial object
ValueRecord expressions = keyRange.lo().boundExpressions(context, bindings);
spatialObject = (SpatialObject) expressions.value(index.firstSpatialArgument()).getObject();
} else {
// lat/lon columns
int latColumn = index.firstSpatialArgument();
int lonColumn = latColumn + 1;
TInstance xinst = index.getAllColumns().get(latColumn).getColumn().getType();
double xLo = TBigDecimal.getWrapper(loExpressions.value(latColumn), xinst).asBigDecimal().doubleValue();
double xHi = TBigDecimal.getWrapper(hiExpressions.value(latColumn), xinst).asBigDecimal().doubleValue();
TInstance yinst = index.getAllColumns().get(lonColumn).getColumn().getType();
double yLo = TBigDecimal.getWrapper(loExpressions.value(lonColumn), yinst).asBigDecimal().doubleValue();
double yHi = TBigDecimal.getWrapper(hiExpressions.value(lonColumn), yinst).asBigDecimal().doubleValue();
spatialObject = BoxLatLon.newBox(xLo, xHi, yLo, yHi);
}
return spatialObject;
}
private SpatialIndex<IndexRow> spatialIndex(boolean openEarly) throws IOException, InterruptedException
{
GeophileIndex<IndexRow> geophileIndex = new GeophileIndex<>(adapter, keyRange.indexRowType(), openEarly);
return SpatialIndex.newSpatialIndex(space, geophileIndex);
}
private RowCursorImpl spatialJoinIterator(SpatialIndex<IndexRow> spatialIndex, SpatialObject queryObject)
throws IOException, InterruptedException
{
SpatialJoin spatialJoin = SpatialJoin.newSpatialJoin(SPATIAL_JOIN_DUPLICATION);
Iterator<IndexRow> spatialJoinIterator = spatialJoin.iterator(queryObject, spatialIndex);
return new IteratorToCursorAdapter(spatialJoinIterator);
}
// Class state
=======
private SpatialObject spatialObjectFromCoords(ValueRecord loExpressions,
ValueRecord hiExpressions,
IndexKeyRange keyRange)
{
Index index = keyRange.indexRowType().index();
// Only 2d, lat/lon supported for now
double xLo, xHi, yLo, yHi;
TInstance xinst = index.getAllColumns().get(firstSpatialColumn).getColumn().getType();
TInstance yinst = index.getAllColumns().get(lastSpatialColumn).getColumn().getType();
xLo = TBigDecimal.getWrapper(loExpressions.value(firstSpatialColumn), xinst).asBigDecimal().doubleValue();
xHi = TBigDecimal.getWrapper(hiExpressions.value(firstSpatialColumn), xinst).asBigDecimal().doubleValue();
yLo = TBigDecimal.getWrapper(loExpressions.value(lastSpatialColumn), yinst).asBigDecimal().doubleValue();
yHi = TBigDecimal.getWrapper(hiExpressions.value(lastSpatialColumn), yinst).asBigDecimal().doubleValue();
return BoxLatLon.newBox(xLo, xHi, yLo, yHi);
}
private SpatialObject spatialObject(ValueRecord expressions, IndexKeyRange keyRange)
{
return (SpatialObject) expressions.value(firstSpatialColumn).getObject();
}
>>>>>>>
private SpatialObject spatialObject()
{
SpatialObject spatialObject;
if (index.spatialColumns() == 1) {
// Spatial object
ValueRecord expressions = keyRange.lo().boundExpressions(context, bindings);
spatialObject = (SpatialObject) expressions.value(index.firstSpatialArgument()).getObject();
} else {
// lat/lon columns
int latColumn = index.firstSpatialArgument();
int lonColumn = latColumn + 1;
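            // Read the decimal lat/lon bounds as doubles and build the bounding-box query object.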
TInstance xinst = index.getAllColumns().get(latColumn).getColumn().getType();
double xLo = TBigDecimal.getWrapper(loExpressions.value(latColumn), xinst).asBigDecimal().doubleValue();
double xHi = TBigDecimal.getWrapper(hiExpressions.value(latColumn), xinst).asBigDecimal().doubleValue();
TInstance yinst = index.getAllColumns().get(lonColumn).getColumn().getType();
double yLo = TBigDecimal.getWrapper(loExpressions.value(lonColumn), yinst).asBigDecimal().doubleValue();
double yHi = TBigDecimal.getWrapper(hiExpressions.value(lonColumn), yinst).asBigDecimal().doubleValue();
spatialObject = BoxLatLon.newBox(xLo, xHi, yLo, yHi);
}
return spatialObject;
}
private SpatialIndex<IndexRow> spatialIndex(boolean openEarly) throws IOException, InterruptedException
{
GeophileIndex<IndexRow> geophileIndex = new GeophileIndex<>(adapter, keyRange.indexRowType(), openEarly);
return SpatialIndex.newSpatialIndex(space, geophileIndex);
}
private RowCursorImpl spatialJoinIterator(SpatialIndex<IndexRow> spatialIndex, SpatialObject queryObject)
throws IOException, InterruptedException
{
SpatialJoin spatialJoin = SpatialJoin.newSpatialJoin(SPATIAL_JOIN_DUPLICATION);
Iterator<IndexRow> spatialJoinIterator = spatialJoin.iterator(queryObject, spatialIndex);
return new IteratorToCursorAdapter(spatialJoinIterator);
}
// Class state
<<<<<<<
=======
private final int firstSpatialColumn;
private final int lastSpatialColumn;
>>>>>>> |
<<<<<<<
public TClass widestComparable()
{
return BIGINT;
}
=======
@Override
protected boolean tryFromObject(TExecutionContext context, PValueSource in, PValueTarget out) {
if (in.tInstance().typeClass() == AkBool.INSTANCE) {
byte asInt = (byte)(in.getBoolean() ? 1 : 0);
switch (out.tInstance().typeClass().underlyingType()) {
case INT_8:
out.putInt8(asInt);
return true;
case INT_16:
out.putInt16(asInt);
return true;
case UINT_16:
out.putUInt16((char)asInt);
return true;
case INT_32:
out.putInt32(asInt);
return true;
case INT_64:
out.putInt64(asInt);
return true;
default:
// fall through and keep trying the standard ways
}
}
return super.tryFromObject(context, in, out);
}
>>>>>>>
public TClass widestComparable()
{
return BIGINT;
}
@Override
protected boolean tryFromObject(TExecutionContext context, PValueSource in, PValueTarget out) {
if (in.tInstance().typeClass() == AkBool.INSTANCE) {
byte asInt = (byte)(in.getBoolean() ? 1 : 0);
switch (out.tInstance().typeClass().underlyingType()) {
case INT_8:
out.putInt8(asInt);
return true;
case INT_16:
out.putInt16(asInt);
return true;
case UINT_16:
out.putUInt16((char)asInt);
return true;
case INT_32:
out.putInt32(asInt);
return true;
case INT_64:
out.putInt64(asInt);
return true;
default:
// fall through and keep trying the standard ways
}
}
return super.tryFromObject(context, in, out);
} |
<<<<<<<
try {
super.open(bindings);
} finally {
CURSOR_SETUP_TAP.out();
}
=======
super.open();
CURSOR_SETUP_TAP.out();
>>>>>>>
try {
super.open();
} finally {
CURSOR_SETUP_TAP.out();
} |
<<<<<<<
indexRow.reset(exchange.getKey(), exchange.getValue());
if (Types3Switch.ON)
pTarget.attach(exchange.getKey());
=======
indexRow.reset(groupIndex, exchange.getKey(), exchange.getValue());
>>>>>>>
indexRow.reset(groupIndex, exchange.getKey(), exchange.getValue());
if (Types3Switch.ON)
pTarget.attach(exchange.getKey()); |
<<<<<<<
import com.akiban.server.error.RowDefNotFoundException;
=======
import com.akiban.server.types.Converters;
import com.akiban.server.types.FromObjectConversionSource;
import com.akiban.server.util.RowDefNotFoundException;
>>>>>>>
import com.akiban.server.error.RowDefNotFoundException;
import com.akiban.server.types.Converters;
import com.akiban.server.types.FromObjectConversionSource; |
<<<<<<<
this.range = maxValue - minValue + 1;
this.cacheSize = 20;
=======
>>>>>>>
this.cacheSize = 20;
<<<<<<<
private final long cacheSize;
private final long range;
=======
>>>>>>>
private final long cacheSize; |
<<<<<<<
import com.foundationdb.server.service.metrics.MetricsService;
import com.foundationdb.server.service.monitor.MonitorService;
=======
>>>>>>>
import com.foundationdb.server.service.metrics.MetricsService;
import com.foundationdb.server.service.monitor.MonitorService;
<<<<<<<
MetricsService metricsService,
=======
CostModelFactory costModel,
>>>>>>>
CostModelFactory costModel,
MetricsService metricsService,
<<<<<<<
routineLoader, txnService, securityService, metricsService,
serviceManager);
=======
routineLoader, txnService, securityService, costModel,
serviceManager);
>>>>>>>
routineLoader, txnService, securityService, costModel,
metricsService,
serviceManager); |
<<<<<<<
IndexKeyRange restart = new IndexKeyRange(this);
restart.boundColumns = boundColumns(indexRowType, newLo);
restart.lo = newLo;
restart.loInclusive = true;
return restart;
}
public IndexKeyRange resetHi(IndexBound newHi)
{
IndexKeyRange restart = new IndexKeyRange(this);
restart.boundColumns = boundColumns(indexRowType, newHi);
restart.hi = newHi;
restart.hiInclusive = true;
return restart;
=======
this.boundColumns = 0;
this.indexRowType = indexRowType;
this.lo = null;
this.loInclusive = false;
this.hi = null;
this.hiInclusive = false;
this.lexicographic = false;
>>>>>>>
IndexKeyRange restart = new IndexKeyRange(this);
restart.boundColumns = boundColumns(indexRowType, newLo);
restart.lo = newLo;
restart.loInclusive = true;
return restart;
}
public IndexKeyRange resetHi(IndexBound newHi)
{
IndexKeyRange restart = new IndexKeyRange(this);
restart.boundColumns = boundColumns(indexRowType, newHi);
restart.hi = newHi;
restart.hiInclusive = true;
return restart;
<<<<<<<
private IndexKeyRange(IndexKeyRange indexKeyRange)
{
this.indexRowType = indexKeyRange.indexRowType;
this.boundColumns = indexKeyRange.boundColumns;
this.lo = indexKeyRange.lo;
this.loInclusive = indexKeyRange.loInclusive;
this.hi = indexKeyRange.hi;
this.hiInclusive = indexKeyRange.hiInclusive;
this.lexicographic = indexKeyRange.lexicographic;
}
private static int boundColumns(IndexRowType indexRowType, IndexBound lo, IndexBound hi)
=======
private static int boundColumns(IndexRowType indexRowType, IndexBound lo, IndexBound hi, boolean lexicographic)
>>>>>>>
private IndexKeyRange(IndexKeyRange indexKeyRange)
{
this.indexRowType = indexKeyRange.indexRowType;
this.boundColumns = indexKeyRange.boundColumns;
this.lo = indexKeyRange.lo;
this.loInclusive = indexKeyRange.loInclusive;
this.hi = indexKeyRange.hi;
this.hiInclusive = indexKeyRange.hiInclusive;
this.lexicographic = indexKeyRange.lexicographic;
}
private static int boundColumns(IndexRowType indexRowType, IndexBound lo, IndexBound hi, boolean lexicographic)
<<<<<<<
private int boundColumns;
private IndexBound lo;
private boolean loInclusive;
private IndexBound hi;
private boolean hiInclusive;
private boolean lexicographic = false;
=======
private final int boundColumns;
private final IndexBound lo;
private final boolean loInclusive;
private final IndexBound hi;
private final boolean hiInclusive;
// An Akiban index scan normally allows a range for only the last specified part of the bound. E.g.,
// (1, 10, 800) - (1, 10, 888) is legal, but (1, 10, 800) - (1, 20, 888) is not, because there are two ranges,
// 10-20 and 800-888. MySQL support requires a different approach in which we start at the lower bound and
// scan everything in the index up to the upper bound. So (1, 10, 800) - (1, 20, 888) is legal, and could return
// a row that is lexicographically between these bounds, but outside some range, e.g. (1, 11, 900). This will
// also be useful in supporting queries such as select * from t where (x, y) > (5, 7).
private final boolean lexicographic;
>>>>>>>
private int boundColumns;
private IndexBound lo;
private boolean loInclusive;
private IndexBound hi;
private boolean hiInclusive;
// An Akiban index scan normally allows a range for only the last specified part of the bound. E.g.,
// (1, 10, 800) - (1, 10, 888) is legal, but (1, 10, 800) - (1, 20, 888) is not, because there are two ranges,
// 10-20 and 800-888. MySQL support requires a different approach in which we start at the lower bound and
// scan everything in the index up to the upper bound. So (1, 10, 800) - (1, 20, 888) is legal, and could return
// a row that is lexicographically between these bounds, but outside some range, e.g. (1, 11, 900). This will
// also be useful in supporting queries such as select * from t where (x, y) > (5, 7).
private final boolean lexicographic; |
<<<<<<<
public Result compileSelect(SessionTracer tracer, CursorNode cursor) throws StandardException {
try {
// Get into standard form.
tracer.beginEvent(EventTypes.BIND_AND_GROUP);
cursor = (CursorNode)bindAndGroup(cursor);
} finally {
tracer.endEvent();
}
=======
enum ProductMethod { HKEY_ORDERED, BY_RUN };
public Result compileSelect(CursorNode cursor) throws StandardException {
// Get into standard form.
cursor = (CursorNode)bindAndGroup(cursor);
>>>>>>>
enum ProductMethod { HKEY_ORDERED, BY_RUN };
public Result compileSelect(SessionTracer tracer, CursorNode cursor) throws StandardException {
try {
// Get into standard form.
tracer.beginEvent(EventTypes.BIND_AND_GROUP);
cursor = (CursorNode)bindAndGroup(cursor);
} finally {
tracer.endEvent();
}
<<<<<<<
FlattenState[] fls = new FlattenState[nbranches];
Flattener fl = new Flattener(resultOperator);
try {
tracer.beginEvent(EventTypes.FLATTEN);
for (int i = 0; i < nbranches; i++)
fls[i] = fl.flatten(squery.getJoins(), i);
} finally {
tracer.endEvent();
}
=======
Flattener fl = new Flattener(resultOperator, nbranches);
FlattenState[] fls = fl.flatten(squery.getJoins());
>>>>>>>
Flattener fl = new Flattener(resultOperator, nbranches);
FlattenState[] fls = null;
try {
tracer.beginEvent(EventTypes.FLATTEN);
fls = fl.flatten(squery.getJoins());
} finally {
tracer.endEvent();
} |
<<<<<<<
import com.akiban.server.types.AkType;
import com.akiban.server.types3.Types3Switch;
import com.akiban.server.types3.mcompat.mtypes.MBigDecimal;
=======
>>>>>>>
import com.akiban.server.types.AkType;
import com.akiban.server.types3.Types3Switch;
import com.akiban.server.types3.mcompat.mtypes.MBigDecimal;
<<<<<<<
private static boolean isFixedDecimal(Column column)
{
if (Types3Switch.ON) {
return column.tInstance().typeClass() instanceof MBigDecimal;
} else {
AkType type = column.getType().akType();
return type == AkType.DECIMAL;
}
}
=======
>>>>>>> |
<<<<<<<
List<TAggregator> candidates = new ArrayList<TAggregator>(aggregatesRegistry.getAggregates(name));
for (Iterator<TAggregator> iterator = candidates.iterator(); iterator.hasNext(); ) {
TAggregator candidate = iterator.next();
=======
List<TAggregator> candidates = new ArrayList<TAggregator>(registry.getAggregates(name));
for (TAggregator candidate : candidates) {
>>>>>>>
List<TAggregator> candidates = new ArrayList<TAggregator>(registry.getAggregates(name));
for (Iterator<TAggregator> iterator = candidates.iterator(); iterator.hasNext(); ) {
TAggregator candidate = iterator.next(); |
<<<<<<<
"constraint __akiban_c2p foreign key __akiban_c2p(gid, pid) references parent(gid, pid)");
schema = new Schema(ais());
=======
"grouping foreign key(gid, pid) references parent(gid, pid)");
createIndex("schema", "child2", "idx_cid2_copy", "cid2_copy");
schema = new Schema(rowDefCache().ais());
>>>>>>>
"grouping foreign key(gid, pid) references parent(gid, pid)");
createIndex("schema", "child2", "idx_cid2_copy", "cid2_copy");
schema = new Schema(ais()); |
<<<<<<<
/** Is this operator and everything below it inner joined?
* In that case a null-tolerant predicate doesn't interfere with
* reordering.
*/
protected boolean allInnerJoins(JoinOperator op) {
return ((op.getJoinType() == JoinType.INNER) &&
((op.left == null) || allInnerJoins(op.left)) &&
((op.right == null) || allInnerJoins(op.right)));
}
=======
/** Get join conditions from top-level WHERE predicates. */
>>>>>>>
/** Is this operator and everything below it inner joined?
* In that case a null-tolerant predicate doesn't interfere with
* reordering.
*/
protected boolean allInnerJoins(JoinOperator op) {
return ((op.getJoinType() == JoinType.INNER) &&
((op.left == null) || allInnerJoins(op.left)) &&
((op.right == null) || allInnerJoins(op.right)));
}
/** Get join conditions from top-level WHERE predicates. */ |
<<<<<<<
private InsertProcessor insertProcessor;
private DeleteProcessor deleteProcessor;
private UpdateProcessor updateProcessor;
=======
private final EmbeddedJDBCService jdbcService;
private final InsertProcessor insertProcessor;
private final DeleteProcessor deleteProcessor;
>>>>>>>
private InsertProcessor insertProcessor;
private DeleteProcessor deleteProcessor;
private UpdateProcessor updateProcessor;
private final EmbeddedJDBCService jdbcService;
<<<<<<<
this.updateProcessor = new UpdateProcessor (configService, treeService, store, registryService);
this.extDataService = extDataService;
=======
>>>>>>>
this.updateProcessor = new UpdateProcessor (configService, treeService, store, registryService); |
<<<<<<<
import com.akiban.sql.optimizer.explain.Explainer;
import com.akiban.sql.optimizer.explain.Label;
import com.akiban.sql.optimizer.explain.OperationExplainer;
import com.akiban.sql.optimizer.explain.PrimitiveExplainer;
import com.akiban.sql.optimizer.explain.std.LookUpOperatorExplainer;
=======
import com.akiban.qp.rowtype.UserTableRowType;
>>>>>>>
import com.akiban.sql.optimizer.explain.Explainer;
import com.akiban.sql.optimizer.explain.Label;
import com.akiban.sql.optimizer.explain.OperationExplainer;
import com.akiban.sql.optimizer.explain.PrimitiveExplainer;
import com.akiban.sql.optimizer.explain.std.LookUpOperatorExplainer;
import com.akiban.qp.rowtype.UserTableRowType; |
<<<<<<<
import com.akiban.server.types.util.ValueHolder;
=======
import com.persistit.exception.PersistitException;
>>>>>>>
import com.akiban.server.types.util.ValueHolder;
import com.persistit.exception.PersistitException; |
<<<<<<<
import com.akiban.server.RowData;
import com.akiban.server.error.DuplicateTableNameException;
import com.akiban.server.error.NoSuchTableException;
import com.akiban.server.error.ProtectedTableDDLException;
=======
import com.akiban.server.rowdata.RowData;
import com.akiban.server.api.common.NoSuchTableException;
import com.akiban.server.api.ddl.DuplicateTableNameException;
import com.akiban.server.api.ddl.ProtectedTableDDLException;
>>>>>>>
import com.akiban.server.error.DuplicateTableNameException;
import com.akiban.server.error.NoSuchTableException;
import com.akiban.server.error.ProtectedTableDDLException;
import com.akiban.server.rowdata.RowData; |
<<<<<<<
=======
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URL;
import java.util.Arrays;
import java.util.Dictionary;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
>>>>>>> |
<<<<<<<
import java.util.BitSet;
=======
import java.util.Collections;
>>>>>>>
import java.util.BitSet;
import java.util.Collections; |
<<<<<<<
import com.akiban.server.RowData;
=======
import com.akiban.server.InvalidOperationException;
import com.akiban.server.rowdata.RowData;
>>>>>>>
import com.akiban.server.rowdata.RowData; |
<<<<<<<
=======
import com.akiban.ais.model.Group;
import com.akiban.ais.model.IndexColumn;
import com.akiban.qp.operator.API.InputPreservationOption;
import com.akiban.qp.operator.API.JoinType;
import com.akiban.server.collation.AkCollator;
import com.akiban.qp.operator.QueryContext;
import com.akiban.server.expression.std.FieldExpression;
import com.akiban.server.expression.subquery.ResultSetSubqueryExpression;
import com.akiban.server.expression.subquery.ScalarSubqueryExpression;
import com.akiban.server.types3.TInstance;
import com.akiban.server.types3.Types3Switch;
import com.akiban.server.types3.pvalue.PUnderlying;
import com.akiban.server.types3.pvalue.PValueSources;
import com.akiban.server.types3.texpressions.AnySubqueryTExpression;
import com.akiban.server.types3.texpressions.TNullExpression;
import com.akiban.server.types3.texpressions.TPreparedExpression;
import com.akiban.server.types3.texpressions.TPreparedField;
import com.akiban.server.types3.texpressions.TPreparedLiteral;
>>>>>>>
<<<<<<<
import com.akiban.ais.model.IndexColumn;
import com.akiban.ais.model.GroupTable;
import com.akiban.ais.model.TableName;
=======
>>>>>>>
import com.akiban.ais.model.IndexColumn;
import com.akiban.ais.model.TableName; |
<<<<<<<
=======
tinstances = new TInstance[dimensions];
fieldDefs = new FieldDef[dimensions];
coords = new double[dimensions];
positions = new int[dimensions];
>>>>>>>
<<<<<<<
/** @deprecated */
=======
public long zValue (Row row)
{
bind (row);
return Spatial.shuffle(space, coords[0], coords[1]);
}
>>>>>>>
public long zValue (Row row)
{
bind (row);
return Spatial.shuffle(space, coords[0], coords[1]);
}
<<<<<<<
public void processSpatialObject(RowData rowData, Operation operation)
=======
private void bind (Row row) {
for (int d = 0; d < dimensions; d++) {
ValueSource source = row.value(positions[d]);
TClass tclass = source.getType().typeClass();
if (tclass == MNumeric.DECIMAL) {
BigDecimalWrapper wrapper = TBigDecimal.getWrapper(source, tinstances[d]);
coords[d] = wrapper.asBigDecimal().doubleValue();
}
else if (tclass == MNumeric.BIGINT) {
coords[d] = source.getInt64();
}
else if (tclass == MNumeric.INT) {
coords[d] = source.getInt32();
}
else {
assert false : row.rowType().table().getColumn(positions[d]);
}
}
}
private void bind(RowData rowData)
>>>>>>>
public void processSpatialObject(RowData rowData, Operation operation)
{
bind(rowData);
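    // Shuffle the bound object into z-values and hand each non-null z to the operation.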
long[] zs = zArray();
Spatial.shuffle(space, spatialObject, zs);
for (int i = 0; i < zs.length && zs[i] != Space.Z_NULL; i++) {
operation.handleZValue(zs[i]);
}
}
public void processSpatialObject(Row rowData, Operation operation) |
<<<<<<<
public PostgresStatement generateInitial(PostgresServerSession server,
StatementNode stmt,
List<ParameterNode> params, int[] paramTypes)
=======
public PostgresStatement generate(PostgresServerSession server,
String sql, StatementNode stmt,
List<ParameterNode> params, int[] paramTypes)
>>>>>>>
public PostgresStatement generateInitial(PostgresServerSession server,
String sql, StatementNode stmt,
List<ParameterNode> params, int[] paramTypes) |
<<<<<<<
else if (object instanceof SpatialObject) {
type = MBinary.VARBINARY.instance(65535, false);
value = new Value(type, Spatial.serialize((JTSSpatialObject) object));
}
=======
>>>>>>>
<<<<<<<
if (source.getType().typeClass() == MBinary.VARBINARY) {
=======
if (source.getType().typeClass() == AkGeometry.INSTANCE.widestComparable()) {
if (source.getObject() instanceof Geometry) {
return (Geometry) source.getObject();
}
logger.error("Geometry with underlying object of : {}", source.getObject().getClass());
}
if (source.getType().typeClass() == MBinary.LONGBLOB ||
source.getType().typeClass() == MBinary.BLOB ||
source.getType().typeClass() == MBinary.MEDIUMBLOB ||
source.getType().typeClass() == MBinary.TINYBLOB ||
source.getType().typeClass() == MBinary.VARBINARY) {
>>>>>>>
if (source.getType().typeClass() == AkGeometry.INSTANCE.widestComparable()) {
if (source.getObject() instanceof Geometry) {
return (Geometry) source.getObject();
}
logger.error("Geometry with underlying object of : {}", source.getObject().getClass());
}
if (source.getType().typeClass() == MBinary.VARBINARY) { |
<<<<<<<
=======
import org.opencastproject.manager.api.PluginManagerConstants;
import org.opencastproject.manager.core.MetadataDocumentHandler;
import org.opencastproject.manager.system.workflow.utils.JSONWorkflowBuilder;
import org.osgi.framework.BundleContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
import org.apache.commons.io.FileUtils;
>>>>>>> |
<<<<<<<
import com.akiban.server.error.AISTooLargeException;
import com.akiban.server.error.BranchingGroupIndexException;
import com.akiban.server.error.DuplicateIndexException;
import com.akiban.server.error.DuplicateTableNameException;
import com.akiban.server.error.IndexLacksColumnsException;
import com.akiban.server.error.InvalidOperationException;
import com.akiban.server.error.JoinColumnMismatchException;
import com.akiban.server.error.JoinColumnTypesMismatchException;
import com.akiban.server.error.JoinToMultipleParentsException;
import com.akiban.server.error.JoinToProtectedTableException;
import com.akiban.server.error.JoinToUnknownTableException;
import com.akiban.server.error.JoinToWrongColumnsException;
import com.akiban.server.error.NoSuchColumnException;
import com.akiban.server.error.NoSuchGroupException;
import com.akiban.server.error.NoSuchTableException;
import com.akiban.server.error.ParseException;
import com.akiban.server.error.PersistItErrorException;
import com.akiban.server.error.ProtectedIndexException;
import com.akiban.server.error.ProtectedTableDDLException;
import com.akiban.server.error.ReferencedTableException;
import com.akiban.server.error.TableNotInGroupException;
import com.akiban.server.error.UnsupportedCharsetException;
import com.akiban.server.error.UnsupportedDataTypeException;
import com.akiban.server.error.UnsupportedIndexDataTypeException;
import com.akiban.server.error.UnsupportedIndexSizeException;
=======
import com.akiban.server.rowdata.RowDef;
import com.akiban.server.rowdata.RowDefCache;
>>>>>>>
import com.akiban.server.error.AISTooLargeException;
import com.akiban.server.error.BranchingGroupIndexException;
import com.akiban.server.error.DuplicateIndexException;
import com.akiban.server.error.DuplicateTableNameException;
import com.akiban.server.error.IndexLacksColumnsException;
import com.akiban.server.error.InvalidOperationException;
import com.akiban.server.error.JoinColumnMismatchException;
import com.akiban.server.error.JoinColumnTypesMismatchException;
import com.akiban.server.error.JoinToMultipleParentsException;
import com.akiban.server.error.JoinToProtectedTableException;
import com.akiban.server.error.JoinToUnknownTableException;
import com.akiban.server.error.JoinToWrongColumnsException;
import com.akiban.server.error.NoSuchColumnException;
import com.akiban.server.error.NoSuchGroupException;
import com.akiban.server.error.NoSuchTableException;
import com.akiban.server.error.ParseException;
import com.akiban.server.error.PersistItErrorException;
import com.akiban.server.error.ProtectedIndexException;
import com.akiban.server.error.ProtectedTableDDLException;
import com.akiban.server.error.ReferencedTableException;
import com.akiban.server.error.ScanRetryAbandonedException;
import com.akiban.server.error.TableNotInGroupException;
import com.akiban.server.error.UnsupportedCharsetException;
import com.akiban.server.error.UnsupportedDataTypeException;
import com.akiban.server.error.UnsupportedIndexDataTypeException;
import com.akiban.server.error.UnsupportedIndexSizeException;
import com.akiban.server.rowdata.RowDefCache;
<<<<<<<
import com.akiban.server.RowDef;
import com.akiban.server.RowDefCache;
=======
import com.akiban.server.InvalidOperationException;
>>>>>>>
<<<<<<<
public TableDefinition getTableDefinition(Session session, TableName tableName) {
final Table table = getAis(session).getTable(tableName);
=======
public TableDefinition getTableDefinition(Session session, TableName tableName) throws NoSuchTableException {
final Table table = getAis().getTable(tableName);
>>>>>>>
public TableDefinition getTableDefinition(Session session, TableName tableName) {
final Table table = getAis(session).getTable(tableName);
<<<<<<<
public List<String> schemaStrings(Session session, boolean withGroupTables) {
final AkibanInformationSchema ais = getAis(session);
=======
public List<String> schemaStrings(Session session, boolean withGroupTables) throws Exception {
final AkibanInformationSchema ais = getAis();
>>>>>>>
public List<String> schemaStrings(Session session, boolean withGroupTables) {
final AkibanInformationSchema ais = getAis();
<<<<<<<
if (!ais.isTypeSupported(typeName)) {
throw new UnsupportedDataTypeException (
new TableName(schemaName, tableName),
col.getName(), typeName);
=======
if (!getAis().isTypeSupported(typeName)) {
throw new InvalidOperationException(
ErrorCode.UNSUPPORTED_DATA_TYPE,
"Table `%s`.`%s` column `%s` is unsupported type %s",
schemaName, tableName, col.getName(), typeName);
>>>>>>>
if (!getAis().isTypeSupported(typeName)) {
throw new UnsupportedDataTypeException (
new TableName(schemaName, tableName),
col.getName(), typeName);
<<<<<<<
if (!ais.isTypeSupportedAsIndex(typeName)) {
throw new UnsupportedIndexDataTypeException (new TableName(schemaName, tableName),
=======
if (!getAis().isTypeSupportedAsIndex(typeName)) {
complainAboutIndexDataType(schemaName, tableName,
>>>>>>>
if (!getAis().isTypeSupportedAsIndex(typeName)) {
throw new UnsupportedIndexDataTypeException (new TableName(schemaName, tableName),
<<<<<<<
if (!ais.isTypeSupportedAsIndex(typeName)) {
throw new UnsupportedIndexDataTypeException (new TableName (schemaName, tableName),
=======
if (!getAis().isTypeSupportedAsIndex(typeName)) {
complainAboutIndexDataType(schemaName, tableName,
>>>>>>>
if (!getAis().isTypeSupportedAsIndex(typeName)) {
throw new UnsupportedIndexDataTypeException (new TableName (schemaName, tableName),
<<<<<<<
if (!ais.canTypesBeJoined(parentType, type)) {
throw new JoinToWrongColumnsException (new TableName (schemaName, tableName), columnDef.getName(),
new TableName (parentSchema, parentTableName), parentPKColumn.getName());
=======
if (!getAis().canTypesBeJoined(parentType, type)) {
throw new InvalidOperationException(
ErrorCode.JOIN_TO_WRONG_COLUMNS,
"Table `%s`.`%s` column `%s` [%s] cannot be joined to `%s`.`%s` column `%s` [%s]",
schemaName, tableName, columnDef.getName(), type,
parentSchema, parentTableName,
parentPKColumn.getName(), parentType);
>>>>>>>
if (!getAis().canTypesBeJoined(parentType, type)) {
throw new JoinToWrongColumnsException (new TableName (schemaName, tableName), columnDef.getName(),
new TableName (parentSchema, parentTableName), parentPKColumn.getName()); |
<<<<<<<
import com.akiban.qp.operator.QueryCanceledException;
=======
import com.akiban.server.aggregation.AggregatorRegistry;
>>>>>>>
import com.akiban.qp.operator.QueryCanceledException;
import com.akiban.server.aggregation.AggregatorRegistry; |
<<<<<<<
import com.akiban.cserver.service.session.SessionImpl;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.log4j.Logger;
=======
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
>>>>>>>
import com.akiban.cserver.service.session.SessionImpl; |
<<<<<<<
import com.akiban.server.types3.Attribute;
import com.akiban.server.types3.IllegalNameException;
import com.akiban.server.types3.TAttributeValues;
import com.akiban.server.types3.TAttributesDeclaration;
import com.akiban.server.types3.TClass;
import com.akiban.server.types3.TExecutionContext;
import com.akiban.server.types3.TFactory;
import com.akiban.server.types3.TInstance;
=======
import com.akiban.qp.operator.QueryContext;
import com.akiban.server.types3.*;
import com.akiban.server.types3.common.NumericFormatter;
>>>>>>>
import com.akiban.server.types3.Attribute;
import com.akiban.server.types3.IllegalNameException;
import com.akiban.server.types3.TAttributeValues;
import com.akiban.server.types3.TAttributesDeclaration;
import com.akiban.server.types3.TClass;
import com.akiban.server.types3.TExecutionContext;
import com.akiban.server.types3.TFactory;
import com.akiban.server.types3.TInstance;
import com.akiban.server.types3.common.NumericFormatter;
<<<<<<<
import java.util.Arrays;
=======
import com.akiban.util.AkibanAppender;
>>>>>>>
import java.util.Arrays;
<<<<<<<
super(MBundle.INSTANCE.id(), name, Attrs.class, 1, 1, -1, PUnderlying.BYTES, null, typeId);
=======
super(MBundle.INSTANCE.id(), name, Attrs.class, NumericFormatter.FORMAT.BYTES, 1, 1, -1, PUnderlying.BYTES, typeId);
>>>>>>>
super(MBundle.INSTANCE.id(), name, NumericFormatter.FORMAT.BYTES, Attrs.class, 1, 1, -1, PUnderlying.BYTES, null, typeId); |
<<<<<<<
inputSets.add(new TInputSet(targetType, BitSets.of(covering), false, false, exact, nextNormalizer));
nextNormalizer = null;
=======
inputSets.add(new TInputSet(targetType, BitSets.of(covering), false, false));
setExacts(covering);
>>>>>>>
inputSets.add(new TInputSet(targetType, BitSets.of(covering), false, false, nextNormalizer));
nextNormalizer = null;
setExacts(covering);
<<<<<<<
inputSets.add(new TInputSet(targetType, BitSets.of(covering), false, true, exact, nextNormalizer));
nextNormalizer = null;
=======
inputSets.add(new TInputSet(targetType, BitSets.of(covering), false, true));
>>>>>>>
inputSets.add(new TInputSet(targetType, BitSets.of(covering), false, true, nextNormalizer));
nextNormalizer = null;
<<<<<<<
inputSets.add(new TInputSet(targetType, BitSets.of(covering), true, false, exact, nextNormalizer));
nextNormalizer = null;
=======
assert vararg == null : vararg;
vararg = new TInputSet(targetType, BitSets.of(covering), true, false);
inputSets.add(vararg);
exactsBuilder.setVararg(exact);
>>>>>>>
assert vararg == null : vararg;
vararg = new TInputSet(targetType, BitSets.of(covering), true, false, nextNormalizer);
nextNormalizer = null;
inputSets.add(vararg);
exactsBuilder.setVararg(exact);
<<<<<<<
inputSets.add(new TInputSet(targetType, BitSets.of(covering), true, true, exact, nextNormalizer));
nextNormalizer = null;
=======
assert vararg == null : vararg;
vararg = new TInputSet(targetType, BitSets.of(covering), true, true);
inputSets.add(vararg);
exactsBuilder.setVararg(exact);
>>>>>>>
assert vararg == null : vararg;
vararg = new TInputSet(targetType, BitSets.of(covering), true, true, nextNormalizer);
inputSets.add(vararg);
nextNormalizer = null;
exactsBuilder.setVararg(exact);
<<<<<<<
public TInputSetBuilder nextInputPicksWith(TInstanceNormalizer nextNormalizer) {
this.nextNormalizer = nextNormalizer;
return this;
}
=======
public void setExact(int pos, boolean exact) {
if (exact) {
exactsBuilder.set(pos, true);
}
}
private void setExacts(int[] positions) {
if (exact) {
for (int pos : positions) {
setExact(pos, true);
}
}
}
public InputSetFlags exactInputs() {
return exactsBuilder.get();
}
>>>>>>>
public TInputSetBuilder nextInputPicksWith(TInstanceNormalizer nextNormalizer) {
this.nextNormalizer = nextNormalizer;
return this;
}
public void setExact(int pos, boolean exact) {
if (exact) {
exactsBuilder.set(pos, true);
}
}
private void setExacts(int[] positions) {
if (exact) {
for (int pos : positions) {
setExact(pos, true);
}
}
}
public InputSetFlags exactInputs() {
return exactsBuilder.get();
}
<<<<<<<
private boolean exact;
private TInstanceNormalizer nextNormalizer;
=======
private final InputSetFlags.Builder exactsBuilder = new InputSetFlags.Builder();
private boolean exact = false;
private TInputSet vararg = null;
>>>>>>>
private final InputSetFlags.Builder exactsBuilder = new InputSetFlags.Builder();
private TInstanceNormalizer nextNormalizer;
private boolean exact = false;
private TInputSet vararg = null; |
<<<<<<<
import static javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
import static org.apache.commons.lang.exception.ExceptionUtils.getStackTrace;
=======
import static org.apache.commons.lang3.exception.ExceptionUtils.getStackTrace;
>>>>>>>
import static javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
import static org.apache.commons.lang3.exception.ExceptionUtils.getStackTrace; |
<<<<<<<
private final static File TESTDIR = new File("/tmp/foundationdb-sql-junit");
private final static String TEST_FDB_ROOT_DIR = "sql_test";
=======
private final static File TESTDIR = new File("/tmp/fdb-sql-layer");
>>>>>>>
private final static File TESTDIR = new File("/tmp/fdb-sql-layer");
private final static String TEST_FDB_ROOT_DIR = "sql_test"; |
<<<<<<<
long ceil();
=======
BigDecimalWrapper divide(BigDecimalWrapper augend, int scale);
int getSign();
>>>>>>>
long ceil();
BigDecimalWrapper divide(BigDecimalWrapper augend, int scale);
int getSign(); |
<<<<<<<
import com.akiban.sql.optimizer.explain.*;
=======
import com.akiban.server.types3.aksql.aktypes.AkBool;
import com.akiban.server.types3.texpressions.TEvaluatableExpression;
import com.akiban.server.types3.texpressions.TPreparedExpression;
import com.akiban.sql.optimizer.explain.Attributes;
import com.akiban.sql.optimizer.explain.Explainer;
import com.akiban.sql.optimizer.explain.Label;
import com.akiban.sql.optimizer.explain.OperationExplainer;
import com.akiban.sql.optimizer.explain.PrimitiveExplainer;
import com.akiban.sql.optimizer.explain.Type;
>>>>>>>
import com.akiban.server.types3.aksql.aktypes.AkBool;
import com.akiban.server.types3.texpressions.TEvaluatableExpression;
import com.akiban.server.types3.texpressions.TPreparedExpression;
import com.akiban.sql.optimizer.explain.*;
<<<<<<<
return Format.Describe(this.getExplainer());
=======
Object toStringPredicate = (predicate == null) ? pPredicate : predicate;
return String.format("%s(%s, %s)", getClass().getSimpleName(), predicateRowType, toStringPredicate);
>>>>>>>
return Format.Describe(this.getExplainer()); |
<<<<<<<
super(TEST_BUNDLE_ID, name, 1, 1, 1, pUnderlying, null, null);
=======
super(TEST_BUNDLE_ID, name, null, 1, 1, 1, pUnderlying, null);
>>>>>>>
super(TEST_BUNDLE_ID, name, null, 1, 1, 1, pUnderlying, null, null); |
<<<<<<<
=======
private static final Tap.InOutTap TABLE_INDEX_MAINTENANCE_TAP = Tap.createTimer("index: maintain_table");
private static final Tap.PointTap TX_COMMIT_COUNT = Tap.createCount("write: tx_commit", true);
private static final Tap.PointTap TX_RETRY_TAP = Tap.createCount("write: tx_retry", true);
>>>>>>>
private static final Tap.InOutTap TABLE_INDEX_MAINTENANCE_TAP = Tap.createTimer("index: maintain_table"); |
<<<<<<<
private int[] keyDepth;
=======
private transient int[] keyDepth;
private transient int nColumns = -1;
>>>>>>>
private int[] keyDepth;
private int nColumns = -1; |
<<<<<<<
import com.akiban.server.types3.TClass;
import com.akiban.server.types3.TExecutionContext;
import com.akiban.server.types3.TFactory;
import com.akiban.server.types3.TInstance;
import com.akiban.server.types3.TParser;
import com.akiban.server.types3.TParsers;
import com.akiban.server.types3.TypeDeclarationException;
=======
import com.akiban.qp.operator.QueryContext;
import com.akiban.server.types3.*;
>>>>>>>
import com.akiban.server.types3.TClass;
import com.akiban.server.types3.TExecutionContext;
import com.akiban.server.types3.TFactory;
import com.akiban.server.types3.TInstance;
import com.akiban.server.types3.TParser;
import com.akiban.server.types3.TParsers;
import com.akiban.server.types3.TypeDeclarationException;
import com.akiban.server.types3.*;
<<<<<<<
protected MNumeric(String name, int serializationSize, PUnderlying pUnderlying, int defaultWidth, TParser parser) {
=======
protected MNumeric(String name, TClassFormatter formatter, int serializationSize, PUnderlying pUnderlying, int defaultWidth) {
>>>>>>>
protected MNumeric(String name, TClassFormatter formatter, int serializationSize, PUnderlying pUnderlying,
int defaultWidth, TParser parser)
{ |
<<<<<<<
return hasMore;
} catch (BufferFullException e) {
throw e; // Don't want this to be handled as an Exception
} catch (Exception e) {
cursor.setFinished();
throw new GenericInvalidOperationException(e);
=======
>>>>>>> |
<<<<<<<
public abstract AkCollator collatorAt(int index);
=======
public abstract TInstance typeInstanceAt(int index);
>>>>>>>
public abstract AkCollator collatorAt(int index);
public abstract TInstance typeInstanceAt(int index); |
<<<<<<<
import com.akiban.server.types3.common.IntAttribute;
import com.akiban.server.types3.Attribute;
=======
>>>>>>> |
<<<<<<<
=======
getJmxRegistryService().register(this);
>>>>>>>
getJmxRegistryService().register(this); |
<<<<<<<
leftSkipRow = new ValuesHolderRow(rowType());
assert leftRow.isHolding();
=======
leftSkipRow = new ValuesHolderRow(rowType);
assert leftRow != null;
>>>>>>>
leftSkipRow = new ValuesHolderRow(rowType);
assert leftRow != null;
<<<<<<<
while (f < rowType().nFields()) {
leftSkipRow.pvalueAt(f++).putNull();
=======
while (f < rowType.nFields()) {
leftSkipRow.valueAt(f++).putNull();
>>>>>>>
while (f < rowType().nFields()) {
leftSkipRow.valueAt(f++).putNull();
<<<<<<<
rightSkipRow = new ValuesHolderRow(rowType());
assert rightRow.isHolding();
=======
rightSkipRow = new ValuesHolderRow(rowType);
assert rightRow != null;
>>>>>>>
rightSkipRow = new ValuesHolderRow(rowType());
assert rightRow != null;
<<<<<<<
while (f < rowType().nFields()) {
rightSkipRow.pvalueAt(f++).putNull();
=======
while (f < rowType.nFields()) {
rightSkipRow.valueAt(f++).putNull();
>>>>>>>
while (f < rowType().nFields()) {
rightSkipRow.pvalueAt(f++).putNull(); |
<<<<<<<
import com.akiban.sql.optimizer.explain.Explainer;
import com.akiban.sql.optimizer.explain.Label;
import com.akiban.sql.optimizer.explain.PrimitiveExplainer;
import com.akiban.sql.optimizer.explain.Type;
import com.akiban.sql.optimizer.explain.std.ExpressionExplainer;
import com.akiban.util.ArgumentValidation;
import java.util.List;
=======
>>>>>>>
import com.akiban.sql.optimizer.explain.Explainer;
import com.akiban.sql.optimizer.explain.Label;
import com.akiban.sql.optimizer.explain.PrimitiveExplainer;
import com.akiban.sql.optimizer.explain.Type;
import com.akiban.sql.optimizer.explain.std.ExpressionExplainer;
import com.akiban.util.ArgumentValidation;
import java.util.List;
<<<<<<<
@Override
public String name()
{
return "FIELD";
}
@Override
public Explainer getExplainer()
{
Explainer ex = new ExpressionExplainer(Type.FUNCTION, name(), (List)null);
ex.addAttribute(Label.BINDING_POSITION, PrimitiveExplainer.getInstance(fieldIndex));
ex.addAttribute(Label.ROWTYPE, PrimitiveExplainer.getInstance(rowType.toString())); // TODO: Explainer for RowType?
return ex;
}
=======
@Override
public boolean nullIsContaminating()
{
return true;
}
>>>>>>>
@Override
public String name()
{
return "FIELD";
}
@Override
public Explainer getExplainer()
{
Explainer ex = new ExpressionExplainer(Type.FUNCTION, name(), (List)null);
ex.addAttribute(Label.BINDING_POSITION, PrimitiveExplainer.getInstance(fieldIndex));
ex.addAttribute(Label.ROWTYPE, PrimitiveExplainer.getInstance(rowType.toString())); // TODO: Explainer for RowType?
return ex;
}
public boolean nullIsContaminating()
{
return true;
} |
<<<<<<<
=======
// For use by RankExpression
protected CompareExpression(Expression lhs, Expression rhs) {
this(AkType.INT, lhs, null, rhs);
}
// overriding protected methods
@Override
protected void buildToString(StringBuilder sb) {//Field(2) < Literal(8888)
sb.append(left()).append(' ').append(comparison).append(' ').append(right());
}
>>>>>>>
// For use by RankExpression
protected CompareExpression(Expression lhs, Expression rhs) {
this(AkType.INT, lhs, null, rhs);
} |
<<<<<<<
MetricsService metricsService,
=======
CostModelFactory costModel,
>>>>>>>
CostModelFactory costModel,
MetricsService metricsService,
<<<<<<<
this.metricsService = metricsService;
=======
this.costModel = costModel;
>>>>>>>
this.costModel = costModel;
this.metricsService = metricsService;
<<<<<<<
public MetricsService metricsService() {
return metricsService;
}
=======
public CostModelFactory costModel() {
return costModel;
}
public ServiceManager serviceManager() {
return serviceManager;
}
>>>>>>>
public CostModelFactory costModel() {
return costModel;
}
public MetricsService metricsService() {
return metricsService;
}
public ServiceManager serviceManager() {
return serviceManager;
}
<<<<<<<
private final MetricsService metricsService;
=======
private final CostModelFactory costModel;
>>>>>>>
private final CostModelFactory costModel;
private final MetricsService metricsService; |
<<<<<<<
=======
import com.akiban.server.types3.mcompat.mtypes.MNumeric;
import com.akiban.server.types3.pvalue.PUnderlying;
>>>>>>>
import com.akiban.server.types3.pvalue.PUnderlying;
<<<<<<<
public FromInt8ToInt16(TClass source, TClass target, boolean auto, Constantness c)
=======
public FromInt8ToInt16(TClass source, MNumeric target, boolean auto, Constantness c)
>>>>>>>
public FromInt8ToInt16(TClass source, TClass target, boolean auto, Constantness c)
<<<<<<<
public FromInt8ToInt32(TClass source, TClass target, boolean auto, Constantness c)
=======
public FromInt8ToInt32(TClass source, MNumeric target, boolean auto, Constantness c)
>>>>>>>
public FromInt8ToInt32(TClass source, TClass target, boolean auto, Constantness c)
<<<<<<<
public FromInt8ToInt64(TClass source, TClass target, boolean auto, Constantness c)
=======
public FromInt8ToInt64(TClass source, MNumeric target, boolean auto, Constantness c)
>>>>>>>
public FromInt8ToInt64(TClass source, TClass target, boolean auto, Constantness c)
<<<<<<<
public FromInt16ToInt8(TClass source, TClass target, boolean auto, Constantness c)
=======
public FromInt16ToInt8(TClass source, MNumeric target, boolean auto, Constantness c)
>>>>>>>
public FromInt16ToInt8(TClass source, TClass target, boolean auto, Constantness c)
<<<<<<<
public FromInt16ToInt16(TClass source, TClass target, boolean auto, Constantness c)
=======
public FromInt16ToInt16(TClass source, MNumeric target, boolean auto, Constantness c)
>>>>>>>
public FromInt16ToInt16(TClass source, TClass target, boolean auto, Constantness c)
<<<<<<<
public FromInt16ToInt32(TClass source, TClass target, boolean auto, Constantness c)
=======
public FromInt16ToInt32(TClass source, MNumeric target, boolean auto, Constantness c)
>>>>>>>
public FromInt16ToInt32(TClass source, TClass target, boolean auto, Constantness c)
<<<<<<<
public FromInt16ToInt64(TClass source, TClass target, boolean auto, Constantness c)
=======
public FromInt16ToInt64(TClass source, MNumeric target, boolean auto, Constantness c)
>>>>>>>
public FromInt16ToInt64(TClass source, TClass target, boolean auto, Constantness c)
<<<<<<<
public FromInt32ToInt8(TClass source, TClass target, boolean auto, Constantness c)
=======
public FromInt32ToInt8(TClass source, MNumeric target, boolean auto, Constantness c)
>>>>>>>
public FromInt32ToInt8(TClass source, TClass target, boolean auto, Constantness c)
<<<<<<<
public FromInt32ToInt16(TClass source, TClass target, boolean auto, Constantness c)
=======
public FromInt32ToInt16(TClass source, MNumeric target, boolean auto, Constantness c)
>>>>>>>
public FromInt32ToInt16(TClass source, TClass target, boolean auto, Constantness c)
<<<<<<<
public FromInt32ToInt32(TClass source, TClass target, boolean auto, Constantness c)
=======
public FromInt32ToInt32(TClass source, MNumeric target, boolean auto, Constantness c)
>>>>>>>
public FromInt32ToInt32(TClass source, TClass target, boolean auto, Constantness c)
<<<<<<<
public FromInt32ToInt64(TClass source, TClass target, boolean auto, Constantness c)
=======
public FromInt32ToInt64(TClass source, MNumeric target, boolean auto, Constantness c)
>>>>>>>
public FromInt32ToInt64(TClass source, TClass target, boolean auto, Constantness c)
<<<<<<<
public FromInt64ToInt8(TClass source, TClass target, boolean auto, Constantness c)
=======
public FromInt64ToInt8(TClass source, MNumeric target, boolean auto, Constantness c)
>>>>>>>
public FromInt64ToInt8(TClass source, TClass target, boolean auto, Constantness c)
<<<<<<<
public FromInt64ToInt16(TClass source, TClass target, boolean auto, Constantness c)
=======
public FromInt64ToInt16(TClass source, MNumeric target, boolean auto, Constantness c)
>>>>>>>
public FromInt64ToInt16(TClass source, TClass target, boolean auto, Constantness c)
<<<<<<<
public FromInt64ToInt32(TClass source, TClass target, boolean auto, Constantness c)
=======
public FromInt64ToInt32(TClass source, MNumeric target, boolean auto, Constantness c)
>>>>>>>
public FromInt64ToInt32(TClass source, TClass target, boolean auto, Constantness c)
<<<<<<<
public FromInt64ToInt64(TClass source, TClass target, boolean auto, Constantness c)
=======
public FromInt64ToInt64(TClass source, MNumeric target, boolean auto, Constantness c)
>>>>>>>
public FromInt64ToInt64(TClass source, TClass target, boolean auto, Constantness c) |
<<<<<<<
private static final Logger logger = LoggerFactory.getLogger(SQLResource.class);
=======
>>>>>>>
private static final Logger logger = LoggerFactory.getLogger(SQLResource.class);
<<<<<<<
@QueryParam("q") final String query) {
logger.debug("/sql/query: {}", query);
=======
final String jsonParams) {
>>>>>>>
final String jsonParams) {
logger.debug("/sql/query: {}", jsonParams);
<<<<<<<
@QueryParam("q") final String query) {
logger.debug("/sql/explain: {}", query);
=======
final String jsonParams) {
>>>>>>>
final String jsonParams) {
logger.debug("/sql/explain: {}", jsonParams); |
<<<<<<<
public Cursor newIndexCursor(Index index, IndexKeyRange keyRange, API.Ordering ordering, UserTable innerJoinUntil)
=======
public Cursor newIndexCursor(Index index, boolean reverse, IndexKeyRange keyRange, IndexScanSelector selector)
>>>>>>>
public Cursor newIndexCursor(Index index, IndexKeyRange keyRange, API.Ordering ordering, IndexScanSelector selector)
<<<<<<<
cursor = new PersistitIndexCursor(this, schema.indexRowType(index), keyRange, ordering, innerJoinUntil);
=======
cursor = new PersistitIndexCursor(this, schema.indexRowType(index), reverse, keyRange, selector);
>>>>>>>
cursor = new PersistitIndexCursor(this, schema.indexRowType(index), keyRange, ordering, selector); |
<<<<<<<
import com.akiban.server.error.PersistItErrorException;
import com.akiban.server.rowdata.FieldDef;
import com.akiban.server.rowdata.RowDef;
=======
>>>>>>>
import com.akiban.server.error.PersistItErrorException; |
<<<<<<<
import org.joda.time.DateTimeZone;
import org.joda.time.MutableDateTime;
=======
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
>>>>>>>
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.MutableDateTime;
<<<<<<<
public static MutableDateTime toJodaDatetime(long ymd_hms[], String tz)
{
return new MutableDateTime((int)ymd_hms[YEAR_INDEX], (int)ymd_hms[MONTH_INDEX], (int)ymd_hms[DAY_INDEX],
(int)ymd_hms[HOUR_INDEX], (int)ymd_hms[MIN_INDEX], (int)ymd_hms[SEC_INDEX], 0,
DateTimeZone.forID(tz));
}
public static long[] fromEncodedDate(long val)
=======
/**
* TODO: This function is used in CUR_DATE/TIME; performance could be improved
* by passing the Date(Time) object directly to this function
* so it won't have to create one.
*
* @param millis
* @param tz
* @return the (MySQL) encoded DATE value
*/
public static int encodeDate(long millis, String tz)
{
DateTime dt = new DateTime(millis, DateTimeZone.forID(tz));
return dt.getYear() * 512
+ dt.getMonthOfYear() * 32
+ dt.getDayOfMonth();
}
public static long[] decodeDate(long val)
>>>>>>>
public static MutableDateTime toJodaDatetime(long ymd_hms[], String tz)
{
return new MutableDateTime((int)ymd_hms[YEAR_INDEX], (int)ymd_hms[MONTH_INDEX], (int)ymd_hms[DAY_INDEX],
(int)ymd_hms[HOUR_INDEX], (int)ymd_hms[MIN_INDEX], (int)ymd_hms[SEC_INDEX], 0,
DateTimeZone.forID(tz));
}
/**
* TODO: This function is used in CUR_DATE/TIME; performance could be improved
* by passing the Date(Time) object directly to this function
* so it won't have to create one.
*
* @param millis
* @param tz
* @return the (MySQL) encoded DATE value
*/
public static int encodeDate(long millis, String tz)
{
DateTime dt = new DateTime(millis, DateTimeZone.forID(tz));
return dt.getYear() * 512
+ dt.getMonthOfYear() * 32
+ dt.getDayOfMonth();
}
public static long[] decodeDate(long val) |
<<<<<<<
import com.akiban.rest.ResponseHelper;
=======
import com.akiban.rest.ResourceRequirements;
>>>>>>>
import com.akiban.rest.ResourceRequirements;
<<<<<<<
import com.akiban.server.service.session.SessionService;
import com.akiban.server.service.transaction.TransactionService;
import com.google.inject.Inject;
=======
>>>>>>> |
<<<<<<<
@SuppressWarnings("unchecked")
public static <T> T getCached(PValueSource source, TInstance tInstance, PValueCacher<? extends T> cacher) {
if (source.hasCacheValue())
return (T) source.getObject();
return cacher.valueToCache(source, tInstance);
}
public static int hash(PValueSource source, AkCollator collator) {
=======
public static int hash(PValueSource source) {
>>>>>>>
public static int hash(PValueSource source, AkCollator collator) { |
<<<<<<<
private static AggregatorFactory FACTORY = new TestFactory();
private static TestAdapter ADAPTER = new TestAdapter();
// nested classes
private static class TestFactory implements AggregatorFactory {
=======
private static final AggregatorFactory TEST_AGGREGATOR = new AggregatorFactory() {
>>>>>>>
private static final TestAdapter ADAPTER = new TestAdapter();
private static final AggregatorFactory TEST_AGGREGATOR = new AggregatorFactory() { |
<<<<<<<
public TInstanceNormalizer instanceAdjuster() {
assert normalizer != null;
return normalizer;
}
public TInputSet(TClass targetType, BitSet covering, boolean coversRemaining, boolean isPicking, boolean isExact,
TInstanceNormalizer normalizer)
=======
public TInputSet(TClass targetType, BitSet covering, boolean coversRemaining, boolean isPicking)
>>>>>>>
public TInstanceNormalizer instanceAdjuster() {
assert normalizer != null;
return normalizer;
}
public TInputSet(TClass targetType, BitSet covering, boolean coversRemaining, boolean isPicking,
TInstanceNormalizer normalizer)
<<<<<<<
this.isExact = isExact;
if (normalizer != null)
this.normalizer = normalizer;
else if (targetType != null)
this.normalizer = new PickingNormalizer(targetType);
else
this.normalizer = null;
=======
>>>>>>>
if (normalizer != null)
this.normalizer = normalizer;
else if (targetType != null)
this.normalizer = new PickingNormalizer(targetType);
else
this.normalizer = null;
<<<<<<<
private final boolean isExact;
private final TInstanceNormalizer normalizer;
private static class PickingNormalizer implements TInstanceNormalizer {
@Override
public void apply(TInstanceAdjuster adjuster, TValidatedOverload overload, TInputSet inputSet, int max) {
throw new UnsupportedOperationException(); // TODO
}
private PickingNormalizer(TClass tclass) {
this.tclass = tclass;
}
private final TClass tclass;
}
=======
>>>>>>>
private final TInstanceNormalizer normalizer;
private static class PickingNormalizer implements TInstanceNormalizer {
@Override
public void apply(TInstanceAdjuster adjuster, TValidatedOverload overload, TInputSet inputSet, int max) {
throw new UnsupportedOperationException(); // TODO
}
private PickingNormalizer(TClass tclass) {
this.tclass = tclass;
}
private final TClass tclass;
} |
<<<<<<<
=======
import com.akiban.server.types.AkType;
import com.akiban.server.types.ValueSource;
>>>>>>>
import com.akiban.server.types.ValueSource;
<<<<<<<
if (endBoundColumns == 0 || start == null) {
startKey.append(startBoundary);
=======
BoundExpressions startExpressions = null;
if (start == null) {
startKey = null;
>>>>>>>
BoundExpressions startExpressions = null;
if (startBoundColumns == 0 || start == null) {
startKey.append(startBoundary);
<<<<<<<
for (int f = 0; f < endBoundColumns; f++) {
endKeyTarget.expectingType(types[f]);
Converters.convert(endExpressions.eval(f), endKeyTarget);
=======
for (int f = 0; f < boundColumns; f++) {
if (end.columnSelector().includesColumn(f)) {
ValueSource valueSource = endExpressions.eval(f);
if (valueSource.isNull() && startExpressions != null && !startExpressions.eval(f).isNull()) {
endKey.append(Key.AFTER);
} else {
endKeyTarget.expectingType(types[f]);
Converters.convert(valueSource, endKeyTarget);
}
} else {
endKey.append(Key.AFTER);
}
>>>>>>>
for (int f = 0; f < endBoundColumns; f++) {
if (end.columnSelector().includesColumn(f)) {
ValueSource valueSource = endExpressions.eval(f);
if (valueSource.isNull() && startExpressions != null && !startExpressions.eval(f).isNull()) {
endKey.append(Key.AFTER);
} else {
endKeyTarget.expectingType(types[f]);
Converters.convert(valueSource, endKeyTarget);
}
} else {
endKey.append(Key.AFTER);
} |
<<<<<<<
import com.akiban.ais.model.GroupTable;
import com.akiban.ais.model.Index;
import com.akiban.ais.model.IndexColumn;
import com.akiban.server.RowData;
import com.akiban.server.RowDefCache;
import com.akiban.server.api.dml.ColumnSelector;
import com.akiban.server.api.dml.scan.RowDataOutput;
import com.akiban.server.service.config.Property;
import com.akiban.server.service.memcache.HapiProcessorFactory;
import com.akiban.server.store.PersistitStore;
import com.akiban.server.service.memcache.MemcacheService;
import com.akiban.server.store.Store;
import com.akiban.util.ListUtils;
=======
>>>>>>> |